ncbateman committed on
Commit ed450f2 · verified · 1 Parent(s): 3bf2dd3

Training in progress, step 800, checkpoint

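The files below are the contents of the `last-checkpoint/` folder that the Hugging Face `Trainer` refreshes on each save: the PEFT adapter weights (`adapter_model.safetensors`), optimizer and scheduler state, per-process RNG states, and `trainer_state.json`, now at global step 800. A minimal sketch of loading the adapter from such a checkpoint with PEFT; the base model identifier is a placeholder, since this commit does not state which base model the adapter was trained against:

```python
from peft import PeftModel
from transformers import AutoModelForCausalLM

# "your-org/your-base-model" is a placeholder, not stated anywhere in this commit;
# "last-checkpoint" is the checkpoint folder this commit updates.
base = AutoModelForCausalLM.from_pretrained("your-org/your-base-model")
model = PeftModel.from_pretrained(base, "last-checkpoint")
model.eval()
```

To continue training rather than just load weights, the same folder can be passed to `Trainer.train(resume_from_checkpoint="last-checkpoint")`; the optimizer, scheduler, and RNG state files checked in here exist to make that resume exact.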
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:fb23412629f2c38a013de847c54d08a17e2d60c8eebb9336357de1ef6093cc85
+ oid sha256:7158fc026fcb3c75de92fe98a9e7575457f3d0ddfdd21de716103cd610f69de9
  size 792912
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:12c201fa6af0c7dae42b7caac127d4059e66de021daf54d7824a2150587c2a1f
+ oid sha256:4e19210c69c04c9e2774683c62f913e9f0f21be0a7cfcac2e1acf7c710d1c386
  size 807226
last-checkpoint/rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8711c592a178d9cd22956b8edbf920856ddbc92d7afd8179b11fb5f07a37307a
+ oid sha256:22ad685c1237efec2f2e8de413bc38dd1475089fe9b28d8d43fe1c45017827ac
  size 14960
last-checkpoint/rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a1759aa5be29096275a99200c5cd7dd185201809851911e7e3019b68736798ba
+ oid sha256:53dc596014d0f9b21145357c41c37d57500c8efda8a0f2014bb2d988a043e59f
  size 15024
last-checkpoint/rng_state_2.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5ea522a1b1ef4dd809b520fa67d2ff06ae96fd66ab2e66ee4a57c6732c9fb54c
+ oid sha256:223b0451f4d9b8dd81e35f5a28bc073e05755799383bf2e8538e323aed665d3b
  size 15024
last-checkpoint/rng_state_3.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:118c52a9d21da2b37ad908267686ca583d9530f4e1ec9252c871a44822e06873
+ oid sha256:18d17262a3d5928a646b0fefef89331d9cdec36c92fdf757662a6e2bf2165b6d
  size 15024
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:32c9d6743104ecc5c305d5919f3858e0f838af944ceaa98f7f8a3401e1bc6552
+ oid sha256:f17ccafd97486ba1c378a3ba81af6282c7ec0337562d5f30dc98873ace309528
  size 1064
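Each `CHANGED` entry above is a Git LFS pointer rather than the binary itself: the stored object is identified only by its `oid sha256:` digest and its `size` in bytes. A small sketch for checking a downloaded blob against its pointer, using the new `adapter_model.safetensors` values from this commit; the local file path is an assumption:

```python
import hashlib
import os

path = "last-checkpoint/adapter_model.safetensors"  # assumed local download path
expected_oid = "7158fc026fcb3c75de92fe98a9e7575457f3d0ddfdd21de716103cd610f69de9"
expected_size = 792912  # bytes, from the pointer above

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert digest == expected_oid, "sha256 does not match the LFS pointer"
print("local file matches the LFS pointer")
```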
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
  {
  "best_metric": null,
  "best_model_checkpoint": null,
- "epoch": 0.863664404688464,
+ "epoch": 0.987045033929673,
  "eval_steps": 203,
- "global_step": 700,
+ "global_step": 800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
@@ -4939,6 +4939,706 @@
  "learning_rate": 5.158265042373672e-06,
  "loss": 4.0688,
  "step": 700
4942
+ },
4943
+ {
4944
+ "epoch": 0.864898210980876,
4945
+ "grad_norm": 1199.1778564453125,
4946
+ "learning_rate": 5.067337733271083e-06,
4947
+ "loss": 4.1535,
4948
+ "step": 701
4949
+ },
4950
+ {
4951
+ "epoch": 0.8661320172732881,
4952
+ "grad_norm": 1050.1751708984375,
4953
+ "learning_rate": 4.977176182915727e-06,
4954
+ "loss": 4.1608,
4955
+ "step": 702
4956
+ },
4957
+ {
4958
+ "epoch": 0.8673658235657001,
4959
+ "grad_norm": 601.8255615234375,
4960
+ "learning_rate": 4.887781927873458e-06,
4961
+ "loss": 4.0056,
4962
+ "step": 703
4963
+ },
4964
+ {
4965
+ "epoch": 0.8685996298581122,
4966
+ "grad_norm": 781.65283203125,
4967
+ "learning_rate": 4.799156491633655e-06,
4968
+ "loss": 4.2278,
4969
+ "step": 704
4970
+ },
4971
+ {
4972
+ "epoch": 0.8698334361505243,
4973
+ "grad_norm": 953.7297973632812,
4974
+ "learning_rate": 4.711301384583183e-06,
4975
+ "loss": 3.9453,
4976
+ "step": 705
4977
+ },
4978
+ {
4979
+ "epoch": 0.8710672424429364,
4980
+ "grad_norm": 682.08837890625,
4981
+ "learning_rate": 4.624218103980665e-06,
4982
+ "loss": 3.9502,
4983
+ "step": 706
4984
+ },
4985
+ {
4986
+ "epoch": 0.8723010487353485,
4987
+ "grad_norm": 999.5166625976562,
4988
+ "learning_rate": 4.537908133931018e-06,
4989
+ "loss": 4.0734,
4990
+ "step": 707
4991
+ },
4992
+ {
4993
+ "epoch": 0.8735348550277606,
4994
+ "grad_norm": 1854.658935546875,
4995
+ "learning_rate": 4.452372945360072e-06,
4996
+ "loss": 4.0023,
4997
+ "step": 708
4998
+ },
4999
+ {
5000
+ "epoch": 0.8747686613201727,
5001
+ "grad_norm": 974.3369750976562,
5002
+ "learning_rate": 4.367613995989589e-06,
5003
+ "loss": 4.1085,
5004
+ "step": 709
5005
+ },
5006
+ {
5007
+ "epoch": 0.8760024676125848,
5008
+ "grad_norm": 929.5103759765625,
5009
+ "learning_rate": 4.283632730312348e-06,
5010
+ "loss": 4.2707,
5011
+ "step": 710
5012
+ },
5013
+ {
5014
+ "epoch": 0.8772362739049969,
5015
+ "grad_norm": 820.7687377929688,
5016
+ "learning_rate": 4.200430579567571e-06,
5017
+ "loss": 4.0151,
5018
+ "step": 711
5019
+ },
5020
+ {
5021
+ "epoch": 0.878470080197409,
5022
+ "grad_norm": 851.3623657226562,
5023
+ "learning_rate": 4.118008961716552e-06,
5024
+ "loss": 4.1227,
5025
+ "step": 712
5026
+ },
5027
+ {
5028
+ "epoch": 0.8797038864898211,
5029
+ "grad_norm": 1031.295166015625,
5030
+ "learning_rate": 4.0363692814184e-06,
5031
+ "loss": 4.0757,
5032
+ "step": 713
5033
+ },
5034
+ {
5035
+ "epoch": 0.8809376927822332,
5036
+ "grad_norm": 1569.51318359375,
5037
+ "learning_rate": 3.955512930006222e-06,
5038
+ "loss": 4.3649,
5039
+ "step": 714
5040
+ },
5041
+ {
5042
+ "epoch": 0.8821714990746453,
5043
+ "grad_norm": 1083.7933349609375,
5044
+ "learning_rate": 3.87544128546331e-06,
5045
+ "loss": 4.0284,
5046
+ "step": 715
5047
+ },
5048
+ {
5049
+ "epoch": 0.8834053053670574,
5050
+ "grad_norm": 761.168212890625,
5051
+ "learning_rate": 3.7961557123997018e-06,
5052
+ "loss": 4.2838,
5053
+ "step": 716
5054
+ },
5055
+ {
5056
+ "epoch": 0.8846391116594695,
5057
+ "grad_norm": 821.79443359375,
5058
+ "learning_rate": 3.7176575620289368e-06,
5059
+ "loss": 4.0374,
5060
+ "step": 717
5061
+ },
5062
+ {
5063
+ "epoch": 0.8858729179518815,
5064
+ "grad_norm": 877.775146484375,
5065
+ "learning_rate": 3.6399481721449857e-06,
5066
+ "loss": 4.174,
5067
+ "step": 718
5068
+ },
5069
+ {
5070
+ "epoch": 0.8871067242442936,
5071
+ "grad_norm": 701.419677734375,
5072
+ "learning_rate": 3.563028867099505e-06,
5073
+ "loss": 3.98,
5074
+ "step": 719
5075
+ },
5076
+ {
5077
+ "epoch": 0.8883405305367057,
5078
+ "grad_norm": 802.7510375976562,
5079
+ "learning_rate": 3.4869009577792157e-06,
5080
+ "loss": 4.0774,
5081
+ "step": 720
5082
+ },
5083
+ {
5084
+ "epoch": 0.8895743368291178,
5085
+ "grad_norm": 699.1093139648438,
5086
+ "learning_rate": 3.4115657415835835e-06,
5087
+ "loss": 4.0156,
5088
+ "step": 721
5089
+ },
5090
+ {
5091
+ "epoch": 0.8908081431215299,
5092
+ "grad_norm": 733.71484375,
5093
+ "learning_rate": 3.3370245024027414e-06,
5094
+ "loss": 4.2041,
5095
+ "step": 722
5096
+ },
5097
+ {
5098
+ "epoch": 0.892041949413942,
5099
+ "grad_norm": 527.6852416992188,
5100
+ "learning_rate": 3.2632785105955467e-06,
5101
+ "loss": 3.9713,
5102
+ "step": 723
5103
+ },
5104
+ {
5105
+ "epoch": 0.8932757557063541,
5106
+ "grad_norm": 798.1897583007812,
5107
+ "learning_rate": 3.190329022967975e-06,
5108
+ "loss": 4.0734,
5109
+ "step": 724
5110
+ },
5111
+ {
5112
+ "epoch": 0.8945095619987662,
5113
+ "grad_norm": 723.0835571289062,
5114
+ "learning_rate": 3.1181772827516664e-06,
5115
+ "loss": 4.0981,
5116
+ "step": 725
5117
+ },
5118
+ {
5119
+ "epoch": 0.8957433682911783,
5120
+ "grad_norm": 783.1177978515625,
5121
+ "learning_rate": 3.046824519582808e-06,
5122
+ "loss": 4.0234,
5123
+ "step": 726
5124
+ },
5125
+ {
5126
+ "epoch": 0.8969771745835904,
5127
+ "grad_norm": 666.199462890625,
5128
+ "learning_rate": 2.9762719494810855e-06,
5129
+ "loss": 3.9911,
5130
+ "step": 727
5131
+ },
5132
+ {
5133
+ "epoch": 0.8982109808760025,
5134
+ "grad_norm": 901.4703369140625,
5135
+ "learning_rate": 2.9065207748290134e-06,
5136
+ "loss": 4.3203,
5137
+ "step": 728
5138
+ },
5139
+ {
5140
+ "epoch": 0.8994447871684146,
5141
+ "grad_norm": 615.899169921875,
5142
+ "learning_rate": 2.83757218435145e-06,
5143
+ "loss": 4.1368,
5144
+ "step": 729
5145
+ },
5146
+ {
5147
+ "epoch": 0.9006785934608267,
5148
+ "grad_norm": 953.3482666015625,
5149
+ "learning_rate": 2.7694273530953163e-06,
5150
+ "loss": 4.2854,
5151
+ "step": 730
5152
+ },
5153
+ {
5154
+ "epoch": 0.9019123997532388,
5155
+ "grad_norm": 939.6257934570312,
5156
+ "learning_rate": 2.702087442409551e-06,
5157
+ "loss": 4.0877,
5158
+ "step": 731
5159
+ },
5160
+ {
5161
+ "epoch": 0.9031462060456509,
5162
+ "grad_norm": 666.4422607421875,
5163
+ "learning_rate": 2.6355535999253887e-06,
5164
+ "loss": 4.1066,
5165
+ "step": 732
5166
+ },
5167
+ {
5168
+ "epoch": 0.904380012338063,
5169
+ "grad_norm": 737.6424560546875,
5170
+ "learning_rate": 2.5698269595367254e-06,
5171
+ "loss": 4.1649,
5172
+ "step": 733
5173
+ },
5174
+ {
5175
+ "epoch": 0.905613818630475,
5176
+ "grad_norm": 659.7061157226562,
5177
+ "learning_rate": 2.5049086413808377e-06,
5178
+ "loss": 4.0905,
5179
+ "step": 734
5180
+ },
5181
+ {
5182
+ "epoch": 0.9068476249228871,
5183
+ "grad_norm": 725.164306640625,
5184
+ "learning_rate": 2.4407997518192728e-06,
5185
+ "loss": 3.9756,
5186
+ "step": 735
5187
+ },
5188
+ {
5189
+ "epoch": 0.9080814312152992,
5190
+ "grad_norm": 1040.10205078125,
5191
+ "learning_rate": 2.377501383419006e-06,
5192
+ "loss": 4.0973,
5193
+ "step": 736
5194
+ },
5195
+ {
5196
+ "epoch": 0.9093152375077113,
5197
+ "grad_norm": 933.7422485351562,
5198
+ "learning_rate": 2.3150146149338247e-06,
5199
+ "loss": 3.9678,
5200
+ "step": 737
5201
+ },
5202
+ {
5203
+ "epoch": 0.9105490438001234,
5204
+ "grad_norm": 729.0170288085938,
5205
+ "learning_rate": 2.253340511285923e-06,
5206
+ "loss": 4.0576,
5207
+ "step": 738
5208
+ },
5209
+ {
5210
+ "epoch": 0.9117828500925355,
5211
+ "grad_norm": 700.053955078125,
5212
+ "learning_rate": 2.1924801235477743e-06,
5213
+ "loss": 4.2267,
5214
+ "step": 739
5215
+ },
5216
+ {
5217
+ "epoch": 0.9130166563849476,
5218
+ "grad_norm": 834.5526733398438,
5219
+ "learning_rate": 2.132434488924212e-06,
5220
+ "loss": 3.9349,
5221
+ "step": 740
5222
+ },
5223
+ {
5224
+ "epoch": 0.9142504626773597,
5225
+ "grad_norm": 1009.9600830078125,
5226
+ "learning_rate": 2.073204630734743e-06,
5227
+ "loss": 4.2046,
5228
+ "step": 741
5229
+ },
5230
+ {
5231
+ "epoch": 0.9154842689697718,
5232
+ "grad_norm": 581.45849609375,
5233
+ "learning_rate": 2.0147915583961173e-06,
5234
+ "loss": 4.1017,
5235
+ "step": 742
5236
+ },
5237
+ {
5238
+ "epoch": 0.9167180752621839,
5239
+ "grad_norm": 612.1376953125,
5240
+ "learning_rate": 1.9571962674051204e-06,
5241
+ "loss": 4.2051,
5242
+ "step": 743
5243
+ },
5244
+ {
5245
+ "epoch": 0.917951881554596,
5246
+ "grad_norm": 963.0797729492188,
5247
+ "learning_rate": 1.9004197393216294e-06,
5248
+ "loss": 4.1168,
5249
+ "step": 744
5250
+ },
5251
+ {
5252
+ "epoch": 0.919185687847008,
5253
+ "grad_norm": 852.234130859375,
5254
+ "learning_rate": 1.84446294175184e-06,
5255
+ "loss": 4.0354,
5256
+ "step": 745
5257
+ },
5258
+ {
5259
+ "epoch": 0.9204194941394201,
5260
+ "grad_norm": 1199.9132080078125,
5261
+ "learning_rate": 1.7893268283318276e-06,
5262
+ "loss": 4.3468,
5263
+ "step": 746
5264
+ },
5265
+ {
5266
+ "epoch": 0.9216533004318322,
5267
+ "grad_norm": 724.0450439453125,
5268
+ "learning_rate": 1.735012338711256e-06,
5269
+ "loss": 4.0663,
5270
+ "step": 747
5271
+ },
5272
+ {
5273
+ "epoch": 0.9228871067242443,
5274
+ "grad_norm": 807.9916381835938,
5275
+ "learning_rate": 1.681520398537373e-06,
5276
+ "loss": 4.116,
5277
+ "step": 748
5278
+ },
5279
+ {
5280
+ "epoch": 0.9241209130166563,
5281
+ "grad_norm": 1655.5943603515625,
5282
+ "learning_rate": 1.6288519194392615e-06,
5283
+ "loss": 4.157,
5284
+ "step": 749
5285
+ },
5286
+ {
5287
+ "epoch": 0.9253547193090684,
5288
+ "grad_norm": 559.2360229492188,
5289
+ "learning_rate": 1.5770077990122645e-06,
5290
+ "loss": 3.9687,
5291
+ "step": 750
5292
+ },
5293
+ {
5294
+ "epoch": 0.9265885256014805,
5295
+ "grad_norm": 1023.9130249023438,
5296
+ "learning_rate": 1.5259889208027013e-06,
5297
+ "loss": 4.1494,
5298
+ "step": 751
5299
+ },
5300
+ {
5301
+ "epoch": 0.9278223318938926,
5302
+ "grad_norm": 682.001220703125,
5303
+ "learning_rate": 1.4757961542928355e-06,
5304
+ "loss": 4.1164,
5305
+ "step": 752
5306
+ },
5307
+ {
5308
+ "epoch": 0.9290561381863047,
5309
+ "grad_norm": 781.3024291992188,
5310
+ "learning_rate": 1.4264303548859992e-06,
5311
+ "loss": 4.0709,
5312
+ "step": 753
5313
+ },
5314
+ {
5315
+ "epoch": 0.9302899444787168,
5316
+ "grad_norm": 1117.8253173828125,
5317
+ "learning_rate": 1.3778923638920971e-06,
5318
+ "loss": 4.2305,
5319
+ "step": 754
5320
+ },
5321
+ {
5322
+ "epoch": 0.9315237507711289,
5323
+ "grad_norm": 982.4514770507812,
5324
+ "learning_rate": 1.330183008513186e-06,
5325
+ "loss": 4.0738,
5326
+ "step": 755
5327
+ },
5328
+ {
5329
+ "epoch": 0.932757557063541,
5330
+ "grad_norm": 663.4027709960938,
5331
+ "learning_rate": 1.283303101829425e-06,
5332
+ "loss": 4.2843,
5333
+ "step": 756
5334
+ },
5335
+ {
5336
+ "epoch": 0.9339913633559531,
5337
+ "grad_norm": 605.0052490234375,
5338
+ "learning_rate": 1.2372534427852079e-06,
5339
+ "loss": 4.1355,
5340
+ "step": 757
5341
+ },
5342
+ {
5343
+ "epoch": 0.9352251696483652,
5344
+ "grad_norm": 678.015625,
5345
+ "learning_rate": 1.1920348161755413e-06,
5346
+ "loss": 4.1688,
5347
+ "step": 758
5348
+ },
5349
+ {
5350
+ "epoch": 0.9364589759407773,
5351
+ "grad_norm": 1134.179443359375,
5352
+ "learning_rate": 1.1476479926326945e-06,
5353
+ "loss": 4.2093,
5354
+ "step": 759
5355
+ },
5356
+ {
5357
+ "epoch": 0.9376927822331894,
5358
+ "grad_norm": 772.4798583984375,
5359
+ "learning_rate": 1.104093728613026e-06,
5360
+ "loss": 4.058,
5361
+ "step": 760
5362
+ },
5363
+ {
5364
+ "epoch": 0.9389265885256015,
5365
+ "grad_norm": 913.7644653320312,
5366
+ "learning_rate": 1.061372766384111e-06,
5367
+ "loss": 4.0062,
5368
+ "step": 761
5369
+ },
5370
+ {
5371
+ "epoch": 0.9401603948180136,
5372
+ "grad_norm": 660.7054443359375,
5373
+ "learning_rate": 1.0194858340121183e-06,
5374
+ "loss": 4.0886,
5375
+ "step": 762
5376
+ },
5377
+ {
5378
+ "epoch": 0.9413942011104257,
5379
+ "grad_norm": 673.8242797851562,
5380
+ "learning_rate": 9.784336453493414e-07,
5381
+ "loss": 4.1026,
5382
+ "step": 763
5383
+ },
5384
+ {
5385
+ "epoch": 0.9426280074028378,
5386
+ "grad_norm": 671.2329711914062,
5387
+ "learning_rate": 9.382169000221041e-07,
5388
+ "loss": 4.1491,
5389
+ "step": 764
5390
+ },
5391
+ {
5392
+ "epoch": 0.9438618136952498,
5393
+ "grad_norm": 581.8679809570312,
5394
+ "learning_rate": 8.988362834187747e-07,
5395
+ "loss": 4.0206,
5396
+ "step": 765
5397
+ },
5398
+ {
5399
+ "epoch": 0.9450956199876619,
5400
+ "grad_norm": 1120.507568359375,
5401
+ "learning_rate": 8.602924666781254e-07,
5402
+ "loss": 4.0462,
5403
+ "step": 766
5404
+ },
5405
+ {
5406
+ "epoch": 0.946329426280074,
5407
+ "grad_norm": 666.160400390625,
5408
+ "learning_rate": 8.225861066778806e-07,
5409
+ "loss": 4.0883,
5410
+ "step": 767
5411
+ },
5412
+ {
5413
+ "epoch": 0.9475632325724861,
5414
+ "grad_norm": 679.1992797851562,
5415
+ "learning_rate": 7.857178460235148e-07,
5416
+ "loss": 3.9554,
5417
+ "step": 768
5418
+ },
5419
+ {
5420
+ "epoch": 0.9487970388648982,
5421
+ "grad_norm": 750.4222412109375,
5422
+ "learning_rate": 7.496883130373167e-07,
5423
+ "loss": 4.1345,
5424
+ "step": 769
5425
+ },
5426
+ {
5427
+ "epoch": 0.9500308451573103,
5428
+ "grad_norm": 989.0843505859375,
5429
+ "learning_rate": 7.144981217476754e-07,
5430
+ "loss": 4.0929,
5431
+ "step": 770
5432
+ },
5433
+ {
5434
+ "epoch": 0.9512646514497224,
5435
+ "grad_norm": 751.5528564453125,
5436
+ "learning_rate": 6.801478718785947e-07,
5437
+ "loss": 4.0241,
5438
+ "step": 771
5439
+ },
5440
+ {
5441
+ "epoch": 0.9524984577421345,
5442
+ "grad_norm": 999.92431640625,
5443
+ "learning_rate": 6.46638148839529e-07,
5444
+ "loss": 4.0454,
5445
+ "step": 772
5446
+ },
5447
+ {
5448
+ "epoch": 0.9537322640345466,
5449
+ "grad_norm": 859.3124389648438,
5450
+ "learning_rate": 6.139695237153298e-07,
5451
+ "loss": 4.1067,
5452
+ "step": 773
5453
+ },
5454
+ {
5455
+ "epoch": 0.9549660703269587,
5456
+ "grad_norm": 935.9942626953125,
5457
+ "learning_rate": 5.821425532565816e-07,
5458
+ "loss": 4.1005,
5459
+ "step": 774
5460
+ },
5461
+ {
5462
+ "epoch": 0.9561998766193708,
5463
+ "grad_norm": 935.444580078125,
5464
+ "learning_rate": 5.511577798700596e-07,
5465
+ "loss": 4.1124,
5466
+ "step": 775
5467
+ },
5468
+ {
5469
+ "epoch": 0.9574336829117829,
5470
+ "grad_norm": 812.61328125,
5471
+ "learning_rate": 5.21015731609531e-07,
5472
+ "loss": 4.0986,
5473
+ "step": 776
5474
+ },
5475
+ {
5476
+ "epoch": 0.958667489204195,
5477
+ "grad_norm": 719.1353149414062,
5478
+ "learning_rate": 4.91716922166735e-07,
5479
+ "loss": 4.0122,
5480
+ "step": 777
5481
+ },
5482
+ {
5483
+ "epoch": 0.9599012954966071,
5484
+ "grad_norm": 614.0670776367188,
5485
+ "learning_rate": 4.632618508626063e-07,
5486
+ "loss": 4.2284,
5487
+ "step": 778
5488
+ },
5489
+ {
5490
+ "epoch": 0.9611351017890192,
5491
+ "grad_norm": 732.4361572265625,
5492
+ "learning_rate": 4.35651002638815e-07,
5493
+ "loss": 4.0553,
5494
+ "step": 779
5495
+ },
5496
+ {
5497
+ "epoch": 0.9623689080814312,
5498
+ "grad_norm": 739.580322265625,
5499
+ "learning_rate": 4.088848480494567e-07,
5500
+ "loss": 4.1197,
5501
+ "step": 780
5502
+ },
5503
+ {
5504
+ "epoch": 0.9636027143738433,
5505
+ "grad_norm": 630.6943359375,
5506
+ "learning_rate": 3.829638432530702e-07,
5507
+ "loss": 4.1957,
5508
+ "step": 781
5509
+ },
5510
+ {
5511
+ "epoch": 0.9648365206662554,
5512
+ "grad_norm": 787.4408569335938,
5513
+ "learning_rate": 3.5788843000481576e-07,
5514
+ "loss": 4.0408,
5515
+ "step": 782
5516
+ },
5517
+ {
5518
+ "epoch": 0.9660703269586675,
5519
+ "grad_norm": 656.0836181640625,
5520
+ "learning_rate": 3.336590356489977e-07,
5521
+ "loss": 4.0707,
5522
+ "step": 783
5523
+ },
5524
+ {
5525
+ "epoch": 0.9673041332510796,
5526
+ "grad_norm": 1066.859130859375,
5527
+ "learning_rate": 3.102760731117593e-07,
5528
+ "loss": 4.08,
5529
+ "step": 784
5530
+ },
5531
+ {
5532
+ "epoch": 0.9685379395434917,
5533
+ "grad_norm": 636.3853149414062,
5534
+ "learning_rate": 2.8773994089402734e-07,
5535
+ "loss": 4.1094,
5536
+ "step": 785
5537
+ },
5538
+ {
5539
+ "epoch": 0.9697717458359038,
5540
+ "grad_norm": 833.309814453125,
5541
+ "learning_rate": 2.6605102306476724e-07,
5542
+ "loss": 4.2544,
5543
+ "step": 786
5544
+ },
5545
+ {
5546
+ "epoch": 0.9710055521283159,
5547
+ "grad_norm": 769.7086181640625,
5548
+ "learning_rate": 2.452096892543776e-07,
5549
+ "loss": 4.152,
5550
+ "step": 787
5551
+ },
5552
+ {
5553
+ "epoch": 0.972239358420728,
5554
+ "grad_norm": 737.0515747070312,
5555
+ "learning_rate": 2.2521629464844486e-07,
5556
+ "loss": 4.2431,
5557
+ "step": 788
5558
+ },
5559
+ {
5560
+ "epoch": 0.97347316471314,
5561
+ "grad_norm": 683.5414428710938,
5562
+ "learning_rate": 2.0607117998165947e-07,
5563
+ "loss": 4.2063,
5564
+ "step": 789
5565
+ },
5566
+ {
5567
+ "epoch": 0.9747069710055521,
5568
+ "grad_norm": 725.9776000976562,
5569
+ "learning_rate": 1.8777467153202054e-07,
5570
+ "loss": 4.2039,
5571
+ "step": 790
5572
+ },
5573
+ {
5574
+ "epoch": 0.9759407772979642,
5575
+ "grad_norm": 562.0029907226562,
5576
+ "learning_rate": 1.703270811152624e-07,
5577
+ "loss": 4.1486,
5578
+ "step": 791
5579
+ },
5580
+ {
5581
+ "epoch": 0.9771745835903763,
5582
+ "grad_norm": 756.2742309570312,
5583
+ "learning_rate": 1.5372870607956446e-07,
5584
+ "loss": 4.0233,
5585
+ "step": 792
5586
+ },
5587
+ {
5588
+ "epoch": 0.9784083898827884,
5589
+ "grad_norm": 777.6002197265625,
5590
+ "learning_rate": 1.3797982930044972e-07,
5591
+ "loss": 4.1716,
5592
+ "step": 793
5593
+ },
5594
+ {
5595
+ "epoch": 0.9796421961752005,
5596
+ "grad_norm": 1127.728759765625,
5597
+ "learning_rate": 1.2308071917601083e-07,
5598
+ "loss": 4.2959,
5599
+ "step": 794
5600
+ },
5601
+ {
5602
+ "epoch": 0.9808760024676126,
5603
+ "grad_norm": 827.7559814453125,
5604
+ "learning_rate": 1.0903162962228597e-07,
5605
+ "loss": 4.11,
5606
+ "step": 795
5607
+ },
5608
+ {
5609
+ "epoch": 0.9821098087600246,
5610
+ "grad_norm": 734.08984375,
5611
+ "learning_rate": 9.583280006895679e-08,
5612
+ "loss": 4.0029,
5613
+ "step": 796
5614
+ },
5615
+ {
5616
+ "epoch": 0.9833436150524367,
5617
+ "grad_norm": 1221.7593994140625,
5618
+ "learning_rate": 8.348445545527939e-08,
5619
+ "loss": 4.4357,
5620
+ "step": 797
5621
+ },
5622
+ {
5623
+ "epoch": 0.9845774213448488,
5624
+ "grad_norm": 632.0841674804688,
5625
+ "learning_rate": 7.198680622621523e-08,
5626
+ "loss": 4.0149,
5627
+ "step": 798
5628
+ },
5629
+ {
5630
+ "epoch": 0.9858112276372609,
5631
+ "grad_norm": 644.5614624023438,
5632
+ "learning_rate": 6.134004832888396e-08,
5633
+ "loss": 4.1734,
5634
+ "step": 799
5635
+ },
5636
+ {
5637
+ "epoch": 0.987045033929673,
5638
+ "grad_norm": 802.9095458984375,
5639
+ "learning_rate": 5.154436320919942e-08,
5640
+ "loss": 4.0969,
5641
+ "step": 800
  }
  ],
  "logging_steps": 1,
@@ -4958,7 +5658,7 @@
  "attributes": {}
  }
  },
- "total_flos": 4653307624685568.0,
+ "total_flos": 5313340347777024.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null