{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 225,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0044444444444444444,
      "grad_norm": 7.197563171386719,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 1.3915,
      "step": 1
    },
    {
      "epoch": 0.008888888888888889,
      "grad_norm": 7.682923793792725,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.3854,
      "step": 2
    },
    {
      "epoch": 0.013333333333333334,
      "grad_norm": 9.152216911315918,
      "learning_rate": 7.5e-07,
      "loss": 1.3159,
      "step": 3
    },
    {
      "epoch": 0.017777777777777778,
      "grad_norm": 9.388493537902832,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3236,
      "step": 4
    },
    {
      "epoch": 0.022222222222222223,
      "grad_norm": 12.188794136047363,
      "learning_rate": 1.25e-06,
      "loss": 1.3502,
      "step": 5
    },
    {
      "epoch": 0.02666666666666667,
      "grad_norm": 12.245216369628906,
      "learning_rate": 1.5e-06,
      "loss": 1.404,
      "step": 6
    },
    {
      "epoch": 0.03111111111111111,
      "grad_norm": 19.210506439208984,
      "learning_rate": 1.75e-06,
      "loss": 1.4199,
      "step": 7
    },
    {
      "epoch": 0.035555555555555556,
      "grad_norm": 9.991368293762207,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.3316,
      "step": 8
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.724348545074463,
      "learning_rate": 2.25e-06,
      "loss": 1.4482,
      "step": 9
    },
    {
      "epoch": 0.044444444444444446,
      "grad_norm": 8.962543487548828,
      "learning_rate": 2.5e-06,
      "loss": 1.29,
      "step": 10
    },
    {
      "epoch": 0.04888888888888889,
      "grad_norm": 7.128359317779541,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 1.3139,
      "step": 11
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 11.580962181091309,
      "learning_rate": 3e-06,
      "loss": 1.3238,
      "step": 12
    },
    {
      "epoch": 0.057777777777777775,
      "grad_norm": 8.380171775817871,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 1.3434,
      "step": 13
    },
    {
      "epoch": 0.06222222222222222,
      "grad_norm": 5.92144250869751,
      "learning_rate": 3.5e-06,
      "loss": 1.2896,
      "step": 14
    },
    {
      "epoch": 0.06666666666666667,
      "grad_norm": 8.112162590026855,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 1.2117,
      "step": 15
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 3.5406394004821777,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.2094,
      "step": 16
    },
    {
      "epoch": 0.07555555555555556,
      "grad_norm": 22.047605514526367,
      "learning_rate": 4.25e-06,
      "loss": 1.2027,
      "step": 17
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.793862819671631,
      "learning_rate": 4.5e-06,
      "loss": 1.2434,
      "step": 18
    },
    {
      "epoch": 0.08444444444444445,
      "grad_norm": 9.382174491882324,
      "learning_rate": 4.75e-06,
      "loss": 1.3183,
      "step": 19
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 5.871135234832764,
      "learning_rate": 5e-06,
      "loss": 1.2432,
      "step": 20
    },
    {
      "epoch": 0.09333333333333334,
      "grad_norm": 17.656475067138672,
      "learning_rate": 5.2500000000000006e-06,
      "loss": 1.1843,
      "step": 21
    },
    {
      "epoch": 0.09777777777777778,
      "grad_norm": 10.098402976989746,
      "learning_rate": 5.500000000000001e-06,
      "loss": 1.3401,
      "step": 22
    },
    {
      "epoch": 0.10222222222222223,
      "grad_norm": 23.264266967773438,
      "learning_rate": 5.75e-06,
      "loss": 1.3025,
      "step": 23
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 4.140642166137695,
      "learning_rate": 6e-06,
      "loss": 1.2141,
      "step": 24
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 6.262714862823486,
      "learning_rate": 6.25e-06,
      "loss": 1.2665,
      "step": 25
    },
    {
      "epoch": 0.11555555555555555,
      "grad_norm": 6.946407794952393,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 1.1989,
      "step": 26
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.5363879203796387,
      "learning_rate": 6.750000000000001e-06,
      "loss": 1.1771,
      "step": 27
    },
    {
      "epoch": 0.12444444444444444,
      "grad_norm": 3.964658737182617,
      "learning_rate": 7e-06,
      "loss": 1.2362,
      "step": 28
    },
    {
      "epoch": 0.1288888888888889,
      "grad_norm": 2.513711452484131,
      "learning_rate": 7.25e-06,
      "loss": 1.2203,
      "step": 29
    },
    {
      "epoch": 0.13333333333333333,
      "grad_norm": 2.6071319580078125,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.167,
      "step": 30
    },
    {
      "epoch": 0.13777777777777778,
      "grad_norm": 3.0134849548339844,
      "learning_rate": 7.75e-06,
      "loss": 1.2139,
      "step": 31
    },
    {
      "epoch": 0.14222222222222222,
      "grad_norm": 2.4809670448303223,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.1603,
      "step": 32
    },
    {
      "epoch": 0.14666666666666667,
      "grad_norm": 3.1294925212860107,
      "learning_rate": 8.25e-06,
      "loss": 1.1922,
      "step": 33
    },
    {
      "epoch": 0.1511111111111111,
      "grad_norm": 2.703998565673828,
      "learning_rate": 8.5e-06,
      "loss": 1.1858,
      "step": 34
    },
    {
      "epoch": 0.15555555555555556,
      "grad_norm": 4.653899192810059,
      "learning_rate": 8.750000000000001e-06,
      "loss": 1.2678,
      "step": 35
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.976705312728882,
      "learning_rate": 9e-06,
      "loss": 1.2004,
      "step": 36
    },
    {
      "epoch": 0.16444444444444445,
      "grad_norm": 3.332261323928833,
      "learning_rate": 9.250000000000001e-06,
      "loss": 1.2128,
      "step": 37
    },
    {
      "epoch": 0.1688888888888889,
      "grad_norm": 2.7455172538757324,
      "learning_rate": 9.5e-06,
      "loss": 1.2348,
      "step": 38
    },
    {
      "epoch": 0.17333333333333334,
      "grad_norm": 2.5534517765045166,
      "learning_rate": 9.75e-06,
      "loss": 1.1752,
      "step": 39
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 2.2505180835723877,
      "learning_rate": 1e-05,
      "loss": 1.1248,
      "step": 40
    },
    {
      "epoch": 0.18222222222222223,
      "grad_norm": 3.083693265914917,
      "learning_rate": 9.999853218975136e-06,
      "loss": 1.1466,
      "step": 41
    },
    {
      "epoch": 0.18666666666666668,
      "grad_norm": 2.871704339981079,
      "learning_rate": 9.99941288451841e-06,
      "loss": 1.1622,
      "step": 42
    },
    {
      "epoch": 0.19111111111111112,
      "grad_norm": 2.9086086750030518,
      "learning_rate": 9.998679022482916e-06,
      "loss": 1.1584,
      "step": 43
    },
    {
      "epoch": 0.19555555555555557,
      "grad_norm": 2.8458251953125,
      "learning_rate": 9.997651675955467e-06,
      "loss": 1.1556,
      "step": 44
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.2609152793884277,
      "learning_rate": 9.99633090525405e-06,
      "loss": 1.1991,
      "step": 45
    },
    {
      "epoch": 0.20444444444444446,
      "grad_norm": 14.039426803588867,
      "learning_rate": 9.9947167879243e-06,
      "loss": 1.1318,
      "step": 46
    },
    {
      "epoch": 0.2088888888888889,
      "grad_norm": 3.4546499252319336,
      "learning_rate": 9.992809418734932e-06,
      "loss": 1.1987,
      "step": 47
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 3.3863046169281006,
      "learning_rate": 9.99060890967219e-06,
      "loss": 1.171,
      "step": 48
    },
    {
      "epoch": 0.21777777777777776,
      "grad_norm": 2.217785596847534,
      "learning_rate": 9.988115389933263e-06,
      "loss": 1.124,
      "step": 49
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 3.8226356506347656,
      "learning_rate": 9.985329005918702e-06,
      "loss": 1.2048,
      "step": 50
    },
    {
      "epoch": 0.22666666666666666,
      "grad_norm": 4.5612688064575195,
      "learning_rate": 9.982249921223833e-06,
      "loss": 1.1899,
      "step": 51
    },
    {
      "epoch": 0.2311111111111111,
      "grad_norm": 3.4833076000213623,
      "learning_rate": 9.978878316629132e-06,
      "loss": 1.1745,
      "step": 52
    },
    {
      "epoch": 0.23555555555555555,
      "grad_norm": 2.259718894958496,
      "learning_rate": 9.975214390089637e-06,
      "loss": 1.1016,
      "step": 53
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.242814779281616,
      "learning_rate": 9.9712583567233e-06,
      "loss": 1.1984,
      "step": 54
    },
    {
      "epoch": 0.24444444444444444,
      "grad_norm": 4.208090305328369,
      "learning_rate": 9.967010448798376e-06,
      "loss": 1.1899,
      "step": 55
    },
    {
      "epoch": 0.24888888888888888,
      "grad_norm": 2.8487319946289062,
      "learning_rate": 9.962470915719775e-06,
      "loss": 1.1735,
      "step": 56
    },
    {
      "epoch": 0.25333333333333335,
      "grad_norm": 2.4001946449279785,
      "learning_rate": 9.957640024014426e-06,
      "loss": 1.1199,
      "step": 57
    },
    {
      "epoch": 0.2577777777777778,
      "grad_norm": 4.717513561248779,
      "learning_rate": 9.952518057315624e-06,
      "loss": 1.1234,
      "step": 58
    },
    {
      "epoch": 0.26222222222222225,
      "grad_norm": 2.7141494750976562,
      "learning_rate": 9.947105316346372e-06,
      "loss": 1.1619,
      "step": 59
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 2.610142946243286,
      "learning_rate": 9.941402118901743e-06,
      "loss": 1.2141,
      "step": 60
    },
    {
      "epoch": 0.27111111111111114,
      "grad_norm": 3.9055914878845215,
      "learning_rate": 9.9354087998302e-06,
      "loss": 1.1778,
      "step": 61
    },
    {
      "epoch": 0.27555555555555555,
      "grad_norm": 3.3144619464874268,
      "learning_rate": 9.929125711013952e-06,
      "loss": 1.1574,
      "step": 62
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.429931640625,
      "learning_rate": 9.922553221348281e-06,
      "loss": 1.1524,
      "step": 63
    },
    {
      "epoch": 0.28444444444444444,
      "grad_norm": 2.495755195617676,
      "learning_rate": 9.915691716719899e-06,
      "loss": 1.1692,
      "step": 64
    },
    {
      "epoch": 0.28888888888888886,
      "grad_norm": 1.96566903591156,
      "learning_rate": 9.908541599984276e-06,
      "loss": 1.0634,
      "step": 65
    },
    {
      "epoch": 0.29333333333333333,
      "grad_norm": 2.161886215209961,
      "learning_rate": 9.901103290941996e-06,
      "loss": 1.1318,
      "step": 66
    },
    {
      "epoch": 0.29777777777777775,
      "grad_norm": 2.408609390258789,
      "learning_rate": 9.893377226314113e-06,
      "loss": 1.118,
      "step": 67
    },
    {
      "epoch": 0.3022222222222222,
      "grad_norm": 2.3472368717193604,
      "learning_rate": 9.885363859716497e-06,
      "loss": 1.1064,
      "step": 68
    },
    {
      "epoch": 0.30666666666666664,
      "grad_norm": 4.991092681884766,
      "learning_rate": 9.877063661633213e-06,
      "loss": 1.1629,
      "step": 69
    },
    {
      "epoch": 0.3111111111111111,
      "grad_norm": 2.2165753841400146,
      "learning_rate": 9.868477119388897e-06,
      "loss": 1.0692,
      "step": 70
    },
    {
      "epoch": 0.31555555555555553,
      "grad_norm": 3.0987343788146973,
      "learning_rate": 9.859604737120131e-06,
      "loss": 1.1878,
      "step": 71
    },
    {
      "epoch": 0.32,
      "grad_norm": 3.5139403343200684,
      "learning_rate": 9.850447035745868e-06,
      "loss": 1.1443,
      "step": 72
    },
    {
      "epoch": 0.3244444444444444,
      "grad_norm": 2.9309511184692383,
      "learning_rate": 9.841004552936817e-06,
      "loss": 1.1986,
      "step": 73
    },
    {
      "epoch": 0.3288888888888889,
      "grad_norm": 2.642705202102661,
      "learning_rate": 9.831277843083904e-06,
      "loss": 1.1076,
      "step": 74
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 2.53140926361084,
      "learning_rate": 9.821267477265705e-06,
      "loss": 1.1432,
      "step": 75
    },
    {
      "epoch": 0.3377777777777778,
      "grad_norm": 3.0417535305023193,
      "learning_rate": 9.810974043214923e-06,
      "loss": 1.1889,
      "step": 76
    },
    {
      "epoch": 0.3422222222222222,
      "grad_norm": 2.1667232513427734,
      "learning_rate": 9.800398145283874e-06,
      "loss": 1.1088,
      "step": 77
    },
    {
      "epoch": 0.3466666666666667,
      "grad_norm": 2.451881170272827,
      "learning_rate": 9.789540404409017e-06,
      "loss": 1.125,
      "step": 78
    },
    {
      "epoch": 0.3511111111111111,
      "grad_norm": 5.966605186462402,
      "learning_rate": 9.778401458074482e-06,
      "loss": 1.2031,
      "step": 79
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 2.767612934112549,
      "learning_rate": 9.766981960274653e-06,
      "loss": 1.1299,
      "step": 80
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.8305821418762207,
      "learning_rate": 9.755282581475769e-06,
      "loss": 1.106,
      "step": 81
    },
    {
      "epoch": 0.36444444444444446,
      "grad_norm": 2.256387710571289,
      "learning_rate": 9.74330400857655e-06,
      "loss": 1.1994,
      "step": 82
    },
    {
      "epoch": 0.3688888888888889,
      "grad_norm": 2.9722063541412354,
      "learning_rate": 9.731046944867883e-06,
      "loss": 1.1182,
      "step": 83
    },
    {
      "epoch": 0.37333333333333335,
      "grad_norm": 3.528620958328247,
      "learning_rate": 9.718512109991516e-06,
      "loss": 1.2216,
      "step": 84
    },
    {
      "epoch": 0.37777777777777777,
      "grad_norm": 2.0519301891326904,
      "learning_rate": 9.705700239897809e-06,
      "loss": 1.1228,
      "step": 85
    },
    {
      "epoch": 0.38222222222222224,
      "grad_norm": 2.4317550659179688,
      "learning_rate": 9.692612086802536e-06,
      "loss": 1.164,
      "step": 86
    },
    {
      "epoch": 0.38666666666666666,
      "grad_norm": 3.0390419960021973,
      "learning_rate": 9.679248419142704e-06,
      "loss": 1.2052,
      "step": 87
    },
    {
      "epoch": 0.39111111111111113,
      "grad_norm": 2.4871633052825928,
      "learning_rate": 9.665610021531447e-06,
      "loss": 1.1605,
      "step": 88
    },
    {
      "epoch": 0.39555555555555555,
      "grad_norm": 3.762112617492676,
      "learning_rate": 9.651697694711959e-06,
      "loss": 1.1504,
      "step": 89
    },
    {
      "epoch": 0.4,
      "grad_norm": 18.623851776123047,
      "learning_rate": 9.637512255510475e-06,
      "loss": 1.173,
      "step": 90
    },
    {
      "epoch": 0.40444444444444444,
      "grad_norm": 2.8003592491149902,
      "learning_rate": 9.623054536788315e-06,
      "loss": 1.1239,
      "step": 91
    },
    {
      "epoch": 0.4088888888888889,
      "grad_norm": 2.560473918914795,
      "learning_rate": 9.608325387392988e-06,
      "loss": 1.1203,
      "step": 92
    },
    {
      "epoch": 0.41333333333333333,
      "grad_norm": 1.885018229484558,
      "learning_rate": 9.593325672108352e-06,
      "loss": 1.142,
      "step": 93
    },
    {
      "epoch": 0.4177777777777778,
      "grad_norm": 4.514256000518799,
      "learning_rate": 9.578056271603837e-06,
      "loss": 1.1398,
      "step": 94
    },
    {
      "epoch": 0.4222222222222222,
      "grad_norm": 2.7508609294891357,
      "learning_rate": 9.562518082382751e-06,
      "loss": 1.1355,
      "step": 95
    },
    {
      "epoch": 0.4266666666666667,
      "grad_norm": 2.800530433654785,
      "learning_rate": 9.546712016729625e-06,
      "loss": 1.1512,
      "step": 96
    },
    {
      "epoch": 0.4311111111111111,
      "grad_norm": 3.151304006576538,
      "learning_rate": 9.530639002656665e-06,
      "loss": 1.1215,
      "step": 97
    },
    {
      "epoch": 0.43555555555555553,
      "grad_norm": 2.7462916374206543,
      "learning_rate": 9.514299983849267e-06,
      "loss": 1.1412,
      "step": 98
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.8580820560455322,
      "learning_rate": 9.497695919610595e-06,
      "loss": 1.088,
      "step": 99
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.2820510864257812,
      "learning_rate": 9.480827784805278e-06,
      "loss": 1.1844,
      "step": 100
    },
    {
      "epoch": 0.4488888888888889,
      "grad_norm": 1.8663792610168457,
      "learning_rate": 9.463696569802163e-06,
      "loss": 1.1218,
      "step": 101
    },
    {
      "epoch": 0.4533333333333333,
      "grad_norm": 2.3818678855895996,
      "learning_rate": 9.446303280416167e-06,
      "loss": 1.186,
      "step": 102
    },
    {
      "epoch": 0.4577777777777778,
      "grad_norm": 1.8850212097167969,
      "learning_rate": 9.428648937849227e-06,
      "loss": 1.0865,
      "step": 103
    },
    {
      "epoch": 0.4622222222222222,
      "grad_norm": 2.012317419052124,
      "learning_rate": 9.410734578630344e-06,
      "loss": 1.0468,
      "step": 104
    },
    {
      "epoch": 0.4666666666666667,
      "grad_norm": 2.9131033420562744,
      "learning_rate": 9.392561254554712e-06,
      "loss": 1.1631,
      "step": 105
    },
    {
      "epoch": 0.4711111111111111,
      "grad_norm": 3.2192587852478027,
      "learning_rate": 9.374130032621993e-06,
      "loss": 1.1361,
      "step": 106
    },
    {
      "epoch": 0.47555555555555556,
      "grad_norm": 2.818880319595337,
      "learning_rate": 9.355441994973639e-06,
      "loss": 1.1578,
      "step": 107
    },
    {
      "epoch": 0.48,
      "grad_norm": 3.6746394634246826,
      "learning_rate": 9.336498238829383e-06,
      "loss": 1.1542,
      "step": 108
    },
    {
      "epoch": 0.48444444444444446,
      "grad_norm": 3.4095306396484375,
      "learning_rate": 9.317299876422797e-06,
      "loss": 1.142,
      "step": 109
    },
    {
      "epoch": 0.4888888888888889,
      "grad_norm": 2.7571191787719727,
      "learning_rate": 9.297848034936007e-06,
      "loss": 1.1133,
      "step": 110
    },
    {
      "epoch": 0.49333333333333335,
      "grad_norm": 2.1086196899414062,
      "learning_rate": 9.278143856433503e-06,
      "loss": 1.1164,
      "step": 111
    },
    {
      "epoch": 0.49777777777777776,
      "grad_norm": 2.419229507446289,
      "learning_rate": 9.258188497795093e-06,
      "loss": 1.1713,
      "step": 112
    },
    {
      "epoch": 0.5022222222222222,
      "grad_norm": 2.4053385257720947,
      "learning_rate": 9.237983130647973e-06,
      "loss": 1.1185,
      "step": 113
    },
    {
      "epoch": 0.5066666666666667,
      "grad_norm": 2.489058017730713,
      "learning_rate": 9.217528941297942e-06,
      "loss": 1.168,
      "step": 114
    },
    {
      "epoch": 0.5111111111111111,
      "grad_norm": 2.2254421710968018,
      "learning_rate": 9.196827130659752e-06,
      "loss": 1.1715,
      "step": 115
    },
    {
      "epoch": 0.5155555555555555,
      "grad_norm": 2.17338228225708,
      "learning_rate": 9.175878914186591e-06,
      "loss": 1.1486,
      "step": 116
    },
    {
      "epoch": 0.52,
      "grad_norm": 3.704568386077881,
      "learning_rate": 9.154685521798736e-06,
      "loss": 1.2477,
      "step": 117
    },
    {
      "epoch": 0.5244444444444445,
      "grad_norm": 2.340566396713257,
      "learning_rate": 9.13324819781133e-06,
      "loss": 1.1748,
      "step": 118
    },
    {
      "epoch": 0.5288888888888889,
      "grad_norm": 2.876037359237671,
      "learning_rate": 9.111568200861324e-06,
      "loss": 1.1003,
      "step": 119
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 1.6906325817108154,
      "learning_rate": 9.089646803833589e-06,
      "loss": 1.0845,
      "step": 120
    },
    {
      "epoch": 0.5377777777777778,
      "grad_norm": 2.1755309104919434,
      "learning_rate": 9.067485293786173e-06,
      "loss": 1.2046,
      "step": 121
    },
    {
      "epoch": 0.5422222222222223,
      "grad_norm": 2.471137046813965,
      "learning_rate": 9.045084971874738e-06,
      "loss": 1.1749,
      "step": 122
    },
    {
      "epoch": 0.5466666666666666,
      "grad_norm": 2.135450839996338,
      "learning_rate": 9.022447153276169e-06,
      "loss": 1.1702,
      "step": 123
    },
    {
      "epoch": 0.5511111111111111,
      "grad_norm": 2.0650486946105957,
      "learning_rate": 8.99957316711135e-06,
      "loss": 1.1009,
      "step": 124
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 2.5196189880371094,
      "learning_rate": 8.976464356367133e-06,
      "loss": 1.1063,
      "step": 125
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.1774826049804688,
      "learning_rate": 8.95312207781749e-06,
      "loss": 1.1902,
      "step": 126
    },
    {
      "epoch": 0.5644444444444444,
      "grad_norm": 2.327805757522583,
      "learning_rate": 8.929547701943849e-06,
      "loss": 1.1152,
      "step": 127
    },
    {
      "epoch": 0.5688888888888889,
      "grad_norm": 3.271408796310425,
      "learning_rate": 8.905742612854628e-06,
      "loss": 1.1846,
      "step": 128
    },
    {
      "epoch": 0.5733333333333334,
      "grad_norm": 2.898059844970703,
      "learning_rate": 8.881708208203977e-06,
      "loss": 1.16,
      "step": 129
    },
    {
      "epoch": 0.5777777777777777,
      "grad_norm": 2.6147210597991943,
      "learning_rate": 8.857445899109716e-06,
      "loss": 1.12,
      "step": 130
    },
    {
      "epoch": 0.5822222222222222,
      "grad_norm": 3.8312957286834717,
      "learning_rate": 8.832957110070482e-06,
      "loss": 1.1535,
      "step": 131
    },
    {
      "epoch": 0.5866666666666667,
      "grad_norm": 3.3056952953338623,
      "learning_rate": 8.808243278882094e-06,
      "loss": 1.0797,
      "step": 132
    },
    {
      "epoch": 0.5911111111111111,
      "grad_norm": 3.1664161682128906,
      "learning_rate": 8.783305856553143e-06,
      "loss": 1.1774,
      "step": 133
    },
    {
      "epoch": 0.5955555555555555,
      "grad_norm": 2.9546732902526855,
      "learning_rate": 8.758146307219793e-06,
      "loss": 1.213,
      "step": 134
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.9610460996627808,
      "learning_rate": 8.732766108059814e-06,
      "loss": 1.167,
      "step": 135
    },
    {
      "epoch": 0.6044444444444445,
      "grad_norm": 2.216212749481201,
      "learning_rate": 8.707166749205867e-06,
      "loss": 1.0953,
      "step": 136
    },
    {
      "epoch": 0.6088888888888889,
      "grad_norm": 5.400648593902588,
      "learning_rate": 8.681349733658002e-06,
      "loss": 1.085,
      "step": 137
    },
    {
      "epoch": 0.6133333333333333,
      "grad_norm": 2.8066205978393555,
      "learning_rate": 8.65531657719542e-06,
      "loss": 1.1447,
      "step": 138
    },
    {
      "epoch": 0.6177777777777778,
      "grad_norm": 3.9573774337768555,
      "learning_rate": 8.629068808287476e-06,
      "loss": 1.1372,
      "step": 139
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 2.352977991104126,
      "learning_rate": 8.602607968003935e-06,
      "loss": 1.1005,
      "step": 140
    },
    {
      "epoch": 0.6266666666666667,
      "grad_norm": 3.9835476875305176,
      "learning_rate": 8.575935609924505e-06,
      "loss": 1.1974,
      "step": 141
    },
    {
      "epoch": 0.6311111111111111,
      "grad_norm": 2.4645981788635254,
      "learning_rate": 8.549053300047602e-06,
      "loss": 1.0944,
      "step": 142
    },
    {
      "epoch": 0.6355555555555555,
      "grad_norm": 1.7835776805877686,
      "learning_rate": 8.521962616698428e-06,
      "loss": 1.1516,
      "step": 143
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.5801022052764893,
      "learning_rate": 8.494665150436288e-06,
      "loss": 1.1421,
      "step": 144
    },
    {
      "epoch": 0.6444444444444445,
      "grad_norm": 2.424285650253296,
      "learning_rate": 8.467162503961209e-06,
      "loss": 1.147,
      "step": 145
    },
    {
      "epoch": 0.6488888888888888,
      "grad_norm": 2.7262933254241943,
      "learning_rate": 8.439456292019849e-06,
      "loss": 1.1269,
      "step": 146
    },
    {
      "epoch": 0.6533333333333333,
      "grad_norm": 1.9822264909744263,
      "learning_rate": 8.411548141310683e-06,
      "loss": 1.1161,
      "step": 147
    },
    {
      "epoch": 0.6577777777777778,
      "grad_norm": 1.773859977722168,
      "learning_rate": 8.383439690388491e-06,
      "loss": 1.0834,
      "step": 148
    },
    {
      "epoch": 0.6622222222222223,
      "grad_norm": 2.605008363723755,
      "learning_rate": 8.35513258956817e-06,
      "loss": 1.1789,
      "step": 149
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 2.594944477081299,
      "learning_rate": 8.326628500827826e-06,
      "loss": 1.1489,
      "step": 150
    },
    {
      "epoch": 0.6711111111111111,
      "grad_norm": 2.652006149291992,
      "learning_rate": 8.297929097711207e-06,
      "loss": 1.1225,
      "step": 151
    },
    {
      "epoch": 0.6755555555555556,
      "grad_norm": 3.7691638469696045,
      "learning_rate": 8.269036065229426e-06,
      "loss": 1.2454,
      "step": 152
    },
    {
      "epoch": 0.68,
      "grad_norm": 2.865312337875366,
      "learning_rate": 8.239951099762058e-06,
      "loss": 1.1437,
      "step": 153
    },
    {
      "epoch": 0.6844444444444444,
      "grad_norm": 2.1165366172790527,
      "learning_rate": 8.210675908957513e-06,
      "loss": 1.1353,
      "step": 154
    },
    {
      "epoch": 0.6888888888888889,
      "grad_norm": 2.63763427734375,
      "learning_rate": 8.1812122116328e-06,
      "loss": 1.1393,
      "step": 155
    },
    {
      "epoch": 0.6933333333333334,
      "grad_norm": 3.102932929992676,
      "learning_rate": 8.151561737672591e-06,
      "loss": 1.1366,
      "step": 156
    },
    {
      "epoch": 0.6977777777777778,
      "grad_norm": 3.0296387672424316,
      "learning_rate": 8.12172622792767e-06,
      "loss": 1.0632,
      "step": 157
    },
    {
      "epoch": 0.7022222222222222,
      "grad_norm": 2.647352933883667,
      "learning_rate": 8.091707434112717e-06,
      "loss": 1.0779,
      "step": 158
    },
    {
      "epoch": 0.7066666666666667,
      "grad_norm": 3.4690728187561035,
      "learning_rate": 8.061507118703456e-06,
      "loss": 1.187,
      "step": 159
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 2.914712905883789,
      "learning_rate": 8.031127054833192e-06,
      "loss": 1.1118,
      "step": 160
    },
    {
      "epoch": 0.7155555555555555,
      "grad_norm": 2.348532199859619,
      "learning_rate": 8.000569026188684e-06,
      "loss": 1.1566,
      "step": 161
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.2149953842163086,
      "learning_rate": 7.969834826905441e-06,
      "loss": 1.1446,
      "step": 162
    },
    {
      "epoch": 0.7244444444444444,
      "grad_norm": 1.8673402070999146,
      "learning_rate": 7.938926261462366e-06,
      "loss": 1.1512,
      "step": 163
    },
    {
      "epoch": 0.7288888888888889,
      "grad_norm": 2.2096667289733887,
      "learning_rate": 7.90784514457583e-06,
      "loss": 1.0488,
      "step": 164
    },
    {
      "epoch": 0.7333333333333333,
      "grad_norm": 2.886953592300415,
      "learning_rate": 7.876593301093104e-06,
      "loss": 1.1158,
      "step": 165
    },
    {
      "epoch": 0.7377777777777778,
      "grad_norm": 3.801467180252075,
      "learning_rate": 7.845172565885237e-06,
      "loss": 1.1106,
      "step": 166
    },
    {
      "epoch": 0.7422222222222222,
      "grad_norm": 2.3027687072753906,
      "learning_rate": 7.813584783739314e-06,
      "loss": 1.1146,
      "step": 167
    },
    {
      "epoch": 0.7466666666666667,
      "grad_norm": 3.938884973526001,
      "learning_rate": 7.78183180925015e-06,
      "loss": 1.1328,
      "step": 168
    },
    {
      "epoch": 0.7511111111111111,
      "grad_norm": 2.19864821434021,
      "learning_rate": 7.749915506711402e-06,
      "loss": 1.1701,
      "step": 169
    },
    {
      "epoch": 0.7555555555555555,
      "grad_norm": 4.781134128570557,
      "learning_rate": 7.717837750006106e-06,
      "loss": 1.1709,
      "step": 170
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.3147037029266357,
      "learning_rate": 7.685600422496666e-06,
      "loss": 1.0917,
      "step": 171
    },
    {
      "epoch": 0.7644444444444445,
      "grad_norm": 3.0294220447540283,
      "learning_rate": 7.653205416914267e-06,
      "loss": 1.1209,
      "step": 172
    },
    {
      "epoch": 0.7688888888888888,
      "grad_norm": 1.97251296043396,
      "learning_rate": 7.620654635247762e-06,
      "loss": 1.0584,
      "step": 173
    },
    {
      "epoch": 0.7733333333333333,
      "grad_norm": 4.131389141082764,
      "learning_rate": 7.587949988631982e-06,
      "loss": 1.171,
      "step": 174
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 2.2693028450012207,
      "learning_rate": 7.555093397235553e-06,
      "loss": 1.1358,
      "step": 175
    },
    {
      "epoch": 0.7822222222222223,
      "grad_norm": 2.0202383995056152,
      "learning_rate": 7.5220867901481335e-06,
      "loss": 1.1823,
      "step": 176
    },
    {
      "epoch": 0.7866666666666666,
      "grad_norm": 2.4606690406799316,
      "learning_rate": 7.488932105267171e-06,
      "loss": 1.0837,
      "step": 177
    },
    {
      "epoch": 0.7911111111111111,
      "grad_norm": 1.997691035270691,
      "learning_rate": 7.455631289184117e-06,
      "loss": 1.1091,
      "step": 178
    },
    {
      "epoch": 0.7955555555555556,
      "grad_norm": 2.143990993499756,
      "learning_rate": 7.422186297070136e-06,
      "loss": 1.1174,
      "step": 179
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.189858913421631,
      "learning_rate": 7.388599092561315e-06,
      "loss": 1.1757,
      "step": 180
    },
    {
      "epoch": 0.8044444444444444,
      "grad_norm": 1.796143889427185,
      "learning_rate": 7.3548716476433756e-06,
      "loss": 1.1114,
      "step": 181
    },
    {
      "epoch": 0.8088888888888889,
      "grad_norm": 1.725069284439087,
      "learning_rate": 7.32100594253589e-06,
      "loss": 1.0613,
      "step": 182
    },
    {
      "epoch": 0.8133333333333334,
      "grad_norm": 2.555976152420044,
      "learning_rate": 7.2870039655760186e-06,
      "loss": 1.0673,
      "step": 183
    },
    {
      "epoch": 0.8177777777777778,
      "grad_norm": 2.573438882827759,
      "learning_rate": 7.252867713101772e-06,
      "loss": 1.1355,
      "step": 184
    },
    {
      "epoch": 0.8222222222222222,
      "grad_norm": 2.54061222076416,
      "learning_rate": 7.218599189334799e-06,
      "loss": 1.1363,
      "step": 185
    },
    {
      "epoch": 0.8266666666666667,
      "grad_norm": 2.8169195652008057,
      "learning_rate": 7.184200406262717e-06,
      "loss": 1.0974,
      "step": 186
    },
    {
      "epoch": 0.8311111111111111,
      "grad_norm": 10.035176277160645,
      "learning_rate": 7.149673383520978e-06,
      "loss": 1.1138,
      "step": 187
    },
    {
      "epoch": 0.8355555555555556,
      "grad_norm": 3.066096544265747,
      "learning_rate": 7.115020148274294e-06,
      "loss": 1.1197,
      "step": 188
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.641267776489258,
      "learning_rate": 7.080242735097622e-06,
      "loss": 1.1566,
      "step": 189
    },
    {
      "epoch": 0.8444444444444444,
      "grad_norm": 3.2539026737213135,
      "learning_rate": 7.045343185856701e-06,
      "loss": 1.0788,
      "step": 190
    },
    {
      "epoch": 0.8488888888888889,
      "grad_norm": 3.5505993366241455,
      "learning_rate": 7.01032354958817e-06,
      "loss": 1.1023,
      "step": 191
    },
    {
      "epoch": 0.8533333333333334,
      "grad_norm": 2.408964157104492,
      "learning_rate": 6.975185882379272e-06,
      "loss": 1.1087,
      "step": 192
    },
    {
      "epoch": 0.8577777777777778,
      "grad_norm": 2.967325210571289,
      "learning_rate": 6.939932247247126e-06,
      "loss": 1.0942,
      "step": 193
    },
    {
      "epoch": 0.8622222222222222,
      "grad_norm": 1.98670494556427,
      "learning_rate": 6.9045647140176145e-06,
      "loss": 1.1288,
      "step": 194
    },
    {
      "epoch": 0.8666666666666667,
      "grad_norm": 2.2912962436676025,
      "learning_rate": 6.869085359203844e-06,
      "loss": 1.1148,
      "step": 195
    },
    {
      "epoch": 0.8711111111111111,
      "grad_norm": 2.9244866371154785,
      "learning_rate": 6.833496265884241e-06,
      "loss": 1.1047,
      "step": 196
    },
    {
      "epoch": 0.8755555555555555,
      "grad_norm": 3.550187349319458,
      "learning_rate": 6.79779952358024e-06,
      "loss": 1.1299,
      "step": 197
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.448188066482544,
      "learning_rate": 6.76199722813361e-06,
      "loss": 1.0937,
      "step": 198
    },
    {
      "epoch": 0.8844444444444445,
      "grad_norm": 2.392951488494873,
      "learning_rate": 6.726091481583396e-06,
      "loss": 1.1075,
      "step": 199
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.764351487159729,
      "learning_rate": 6.690084392042514e-06,
      "loss": 1.0864,
      "step": 200
    },
    {
      "epoch": 0.8933333333333333,
      "grad_norm": 2.1497156620025635,
      "learning_rate": 6.653978073573962e-06,
      "loss": 1.0698,
      "step": 201
    },
    {
      "epoch": 0.8977777777777778,
      "grad_norm": 2.4440653324127197,
      "learning_rate": 6.6177746460667124e-06,
      "loss": 1.1207,
      "step": 202
    },
    {
      "epoch": 0.9022222222222223,
      "grad_norm": 5.237727165222168,
      "learning_rate": 6.581476235111244e-06,
      "loss": 1.2957,
      "step": 203
    },
    {
      "epoch": 0.9066666666666666,
      "grad_norm": 2.54868745803833,
      "learning_rate": 6.545084971874738e-06,
      "loss": 1.1072,
      "step": 204
    },
    {
      "epoch": 0.9111111111111111,
      "grad_norm": 2.269233465194702,
      "learning_rate": 6.508602992975963e-06,
      "loss": 1.096,
      "step": 205
    },
    {
      "epoch": 0.9155555555555556,
      "grad_norm": 1.835715413093567,
      "learning_rate": 6.472032440359817e-06,
      "loss": 1.0722,
      "step": 206
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.5577235221862793,
      "learning_rate": 6.43537546117158e-06,
      "loss": 1.1194,
      "step": 207
    },
    {
      "epoch": 0.9244444444444444,
      "grad_norm": 2.0571846961975098,
      "learning_rate": 6.398634207630841e-06,
      "loss": 1.1089,
      "step": 208
    },
    {
      "epoch": 0.9288888888888889,
      "grad_norm": 3.0140180587768555,
      "learning_rate": 6.361810836905138e-06,
      "loss": 1.2144,
      "step": 209
    },
    {
      "epoch": 0.9333333333333333,
      "grad_norm": 2.731870412826538,
      "learning_rate": 6.32490751098331e-06,
      "loss": 1.1493,
      "step": 210
    },
    {
      "epoch": 0.9377777777777778,
      "grad_norm": 2.427889347076416,
      "learning_rate": 6.287926396548556e-06,
      "loss": 1.0504,
      "step": 211
    },
    {
      "epoch": 0.9422222222222222,
      "grad_norm": 2.487750291824341,
      "learning_rate": 6.250869664851226e-06,
      "loss": 1.0908,
      "step": 212
    },
    {
      "epoch": 0.9466666666666667,
      "grad_norm": 2.3912594318389893,
      "learning_rate": 6.213739491581347e-06,
      "loss": 1.1216,
      "step": 213
    },
    {
      "epoch": 0.9511111111111111,
      "grad_norm": 3.4675991535186768,
      "learning_rate": 6.176538056740871e-06,
      "loss": 1.0171,
      "step": 214
    },
    {
      "epoch": 0.9555555555555556,
      "grad_norm": 1.81716787815094,
      "learning_rate": 6.139267544515689e-06,
      "loss": 1.044,
      "step": 215
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.6640546321868896,
      "learning_rate": 6.101930143147395e-06,
      "loss": 1.1029,
      "step": 216
    },
    {
      "epoch": 0.9644444444444444,
      "grad_norm": 2.501460313796997,
      "learning_rate": 6.064528044804805e-06,
      "loss": 1.1141,
      "step": 217
    },
    {
      "epoch": 0.9688888888888889,
      "grad_norm": 2.3960464000701904,
      "learning_rate": 6.0270634454552494e-06,
      "loss": 1.0526,
      "step": 218
    },
    {
      "epoch": 0.9733333333333334,
      "grad_norm": 2.989225387573242,
      "learning_rate": 5.989538544735644e-06,
      "loss": 1.1437,
      "step": 219
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 1.963538646697998,
      "learning_rate": 5.951955545823342e-06,
      "loss": 1.0998,
      "step": 220
    },
    {
      "epoch": 0.9822222222222222,
      "grad_norm": 2.2665040493011475,
      "learning_rate": 5.9143166553067846e-06,
      "loss": 1.1062,
      "step": 221
    },
    {
      "epoch": 0.9866666666666667,
      "grad_norm": 2.201535701751709,
      "learning_rate": 5.87662408305594e-06,
      "loss": 1.042,
      "step": 222
    },
    {
      "epoch": 0.9911111111111112,
      "grad_norm": 2.513442039489746,
      "learning_rate": 5.8388800420925616e-06,
      "loss": 1.0733,
      "step": 223
    },
    {
      "epoch": 0.9955555555555555,
      "grad_norm": 2.0468199253082275,
      "learning_rate": 5.801086748460255e-06,
      "loss": 1.1171,
      "step": 224
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.2198939323425293,
      "learning_rate": 5.763246421094373e-06,
      "loss": 1.1027,
      "step": 225
    }
  ],
  "logging_steps": 1,
  "max_steps": 450,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 225,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 8.193854425949798e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}