{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9770713913496613,
"eval_steps": 500,
"global_step": 750,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{ |
|
"epoch": 0.0013027618551328818, |
|
"grad_norm": 14.949752807617188, |
|
"learning_rate": 4.0625e-06, |
|
"loss": 1.3439, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0026055237102657635, |
|
"grad_norm": 15.725403785705566, |
|
"learning_rate": 8.125e-06, |
|
"loss": 1.3354, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0039082855653986455, |
|
"grad_norm": 12.41278076171875, |
|
"learning_rate": 1.2187499999999998e-05, |
|
"loss": 1.3017, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.005211047420531527, |
|
"grad_norm": 6.782334327697754, |
|
"learning_rate": 1.625e-05, |
|
"loss": 1.2634, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.006513809275664409, |
|
"grad_norm": 3.730219841003418, |
|
"learning_rate": 2.03125e-05, |
|
"loss": 1.1704, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.007816571130797291, |
|
"grad_norm": 2.547757863998413, |
|
"learning_rate": 2.4374999999999996e-05, |
|
"loss": 1.165, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.009119332985930172, |
|
"grad_norm": 2.4353976249694824, |
|
"learning_rate": 2.8437499999999997e-05, |
|
"loss": 1.1567, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.010422094841063054, |
|
"grad_norm": 2.218895673751831, |
|
"learning_rate": 3.25e-05, |
|
"loss": 1.0846, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.011724856696195935, |
|
"grad_norm": 1.7433233261108398, |
|
"learning_rate": 3.6562499999999994e-05, |
|
"loss": 0.9816, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.013027618551328817, |
|
"grad_norm": 1.6148579120635986, |
|
"learning_rate": 4.0625e-05, |
|
"loss": 1.0869, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0143303804064617, |
|
"grad_norm": 1.5065230131149292, |
|
"learning_rate": 4.4687499999999995e-05, |
|
"loss": 0.9625, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.015633142261594582, |
|
"grad_norm": 1.4786823987960815, |
|
"learning_rate": 4.874999999999999e-05, |
|
"loss": 1.0489, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.016935904116727463, |
|
"grad_norm": 1.2991230487823486, |
|
"learning_rate": 5.2812499999999996e-05, |
|
"loss": 0.9935, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.018238665971860343, |
|
"grad_norm": 1.3199464082717896, |
|
"learning_rate": 5.687499999999999e-05, |
|
"loss": 1.0188, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.019541427826993224, |
|
"grad_norm": 1.2793257236480713, |
|
"learning_rate": 6.09375e-05, |
|
"loss": 1.0265, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.020844189682126108, |
|
"grad_norm": 1.2210655212402344, |
|
"learning_rate": 6.5e-05, |
|
"loss": 0.9019, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02214695153725899, |
|
"grad_norm": 1.3837445974349976, |
|
"learning_rate": 6.499971563731688e-05, |
|
"loss": 0.9487, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02344971339239187, |
|
"grad_norm": 1.2946529388427734, |
|
"learning_rate": 6.499886255424368e-05, |
|
"loss": 1.0031, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.024752475247524754, |
|
"grad_norm": 1.3142110109329224, |
|
"learning_rate": 6.499744076570868e-05, |
|
"loss": 0.9941, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.026055237102657634, |
|
"grad_norm": 1.8206599950790405, |
|
"learning_rate": 6.499545029659211e-05, |
|
"loss": 0.9767, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.027357998957790515, |
|
"grad_norm": 1.2736281156539917, |
|
"learning_rate": 6.499289118172568e-05, |
|
"loss": 0.9194, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0286607608129234, |
|
"grad_norm": 1.8105640411376953, |
|
"learning_rate": 6.498976346589196e-05, |
|
"loss": 0.9853, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02996352266805628, |
|
"grad_norm": 1.4621827602386475, |
|
"learning_rate": 6.49860672038236e-05, |
|
"loss": 0.9896, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.031266284523189164, |
|
"grad_norm": 1.212299108505249, |
|
"learning_rate": 6.498180246020238e-05, |
|
"loss": 0.9684, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.032569046378322045, |
|
"grad_norm": 1.2274290323257446, |
|
"learning_rate": 6.49769693096581e-05, |
|
"loss": 0.9831, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.033871808233454925, |
|
"grad_norm": 1.2909352779388428, |
|
"learning_rate": 6.49715678367672e-05, |
|
"loss": 0.9971, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.035174570088587806, |
|
"grad_norm": 1.2330715656280518, |
|
"learning_rate": 6.49655981360514e-05, |
|
"loss": 0.9923, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.03647733194372069, |
|
"grad_norm": 1.2433793544769287, |
|
"learning_rate": 6.495906031197591e-05, |
|
"loss": 0.9121, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03778009379885357, |
|
"grad_norm": 1.2374027967453003, |
|
"learning_rate": 6.49519544789477e-05, |
|
"loss": 0.9224, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.03908285565398645, |
|
"grad_norm": 1.2204357385635376, |
|
"learning_rate": 6.494428076131346e-05, |
|
"loss": 0.919, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.040385617509119336, |
|
"grad_norm": 1.1927292346954346, |
|
"learning_rate": 6.493603929335744e-05, |
|
"loss": 0.9589, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.041688379364252216, |
|
"grad_norm": 1.1353641748428345, |
|
"learning_rate": 6.492723021929908e-05, |
|
"loss": 0.8869, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0429911412193851, |
|
"grad_norm": 1.212263822555542, |
|
"learning_rate": 6.49178536932905e-05, |
|
"loss": 0.8934, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04429390307451798, |
|
"grad_norm": 1.0893241167068481, |
|
"learning_rate": 6.49079098794138e-05, |
|
"loss": 0.8718, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04559666492965086, |
|
"grad_norm": 1.1801607608795166, |
|
"learning_rate": 6.489739895167818e-05, |
|
"loss": 0.9774, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04689942678478374, |
|
"grad_norm": 1.1302330493927002, |
|
"learning_rate": 6.488632109401691e-05, |
|
"loss": 0.9584, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.04820218863991663, |
|
"grad_norm": 1.1432682275772095, |
|
"learning_rate": 6.487467650028411e-05, |
|
"loss": 0.9452, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.04950495049504951, |
|
"grad_norm": 1.1399481296539307, |
|
"learning_rate": 6.486246537425133e-05, |
|
"loss": 0.9017, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.05080771235018239, |
|
"grad_norm": 1.0911235809326172, |
|
"learning_rate": 6.484968792960403e-05, |
|
"loss": 0.9036, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.05211047420531527, |
|
"grad_norm": 1.071078896522522, |
|
"learning_rate": 6.48363443899378e-05, |
|
"loss": 0.9273, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05341323606044815, |
|
"grad_norm": 1.053589105606079, |
|
"learning_rate": 6.482243498875449e-05, |
|
"loss": 0.8847, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05471599791558103, |
|
"grad_norm": 1.109771490097046, |
|
"learning_rate": 6.480795996945806e-05, |
|
"loss": 0.9364, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05601875977071391, |
|
"grad_norm": 1.1093286275863647, |
|
"learning_rate": 6.479291958535037e-05, |
|
"loss": 0.9088, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.0573215216258468, |
|
"grad_norm": 1.0716255903244019, |
|
"learning_rate": 6.477731409962675e-05, |
|
"loss": 0.8885, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.05862428348097968, |
|
"grad_norm": 1.1552095413208008, |
|
"learning_rate": 6.476114378537138e-05, |
|
"loss": 0.9736, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05992704533611256, |
|
"grad_norm": 1.1647394895553589, |
|
"learning_rate": 6.474440892555249e-05, |
|
"loss": 0.9138, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.06122980719124544, |
|
"grad_norm": 1.1463412046432495, |
|
"learning_rate": 6.472710981301745e-05, |
|
"loss": 0.9204, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06253256904637833, |
|
"grad_norm": 1.175421118736267, |
|
"learning_rate": 6.470924675048762e-05, |
|
"loss": 0.9119, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06383533090151121, |
|
"grad_norm": 1.9249529838562012, |
|
"learning_rate": 6.469082005055304e-05, |
|
"loss": 0.8639, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06513809275664409, |
|
"grad_norm": 1.1802655458450317, |
|
"learning_rate": 6.467183003566701e-05, |
|
"loss": 0.9352, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06644085461177697, |
|
"grad_norm": 1.1035569906234741, |
|
"learning_rate": 6.465227703814038e-05, |
|
"loss": 0.9342, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.06774361646690985, |
|
"grad_norm": 1.1226942539215088, |
|
"learning_rate": 6.463216140013581e-05, |
|
"loss": 0.8948, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.06904637832204273, |
|
"grad_norm": 1.1143085956573486, |
|
"learning_rate": 6.46114834736617e-05, |
|
"loss": 0.9577, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.07034914017717561, |
|
"grad_norm": 1.1214665174484253, |
|
"learning_rate": 6.459024362056608e-05, |
|
"loss": 0.9106, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07165190203230849, |
|
"grad_norm": 1.1105942726135254, |
|
"learning_rate": 6.45684422125303e-05, |
|
"loss": 0.8706, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07295466388744137, |
|
"grad_norm": 1.0562191009521484, |
|
"learning_rate": 6.454607963106247e-05, |
|
"loss": 0.8278, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07425742574257425, |
|
"grad_norm": 1.1298907995224, |
|
"learning_rate": 6.452315626749081e-05, |
|
"loss": 0.8883, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07556018759770713, |
|
"grad_norm": 1.133426547050476, |
|
"learning_rate": 6.449967252295682e-05, |
|
"loss": 0.8868, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07686294945284002, |
|
"grad_norm": 1.09175443649292, |
|
"learning_rate": 6.447562880840822e-05, |
|
"loss": 0.8486, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.0781657113079729, |
|
"grad_norm": 1.1153368949890137, |
|
"learning_rate": 6.44510255445918e-05, |
|
"loss": 0.9278, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07946847316310579, |
|
"grad_norm": 1.102013349533081, |
|
"learning_rate": 6.442586316204601e-05, |
|
"loss": 0.8362, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.08077123501823867, |
|
"grad_norm": 1.1041877269744873, |
|
"learning_rate": 6.44001421010935e-05, |
|
"loss": 0.8746, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.08207399687337155, |
|
"grad_norm": 1.075576901435852, |
|
"learning_rate": 6.437386281183331e-05, |
|
"loss": 0.8567, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.08337675872850443, |
|
"grad_norm": 1.1472443342208862, |
|
"learning_rate": 6.43470257541331e-05, |
|
"loss": 0.9023, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08467952058363731, |
|
"grad_norm": 1.1024972200393677, |
|
"learning_rate": 6.431963139762104e-05, |
|
"loss": 0.8982, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0859822824387702, |
|
"grad_norm": 1.1128768920898438, |
|
"learning_rate": 6.42916802216776e-05, |
|
"loss": 0.883, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08728504429390307, |
|
"grad_norm": 1.058384895324707, |
|
"learning_rate": 6.426317271542717e-05, |
|
"loss": 0.8428, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.08858780614903596, |
|
"grad_norm": 1.0728991031646729, |
|
"learning_rate": 6.423410937772951e-05, |
|
"loss": 0.8553, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08989056800416884, |
|
"grad_norm": 1.0956963300704956, |
|
"learning_rate": 6.4204490717171e-05, |
|
"loss": 0.8894, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.09119332985930172, |
|
"grad_norm": 1.0787287950515747, |
|
"learning_rate": 6.417431725205575e-05, |
|
"loss": 0.9086, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0924960917144346, |
|
"grad_norm": 1.1435580253601074, |
|
"learning_rate": 6.414358951039651e-05, |
|
"loss": 0.9074, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09379885356956748, |
|
"grad_norm": 1.0396053791046143, |
|
"learning_rate": 6.41123080299055e-05, |
|
"loss": 0.8553, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09510161542470036, |
|
"grad_norm": 1.0929105281829834, |
|
"learning_rate": 6.40804733579849e-05, |
|
"loss": 0.8504, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09640437727983325, |
|
"grad_norm": 1.1395248174667358, |
|
"learning_rate": 6.404808605171734e-05, |
|
"loss": 0.9222, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09770713913496613, |
|
"grad_norm": 1.0281647443771362, |
|
"learning_rate": 6.401514667785613e-05, |
|
"loss": 0.868, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09900990099009901, |
|
"grad_norm": 1.0565054416656494, |
|
"learning_rate": 6.398165581281536e-05, |
|
"loss": 0.8302, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1003126628452319, |
|
"grad_norm": 1.0529290437698364, |
|
"learning_rate": 6.394761404265977e-05, |
|
"loss": 0.8669, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.10161542470036478, |
|
"grad_norm": 1.0579882860183716, |
|
"learning_rate": 6.391302196309452e-05, |
|
"loss": 0.9204, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.10291818655549766, |
|
"grad_norm": 1.0531302690505981, |
|
"learning_rate": 6.387788017945482e-05, |
|
"loss": 0.9202, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.10422094841063054, |
|
"grad_norm": 1.0358260869979858, |
|
"learning_rate": 6.384218930669521e-05, |
|
"loss": 0.8647, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.10552371026576342, |
|
"grad_norm": 1.0599216222763062, |
|
"learning_rate": 6.380594996937895e-05, |
|
"loss": 0.9093, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1068264721208963, |
|
"grad_norm": 1.2026344537734985, |
|
"learning_rate": 6.376916280166696e-05, |
|
"loss": 0.8731, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.10812923397602918, |
|
"grad_norm": 1.1054397821426392, |
|
"learning_rate": 6.373182844730677e-05, |
|
"loss": 0.8835, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.10943199583116206, |
|
"grad_norm": 1.030562162399292, |
|
"learning_rate": 6.369394755962129e-05, |
|
"loss": 0.8432, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.11073475768629494, |
|
"grad_norm": 1.0522128343582153, |
|
"learning_rate": 6.365552080149737e-05, |
|
"loss": 0.8598, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.11203751954142782, |
|
"grad_norm": 1.0692416429519653, |
|
"learning_rate": 6.361654884537413e-05, |
|
"loss": 0.8812, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1133402813965607, |
|
"grad_norm": 1.1009373664855957, |
|
"learning_rate": 6.357703237323127e-05, |
|
"loss": 0.8635, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.1146430432516936, |
|
"grad_norm": 1.1628373861312866, |
|
"learning_rate": 6.353697207657708e-05, |
|
"loss": 0.8907, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11594580510682648, |
|
"grad_norm": 1.1273458003997803, |
|
"learning_rate": 6.349636865643642e-05, |
|
"loss": 0.8445, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11724856696195936, |
|
"grad_norm": 1.0971312522888184, |
|
"learning_rate": 6.345522282333835e-05, |
|
"loss": 0.8543, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11855132881709224, |
|
"grad_norm": 1.0826351642608643, |
|
"learning_rate": 6.341353529730374e-05, |
|
"loss": 0.8605, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.11985409067222512, |
|
"grad_norm": 1.0686413049697876, |
|
"learning_rate": 6.337130680783273e-05, |
|
"loss": 0.8814, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.121156852527358, |
|
"grad_norm": 1.21683931350708, |
|
"learning_rate": 6.332853809389189e-05, |
|
"loss": 0.8979, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.12245961438249088, |
|
"grad_norm": 1.0884461402893066, |
|
"learning_rate": 6.328522990390126e-05, |
|
"loss": 0.8577, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.12376237623762376, |
|
"grad_norm": 1.047310709953308, |
|
"learning_rate": 6.324138299572135e-05, |
|
"loss": 0.8174, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.12506513809275666, |
|
"grad_norm": 1.087296485900879, |
|
"learning_rate": 6.319699813663982e-05, |
|
"loss": 0.8566, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12636789994788952, |
|
"grad_norm": 1.070327639579773, |
|
"learning_rate": 6.315207610335808e-05, |
|
"loss": 0.8615, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12767066180302242, |
|
"grad_norm": 1.0299792289733887, |
|
"learning_rate": 6.310661768197764e-05, |
|
"loss": 0.866, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.12897342365815528, |
|
"grad_norm": 1.1011384725570679, |
|
"learning_rate": 6.306062366798642e-05, |
|
"loss": 0.8775, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.13027618551328818, |
|
"grad_norm": 1.0193467140197754, |
|
"learning_rate": 6.301409486624481e-05, |
|
"loss": 0.8243, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.13157894736842105, |
|
"grad_norm": 1.0899205207824707, |
|
"learning_rate": 6.296703209097159e-05, |
|
"loss": 0.8849, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.13288170922355394, |
|
"grad_norm": 1.0473856925964355, |
|
"learning_rate": 6.291943616572962e-05, |
|
"loss": 0.8847, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1341844710786868, |
|
"grad_norm": 0.9922148585319519, |
|
"learning_rate": 6.287130792341154e-05, |
|
"loss": 0.8157, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.1354872329338197, |
|
"grad_norm": 1.0680872201919556, |
|
"learning_rate": 6.282264820622511e-05, |
|
"loss": 0.8802, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.13678999478895257, |
|
"grad_norm": 1.034562587738037, |
|
"learning_rate": 6.27734578656785e-05, |
|
"loss": 0.8826, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13809275664408546, |
|
"grad_norm": 1.0247395038604736, |
|
"learning_rate": 6.272373776256536e-05, |
|
"loss": 0.8535, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.13939551849921833, |
|
"grad_norm": 1.099061131477356, |
|
"learning_rate": 6.267348876694983e-05, |
|
"loss": 0.8583, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.14069828035435122, |
|
"grad_norm": 1.0081039667129517, |
|
"learning_rate": 6.262271175815126e-05, |
|
"loss": 0.7988, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.14200104220948412, |
|
"grad_norm": 1.1140209436416626, |
|
"learning_rate": 6.257140762472879e-05, |
|
"loss": 0.8426, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.14330380406461699, |
|
"grad_norm": 1.0654839277267456, |
|
"learning_rate": 6.251957726446589e-05, |
|
"loss": 0.8682, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.14460656591974988, |
|
"grad_norm": 1.0632318258285522, |
|
"learning_rate": 6.246722158435455e-05, |
|
"loss": 0.8379, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.14590932777488275, |
|
"grad_norm": 1.0444918870925903, |
|
"learning_rate": 6.241434150057953e-05, |
|
"loss": 0.8525, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.14721208963001564, |
|
"grad_norm": 1.0542877912521362, |
|
"learning_rate": 6.236093793850217e-05, |
|
"loss": 0.834, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.1485148514851485, |
|
"grad_norm": 0.9843441247940063, |
|
"learning_rate": 6.230701183264436e-05, |
|
"loss": 0.7711, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.1498176133402814, |
|
"grad_norm": 1.0625543594360352, |
|
"learning_rate": 6.225256412667207e-05, |
|
"loss": 0.8169, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.15112037519541427, |
|
"grad_norm": 1.0951343774795532, |
|
"learning_rate": 6.219759577337887e-05, |
|
"loss": 0.9114, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.15242313705054716, |
|
"grad_norm": 1.0442739725112915, |
|
"learning_rate": 6.214210773466932e-05, |
|
"loss": 0.8038, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.15372589890568003, |
|
"grad_norm": 1.0549882650375366, |
|
"learning_rate": 6.2086100981542e-05, |
|
"loss": 0.8677, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.15502866076081293, |
|
"grad_norm": 1.0807327032089233, |
|
"learning_rate": 6.202957649407264e-05, |
|
"loss": 0.8276, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.1563314226159458, |
|
"grad_norm": 1.0784152746200562, |
|
"learning_rate": 6.197253526139696e-05, |
|
"loss": 0.8823, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.1576341844710787, |
|
"grad_norm": 1.076708436012268, |
|
"learning_rate": 6.191497828169325e-05, |
|
"loss": 0.842, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.15893694632621158, |
|
"grad_norm": 1.0658226013183594, |
|
"learning_rate": 6.185690656216507e-05, |
|
"loss": 0.8626, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.16023970818134445, |
|
"grad_norm": 1.064942717552185, |
|
"learning_rate": 6.179832111902348e-05, |
|
"loss": 0.8735, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.16154247003647734, |
|
"grad_norm": 1.0782710313796997, |
|
"learning_rate": 6.17392229774693e-05, |
|
"loss": 0.828, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.1628452318916102, |
|
"grad_norm": 1.0276188850402832, |
|
"learning_rate": 6.167961317167528e-05, |
|
"loss": 0.8475, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1641479937467431, |
|
"grad_norm": 1.1249077320098877, |
|
"learning_rate": 6.161949274476778e-05, |
|
"loss": 0.9029, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.16545075560187597, |
|
"grad_norm": 1.027971863746643, |
|
"learning_rate": 6.155886274880874e-05, |
|
"loss": 0.8057, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.16675351745700887, |
|
"grad_norm": 1.0177087783813477, |
|
"learning_rate": 6.14977242447771e-05, |
|
"loss": 0.7928, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.16805627931214173, |
|
"grad_norm": 1.0410820245742798, |
|
"learning_rate": 6.143607830255037e-05, |
|
"loss": 0.8222, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.16935904116727463, |
|
"grad_norm": 1.086966872215271, |
|
"learning_rate": 6.137392600088579e-05, |
|
"loss": 0.837, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1706618030224075, |
|
"grad_norm": 1.099645972251892, |
|
"learning_rate": 6.131126842740155e-05, |
|
"loss": 0.8604, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.1719645648775404, |
|
"grad_norm": 1.0512402057647705, |
|
"learning_rate": 6.124810667855768e-05, |
|
"loss": 0.8524, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.17326732673267325, |
|
"grad_norm": 1.0146111249923706, |
|
"learning_rate": 6.118444185963691e-05, |
|
"loss": 0.7948, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.17457008858780615, |
|
"grad_norm": 1.0564138889312744, |
|
"learning_rate": 6.112027508472533e-05, |
|
"loss": 0.8393, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.17587285044293904, |
|
"grad_norm": 1.1982516050338745, |
|
"learning_rate": 6.105560747669284e-05, |
|
"loss": 0.8326, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.1771756122980719, |
|
"grad_norm": 1.0336800813674927, |
|
"learning_rate": 6.0990440167173595e-05, |
|
"loss": 0.8456, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.1784783741532048, |
|
"grad_norm": 1.0412505865097046, |
|
"learning_rate": 6.092477429654611e-05, |
|
"loss": 0.8026, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.17978113600833767, |
|
"grad_norm": 1.0531303882598877, |
|
"learning_rate": 6.085861101391333e-05, |
|
"loss": 0.8675, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.18108389786347057, |
|
"grad_norm": 1.0367439985275269, |
|
"learning_rate": 6.0791951477082555e-05, |
|
"loss": 0.8328, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.18238665971860343, |
|
"grad_norm": 1.057989478111267, |
|
"learning_rate": 6.072479685254518e-05, |
|
"loss": 0.8129, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.18368942157373633, |
|
"grad_norm": 1.1223191022872925, |
|
"learning_rate": 6.065714831545621e-05, |
|
"loss": 0.8519, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1849921834288692, |
|
"grad_norm": 1.0480860471725464, |
|
"learning_rate": 6.058900704961377e-05, |
|
"loss": 0.8478, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.1862949452840021, |
|
"grad_norm": 1.0725680589675903, |
|
"learning_rate": 6.0520374247438375e-05, |
|
"loss": 0.8612, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.18759770713913496, |
|
"grad_norm": 1.0441476106643677, |
|
"learning_rate": 6.045125110995204e-05, |
|
"loss": 0.8521, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.18890046899426785, |
|
"grad_norm": 1.0792895555496216, |
|
"learning_rate": 6.0381638846757264e-05, |
|
"loss": 0.8364, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.19020323084940072, |
|
"grad_norm": 1.0203429460525513, |
|
"learning_rate": 6.03115386760159e-05, |
|
"loss": 0.7973, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.1915059927045336, |
|
"grad_norm": 1.0836308002471924, |
|
"learning_rate": 6.024095182442782e-05, |
|
"loss": 0.8719, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.1928087545596665, |
|
"grad_norm": 1.0394123792648315, |
|
"learning_rate": 6.016987952720939e-05, |
|
"loss": 0.8809, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.19411151641479937, |
|
"grad_norm": 1.0890783071517944, |
|
"learning_rate": 6.0098323028071956e-05, |
|
"loss": 0.8996, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.19541427826993227, |
|
"grad_norm": 1.0037873983383179, |
|
"learning_rate": 6.002628357920002e-05, |
|
"loss": 0.8424, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.19671704012506513, |
|
"grad_norm": 1.0470969676971436, |
|
"learning_rate": 5.995376244122935e-05, |
|
"loss": 0.8019, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.19801980198019803, |
|
"grad_norm": 1.0230716466903687, |
|
"learning_rate": 5.988076088322486e-05, |
|
"loss": 0.7927, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.1993225638353309, |
|
"grad_norm": 1.0318727493286133, |
|
"learning_rate": 5.980728018265849e-05, |
|
"loss": 0.803, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.2006253256904638, |
|
"grad_norm": 1.0479062795639038, |
|
"learning_rate": 5.9733321625386824e-05, |
|
"loss": 0.8597, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.20192808754559666, |
|
"grad_norm": 1.025315761566162, |
|
"learning_rate": 5.965888650562853e-05, |
|
"loss": 0.7937, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.20323084940072955, |
|
"grad_norm": 1.0599974393844604, |
|
"learning_rate": 5.958397612594182e-05, |
|
"loss": 0.8628, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.20453361125586242, |
|
"grad_norm": 1.0344469547271729, |
|
"learning_rate": 5.950859179720152e-05, |
|
"loss": 0.8436, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.2058363731109953, |
|
"grad_norm": 1.1034519672393799, |
|
"learning_rate": 5.943273483857627e-05, |
|
"loss": 0.8124, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.20713913496612818, |
|
"grad_norm": 1.0487233400344849, |
|
"learning_rate": 5.935640657750534e-05, |
|
"loss": 0.8337, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.20844189682126107, |
|
"grad_norm": 1.0436427593231201, |
|
"learning_rate": 5.9279608349675444e-05, |
|
"loss": 0.8438, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.20974465867639394, |
|
"grad_norm": 1.0612242221832275, |
|
"learning_rate": 5.920234149899736e-05, |
|
"loss": 0.8735, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.21104742053152684, |
|
"grad_norm": 1.168532371520996, |
|
"learning_rate": 5.912460737758241e-05, |
|
"loss": 0.8422, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.21235018238665973, |
|
"grad_norm": 1.0675426721572876, |
|
"learning_rate": 5.9046407345718805e-05, |
|
"loss": 0.8631, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.2136529442417926, |
|
"grad_norm": 1.048614501953125, |
|
"learning_rate": 5.8967742771847826e-05, |
|
"loss": 0.8473, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2149557060969255, |
|
"grad_norm": 1.0362776517868042, |
|
"learning_rate": 5.888861503253989e-05, |
|
"loss": 0.8888, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.21625846795205836, |
|
"grad_norm": 0.9976462721824646, |
|
"learning_rate": 5.8809025512470445e-05, |
|
"loss": 0.8079, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.21756122980719125, |
|
"grad_norm": 1.0061395168304443, |
|
"learning_rate": 5.87289756043958e-05, |
|
"loss": 0.7954, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.21886399166232412, |
|
"grad_norm": 1.0533777475357056, |
|
"learning_rate": 5.864846670912864e-05, |
|
"loss": 0.8089, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.22016675351745701, |
|
"grad_norm": 0.9939700365066528, |
|
"learning_rate": 5.856750023551361e-05, |
|
"loss": 0.8176, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.22146951537258988, |
|
"grad_norm": 1.0513004064559937, |
|
"learning_rate": 5.848607760040267e-05, |
|
"loss": 0.8602, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.22277227722772278, |
|
"grad_norm": 1.055511713027954, |
|
"learning_rate": 5.840420022863016e-05, |
|
"loss": 0.8511, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.22407503908285564, |
|
"grad_norm": 1.0215332508087158, |
|
"learning_rate": 5.832186955298808e-05, |
|
"loss": 0.8233, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.22537780093798854, |
|
"grad_norm": 1.018233299255371, |
|
"learning_rate": 5.8239087014200806e-05, |
|
"loss": 0.8354, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.2266805627931214, |
|
"grad_norm": 1.0055863857269287, |
|
"learning_rate": 5.815585406090005e-05, |
|
"loss": 0.8488, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.2279833246482543, |
|
"grad_norm": 1.008152961730957, |
|
"learning_rate": 5.8072172149599394e-05, |
|
"loss": 0.7623, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.2292860865033872, |
|
"grad_norm": 1.0418919324874878, |
|
"learning_rate": 5.7988042744668856e-05, |
|
"loss": 0.8109, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.23058884835852006, |
|
"grad_norm": 1.0832551717758179, |
|
"learning_rate": 5.790346731830927e-05, |
|
"loss": 0.8951, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.23189161021365295, |
|
"grad_norm": 1.0513347387313843, |
|
"learning_rate": 5.7818447350526476e-05, |
|
"loss": 0.8598, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.23319437206878582, |
|
"grad_norm": 0.9888846278190613, |
|
"learning_rate": 5.7732984329105484e-05, |
|
"loss": 0.746, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.23449713392391872, |
|
"grad_norm": 1.0668085813522339, |
|
"learning_rate": 5.764707974958437e-05, |
|
"loss": 0.8039, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.23579989577905158, |
|
"grad_norm": 1.0392318964004517, |
|
"learning_rate": 5.7560735115228195e-05, |
|
"loss": 0.8155, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.23710265763418448, |
|
"grad_norm": 1.052474021911621, |
|
"learning_rate": 5.7473951937002576e-05, |
|
"loss": 0.8215, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.23840541948931734, |
|
"grad_norm": 1.067699670791626, |
|
"learning_rate": 5.738673173354739e-05, |
|
"loss": 0.8259, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.23970818134445024, |
|
"grad_norm": 1.042398452758789, |
|
"learning_rate": 5.729907603115005e-05, |
|
"loss": 0.8086, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.2410109431995831, |
|
"grad_norm": 1.0346574783325195, |
|
"learning_rate": 5.721098636371894e-05, |
|
"loss": 0.8184, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.242313705054716, |
|
"grad_norm": 1.0383342504501343, |
|
"learning_rate": 5.712246427275646e-05, |
|
"loss": 0.8543, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.24361646690984887, |
|
"grad_norm": 1.0456079244613647, |
|
"learning_rate": 5.703351130733211e-05, |
|
"loss": 0.8244, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.24491922876498176, |
|
"grad_norm": 0.9899833798408508, |
|
"learning_rate": 5.694412902405535e-05, |
|
"loss": 0.7916, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.24622199062011466, |
|
"grad_norm": 1.1231224536895752, |
|
"learning_rate": 5.685431898704839e-05, |
|
"loss": 0.7904, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.24752475247524752, |
|
"grad_norm": 1.0400362014770508, |
|
"learning_rate": 5.676408276791882e-05, |
|
"loss": 0.845, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.24882751433038042, |
|
"grad_norm": 1.0910183191299438, |
|
"learning_rate": 5.6673421945732075e-05, |
|
"loss": 0.8293, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.2501302761855133, |
|
"grad_norm": 1.0755969285964966, |
|
"learning_rate": 5.658233810698381e-05, |
|
"loss": 0.83, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.25143303804064615, |
|
"grad_norm": 1.0231711864471436, |
|
"learning_rate": 5.649083284557219e-05, |
|
"loss": 0.8442, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.25273579989577905, |
|
"grad_norm": 1.0368572473526, |
|
"learning_rate": 5.639890776276991e-05, |
|
"loss": 0.8574, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.25403856175091194, |
|
"grad_norm": 1.0136909484863281, |
|
"learning_rate": 5.630656446719626e-05, |
|
"loss": 0.8151, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.25534132360604483, |
|
"grad_norm": 1.0600650310516357, |
|
"learning_rate": 5.621380457478889e-05, |
|
"loss": 0.7792, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.2566440854611777, |
|
"grad_norm": 1.0362857580184937, |
|
"learning_rate": 5.612062970877565e-05, |
|
"loss": 0.8246, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.25794684731631057, |
|
"grad_norm": 1.0172935724258423, |
|
"learning_rate": 5.602704149964604e-05, |
|
"loss": 0.8196, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.25924960917144346, |
|
"grad_norm": 1.0049176216125488, |
|
"learning_rate": 5.5933041585122805e-05, |
|
"loss": 0.8229, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.26055237102657636, |
|
"grad_norm": 1.0751328468322754, |
|
"learning_rate": 5.5838631610133196e-05, |
|
"loss": 0.8456, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.2618551328817092, |
|
"grad_norm": 1.0684475898742676, |
|
"learning_rate": 5.57438132267802e-05, |
|
"loss": 0.8607, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.2631578947368421, |
|
"grad_norm": 1.029704213142395, |
|
"learning_rate": 5.564858809431367e-05, |
|
"loss": 0.7801, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.264460656591975, |
|
"grad_norm": 1.044959306716919, |
|
"learning_rate": 5.5552957879101246e-05, |
|
"loss": 0.8379, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.2657634184471079, |
|
"grad_norm": 1.030415654182434, |
|
"learning_rate": 5.545692425459921e-05, |
|
"loss": 0.8705, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.2670661803022408, |
|
"grad_norm": 1.0018935203552246, |
|
"learning_rate": 5.5360488901323206e-05, |
|
"loss": 0.8122, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.2683689421573736, |
|
"grad_norm": 1.0166568756103516, |
|
"learning_rate": 5.526365350681881e-05, |
|
"loss": 0.8171, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.2696717040125065, |
|
"grad_norm": 1.0195856094360352, |
|
"learning_rate": 5.516641976563204e-05, |
|
"loss": 0.8496, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.2709744658676394, |
|
"grad_norm": 0.985876739025116, |
|
"learning_rate": 5.506878937927966e-05, |
|
"loss": 0.801, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.2722772277227723, |
|
"grad_norm": 1.0150160789489746, |
|
"learning_rate": 5.497076405621944e-05, |
|
"loss": 0.784, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.27357998957790514, |
|
"grad_norm": 1.095823884010315, |
|
"learning_rate": 5.487234551182022e-05, |
|
"loss": 0.8957, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.27488275143303803, |
|
"grad_norm": 1.033151388168335, |
|
"learning_rate": 5.477353546833193e-05, |
|
"loss": 0.8346, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.2761855132881709, |
|
"grad_norm": 0.982230007648468, |
|
"learning_rate": 5.467433565485546e-05, |
|
"loss": 0.7826, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.2774882751433038, |
|
"grad_norm": 0.9829743504524231, |
|
"learning_rate": 5.4574747807312326e-05, |
|
"loss": 0.791, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.27879103699843666, |
|
"grad_norm": 1.0257138013839722, |
|
"learning_rate": 5.4474773668414395e-05, |
|
"loss": 0.8498, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.28009379885356955, |
|
"grad_norm": 1.0054680109024048, |
|
"learning_rate": 5.437441498763332e-05, |
|
"loss": 0.8365, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.28139656070870245, |
|
"grad_norm": 1.0021213293075562, |
|
"learning_rate": 5.427367352116993e-05, |
|
"loss": 0.8313, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.28269932256383534, |
|
"grad_norm": 1.0124083757400513, |
|
"learning_rate": 5.417255103192355e-05, |
|
"loss": 0.81, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.28400208441896824, |
|
"grad_norm": 1.0104458332061768, |
|
"learning_rate": 5.407104928946109e-05, |
|
"loss": 0.7821, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.2853048462741011, |
|
"grad_norm": 1.0982369184494019, |
|
"learning_rate": 5.39691700699861e-05, |
|
"loss": 0.8512, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.28660760812923397, |
|
"grad_norm": 1.0122724771499634, |
|
"learning_rate": 5.386691515630771e-05, |
|
"loss": 0.8634, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.28791036998436687, |
|
"grad_norm": 1.0455682277679443, |
|
"learning_rate": 5.376428633780941e-05, |
|
"loss": 0.8399, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.28921313183949976, |
|
"grad_norm": 0.9880922436714172, |
|
"learning_rate": 5.3661285410417705e-05, |
|
"loss": 0.8044, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.2905158936946326, |
|
"grad_norm": 1.0006861686706543, |
|
"learning_rate": 5.355791417657078e-05, |
|
"loss": 0.7957, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.2918186555497655, |
|
"grad_norm": 1.0367894172668457, |
|
"learning_rate": 5.345417444518687e-05, |
|
"loss": 0.8207, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.2931214174048984, |
|
"grad_norm": 1.0520100593566895, |
|
"learning_rate": 5.335006803163265e-05, |
|
"loss": 0.8586, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.2944241792600313, |
|
"grad_norm": 1.0345007181167603, |
|
"learning_rate": 5.3245596757691435e-05, |
|
"loss": 0.8362, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.2957269411151641, |
|
"grad_norm": 1.0230737924575806, |
|
"learning_rate": 5.3140762451531344e-05, |
|
"loss": 0.8482, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.297029702970297, |
|
"grad_norm": 0.9852018356323242, |
|
"learning_rate": 5.303556694767326e-05, |
|
"loss": 0.8661, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.2983324648254299, |
|
"grad_norm": 0.9801431894302368, |
|
"learning_rate": 5.2930012086958783e-05, |
|
"loss": 0.8702, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.2996352266805628, |
|
"grad_norm": 1.0442442893981934, |
|
"learning_rate": 5.282409971651797e-05, |
|
"loss": 0.8566, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.3009379885356957, |
|
"grad_norm": 1.103188157081604, |
|
"learning_rate": 5.271783168973701e-05, |
|
"loss": 0.8206, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.30224075039082854, |
|
"grad_norm": 1.0066590309143066, |
|
"learning_rate": 5.261120986622583e-05, |
|
"loss": 0.8369, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.30354351224596143, |
|
"grad_norm": 0.9748640060424805, |
|
"learning_rate": 5.250423611178553e-05, |
|
"loss": 0.7792, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.30484627410109433, |
|
"grad_norm": 1.0654217004776, |
|
"learning_rate": 5.239691229837573e-05, |
|
"loss": 0.8242, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3061490359562272, |
|
"grad_norm": 1.0146901607513428, |
|
"learning_rate": 5.228924030408182e-05, |
|
"loss": 0.8166, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.30745179781136006, |
|
"grad_norm": 1.0119966268539429, |
|
"learning_rate": 5.218122201308209e-05, |
|
"loss": 0.8074, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.30875455966649296, |
|
"grad_norm": 1.0073226690292358, |
|
"learning_rate": 5.207285931561475e-05, |
|
"loss": 0.8292, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.31005732152162585, |
|
"grad_norm": 1.0207470655441284, |
|
"learning_rate": 5.1964154107944876e-05, |
|
"loss": 0.8245, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.31136008337675875, |
|
"grad_norm": 0.9803332090377808, |
|
"learning_rate": 5.185510829233121e-05, |
|
"loss": 0.774, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3126628452318916, |
|
"grad_norm": 1.0056718587875366, |
|
"learning_rate": 5.174572377699286e-05, |
|
"loss": 0.8308, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3139656070870245, |
|
"grad_norm": 0.9751390218734741, |
|
"learning_rate": 5.163600247607596e-05, |
|
"loss": 0.7648, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.3152683689421574, |
|
"grad_norm": 1.0351225137710571, |
|
"learning_rate": 5.152594630962008e-05, |
|
"loss": 0.7695, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.31657113079729027, |
|
"grad_norm": 1.0021389722824097, |
|
"learning_rate": 5.1415557203524736e-05, |
|
"loss": 0.8158, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.31787389265242316, |
|
"grad_norm": 1.0016628503799438, |
|
"learning_rate": 5.130483708951561e-05, |
|
"loss": 0.793, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.319176654507556, |
|
"grad_norm": 1.008516788482666, |
|
"learning_rate": 5.119378790511079e-05, |
|
"loss": 0.8143, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.3204794163626889, |
|
"grad_norm": 1.0514389276504517, |
|
"learning_rate": 5.108241159358679e-05, |
|
"loss": 0.7945, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3217821782178218, |
|
"grad_norm": 0.9676159024238586, |
|
"learning_rate": 5.097071010394469e-05, |
|
"loss": 0.7912, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.3230849400729547, |
|
"grad_norm": 1.0046639442443848, |
|
"learning_rate": 5.085868539087586e-05, |
|
"loss": 0.8136, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.3243877019280875, |
|
"grad_norm": 0.993964433670044, |
|
"learning_rate": 5.074633941472788e-05, |
|
"loss": 0.7893, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.3256904637832204, |
|
"grad_norm": 0.9951984286308289, |
|
"learning_rate": 5.0633674141470194e-05, |
|
"loss": 0.7463, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3269932256383533, |
|
"grad_norm": 0.9659764766693115, |
|
"learning_rate": 5.0520691542659655e-05, |
|
"loss": 0.8079, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.3282959874934862, |
|
"grad_norm": 1.006102442741394, |
|
"learning_rate": 5.0407393595406125e-05, |
|
"loss": 0.7864, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.32959874934861905, |
|
"grad_norm": 0.9752157330513, |
|
"learning_rate": 5.029378228233782e-05, |
|
"loss": 0.7683, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.33090151120375194, |
|
"grad_norm": 1.0095077753067017, |
|
"learning_rate": 5.0179859591566575e-05, |
|
"loss": 0.8504, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.33220427305888484, |
|
"grad_norm": 1.0161761045455933, |
|
"learning_rate": 5.0065627516653145e-05, |
|
"loss": 0.8308, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.33350703491401773, |
|
"grad_norm": 0.9710779190063477, |
|
"learning_rate": 4.995108805657226e-05, |
|
"loss": 0.7761, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.3348097967691506, |
|
"grad_norm": 1.0192509889602661, |
|
"learning_rate": 4.983624321567764e-05, |
|
"loss": 0.7955, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.33611255862428346, |
|
"grad_norm": 1.0141615867614746, |
|
"learning_rate": 4.972109500366698e-05, |
|
"loss": 0.8243, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.33741532047941636, |
|
"grad_norm": 1.0046883821487427, |
|
"learning_rate": 4.9605645435546694e-05, |
|
"loss": 0.8272, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.33871808233454925, |
|
"grad_norm": 1.031710147857666, |
|
"learning_rate": 4.948989653159671e-05, |
|
"loss": 0.8187, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.34002084418968215, |
|
"grad_norm": 0.9797648787498474, |
|
"learning_rate": 4.937385031733513e-05, |
|
"loss": 0.8062, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.341323606044815, |
|
"grad_norm": 0.9559136629104614, |
|
"learning_rate": 4.925750882348274e-05, |
|
"loss": 0.7627, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.3426263678999479, |
|
"grad_norm": 1.0041617155075073, |
|
"learning_rate": 4.9140874085927486e-05, |
|
"loss": 0.821, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.3439291297550808, |
|
"grad_norm": 1.0307433605194092, |
|
"learning_rate": 4.9023948145688896e-05, |
|
"loss": 0.7842, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.34523189161021367, |
|
"grad_norm": 0.9956508874893188, |
|
"learning_rate": 4.8906733048882286e-05, |
|
"loss": 0.8067, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.3465346534653465, |
|
"grad_norm": 1.0119211673736572, |
|
"learning_rate": 4.878923084668299e-05, |
|
"loss": 0.8037, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.3478374153204794, |
|
"grad_norm": 1.353070855140686, |
|
"learning_rate": 4.867144359529053e-05, |
|
"loss": 0.8174, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.3491401771756123, |
|
"grad_norm": 1.0314981937408447, |
|
"learning_rate": 4.855337335589247e-05, |
|
"loss": 0.8061, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.3504429390307452, |
|
"grad_norm": 1.0623273849487305, |
|
"learning_rate": 4.8435022194628533e-05, |
|
"loss": 0.8216, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.3517457008858781, |
|
"grad_norm": 0.9801738858222961, |
|
"learning_rate": 4.831639218255434e-05, |
|
"loss": 0.8048, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.3530484627410109, |
|
"grad_norm": 0.9815056324005127, |
|
"learning_rate": 4.8197485395605186e-05, |
|
"loss": 0.8232, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.3543512245961438, |
|
"grad_norm": 1.0663788318634033, |
|
"learning_rate": 4.8078303914559706e-05, |
|
"loss": 0.8185, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.3556539864512767, |
|
"grad_norm": 0.9978227019309998, |
|
"learning_rate": 4.795884982500349e-05, |
|
"loss": 0.777, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.3569567483064096, |
|
"grad_norm": 1.0210098028182983, |
|
"learning_rate": 4.7839125217292556e-05, |
|
"loss": 0.8353, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.35825951016154245, |
|
"grad_norm": 0.9370939135551453, |
|
"learning_rate": 4.77191321865168e-05, |
|
"loss": 0.7465, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.35956227201667534, |
|
"grad_norm": 1.0085511207580566, |
|
"learning_rate": 4.759887283246331e-05, |
|
"loss": 0.8009, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.36086503387180824, |
|
"grad_norm": 0.9499126672744751, |
|
"learning_rate": 4.747834925957963e-05, |
|
"loss": 0.8041, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.36216779572694113, |
|
"grad_norm": 0.9528019428253174, |
|
"learning_rate": 4.735756357693692e-05, |
|
"loss": 0.7454, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.36347055758207397, |
|
"grad_norm": 1.011826515197754, |
|
"learning_rate": 4.7236517898193094e-05, |
|
"loss": 0.7772, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.36477331943720687, |
|
"grad_norm": 0.9649999141693115, |
|
"learning_rate": 4.711521434155578e-05, |
|
"loss": 0.8043, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.36607608129233976, |
|
"grad_norm": 0.9927408695220947, |
|
"learning_rate": 4.6993655029745263e-05, |
|
"loss": 0.8378, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.36737884314747266, |
|
"grad_norm": 1.0498725175857544, |
|
"learning_rate": 4.6871842089957375e-05, |
|
"loss": 0.8455, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.36868160500260555, |
|
"grad_norm": 0.9903897643089294, |
|
"learning_rate": 4.674977765382622e-05, |
|
"loss": 0.7628, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.3699843668577384, |
|
"grad_norm": 1.0428470373153687, |
|
"learning_rate": 4.662746385738691e-05, |
|
"loss": 0.8221, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.3712871287128713, |
|
"grad_norm": 0.9479437470436096, |
|
"learning_rate": 4.6504902841038185e-05, |
|
"loss": 0.7747, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.3725898905680042, |
|
"grad_norm": 0.9896458983421326, |
|
"learning_rate": 4.6382096749504944e-05, |
|
"loss": 0.7987, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.3738926524231371, |
|
"grad_norm": 0.9583244919776917, |
|
"learning_rate": 4.6259047731800675e-05, |
|
"loss": 0.7863, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.3751954142782699, |
|
"grad_norm": 0.9551007747650146, |
|
"learning_rate": 4.613575794118994e-05, |
|
"loss": 0.7665, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.3764981761334028, |
|
"grad_norm": 1.0227469205856323, |
|
"learning_rate": 4.601222953515063e-05, |
|
"loss": 0.8208, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.3778009379885357, |
|
"grad_norm": 1.0073151588439941, |
|
"learning_rate": 4.588846467533622e-05, |
|
"loss": 0.8182, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.3791036998436686, |
|
"grad_norm": 0.9630952477455139, |
|
"learning_rate": 4.576446552753793e-05, |
|
"loss": 0.7829, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.38040646169880143, |
|
"grad_norm": 0.9635125398635864, |
|
"learning_rate": 4.564023426164688e-05, |
|
"loss": 0.8203, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.38170922355393433, |
|
"grad_norm": 0.9933289885520935, |
|
"learning_rate": 4.551577305161605e-05, |
|
"loss": 0.7674, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.3830119854090672, |
|
"grad_norm": 1.050747036933899, |
|
"learning_rate": 4.539108407542228e-05, |
|
"loss": 0.8082, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.3843147472642001, |
|
"grad_norm": 1.0443624258041382, |
|
"learning_rate": 4.526616951502815e-05, |
|
"loss": 0.8243, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.385617509119333, |
|
"grad_norm": 1.0552526712417603, |
|
"learning_rate": 4.5141031556343774e-05, |
|
"loss": 0.7807, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.38692027097446585, |
|
"grad_norm": 1.0113283395767212, |
|
"learning_rate": 4.501567238918859e-05, |
|
"loss": 0.8494, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.38822303282959875, |
|
"grad_norm": 1.0051511526107788, |
|
"learning_rate": 4.4890094207253014e-05, |
|
"loss": 0.7844, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.38952579468473164, |
|
"grad_norm": 1.0065789222717285, |
|
"learning_rate": 4.476429920806002e-05, |
|
"loss": 0.7767, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.39082855653986454, |
|
"grad_norm": 1.0727571249008179, |
|
"learning_rate": 4.463828959292677e-05, |
|
"loss": 0.8314, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.3921313183949974, |
|
"grad_norm": 1.0275354385375977, |
|
"learning_rate": 4.451206756692601e-05, |
|
"loss": 0.8169, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.39343408025013027, |
|
"grad_norm": 1.0231237411499023, |
|
"learning_rate": 4.4385635338847514e-05, |
|
"loss": 0.7747, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.39473684210526316, |
|
"grad_norm": 0.9855037927627563, |
|
"learning_rate": 4.4258995121159454e-05, |
|
"loss": 0.8316, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.39603960396039606, |
|
"grad_norm": 0.9906755089759827, |
|
"learning_rate": 4.413214912996965e-05, |
|
"loss": 0.8246, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.3973423658155289, |
|
"grad_norm": 1.0087635517120361, |
|
"learning_rate": 4.4005099584986805e-05, |
|
"loss": 0.7531, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.3986451276706618, |
|
"grad_norm": 1.024081826210022, |
|
"learning_rate": 4.387784870948165e-05, |
|
"loss": 0.7737, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.3999478895257947, |
|
"grad_norm": 1.0000710487365723, |
|
"learning_rate": 4.375039873024805e-05, |
|
"loss": 0.8158, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.4012506513809276, |
|
"grad_norm": 0.9954432249069214, |
|
"learning_rate": 4.362275187756406e-05, |
|
"loss": 0.7869, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.4025534132360605, |
|
"grad_norm": 0.9808403849601746, |
|
"learning_rate": 4.3494910385152844e-05, |
|
"loss": 0.7731, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.4038561750911933, |
|
"grad_norm": 0.9858344197273254, |
|
"learning_rate": 4.336687649014361e-05, |
|
"loss": 0.8209, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.4051589369463262, |
|
"grad_norm": 0.9827001690864563, |
|
"learning_rate": 4.323865243303247e-05, |
|
"loss": 0.7704, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.4064616988014591, |
|
"grad_norm": 0.9746841788291931, |
|
"learning_rate": 4.311024045764326e-05, |
|
"loss": 0.805, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.407764460656592, |
|
"grad_norm": 0.964389443397522, |
|
"learning_rate": 4.298164281108819e-05, |
|
"loss": 0.7703, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.40906722251172484, |
|
"grad_norm": 0.9591614603996277, |
|
"learning_rate": 4.2852861743728624e-05, |
|
"loss": 0.7739, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.41036998436685773, |
|
"grad_norm": 0.9757986664772034, |
|
"learning_rate": 4.272389950913562e-05, |
|
"loss": 0.7903, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.4116727462219906, |
|
"grad_norm": 1.0358850955963135, |
|
"learning_rate": 4.259475836405054e-05, |
|
"loss": 0.8438, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.4129755080771235, |
|
"grad_norm": 0.9954838752746582, |
|
"learning_rate": 4.2465440568345526e-05, |
|
"loss": 0.8139, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.41427826993225636, |
|
"grad_norm": 1.109437108039856, |
|
"learning_rate": 4.2335948384984e-05, |
|
"loss": 0.8427, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.41558103178738925, |
|
"grad_norm": 1.0301264524459839, |
|
"learning_rate": 4.220628407998102e-05, |
|
"loss": 0.8215, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.41688379364252215, |
|
"grad_norm": 1.0422919988632202, |
|
"learning_rate": 4.2076449922363616e-05, |
|
"loss": 0.82, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.41818655549765504, |
|
"grad_norm": 0.936849057674408, |
|
"learning_rate": 4.194644818413117e-05, |
|
"loss": 0.7258, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.4194893173527879, |
|
"grad_norm": 0.9646280407905579, |
|
"learning_rate": 4.181628114021556e-05, |
|
"loss": 0.8013, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.4207920792079208, |
|
"grad_norm": 0.9410605430603027, |
|
"learning_rate": 4.168595106844139e-05, |
|
"loss": 0.791, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.42209484106305367, |
|
"grad_norm": 1.0189350843429565, |
|
"learning_rate": 4.155546024948611e-05, |
|
"loss": 0.7986, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.42339760291818657, |
|
"grad_norm": 0.9734213948249817, |
|
"learning_rate": 4.1424810966840165e-05, |
|
"loss": 0.798, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.42470036477331946, |
|
"grad_norm": 0.9883679747581482, |
|
"learning_rate": 4.129400550676697e-05, |
|
"loss": 0.7922, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.4260031266284523, |
|
"grad_norm": 1.044937014579773, |
|
"learning_rate": 4.116304615826294e-05, |
|
"loss": 0.8352, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.4273058884835852, |
|
"grad_norm": 0.9842580556869507, |
|
"learning_rate": 4.10319352130174e-05, |
|
"loss": 0.8078, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.4286086503387181, |
|
"grad_norm": 0.9620386958122253, |
|
"learning_rate": 4.090067496537251e-05, |
|
"loss": 0.7724, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.429911412193851, |
|
"grad_norm": 0.9960795044898987, |
|
"learning_rate": 4.076926771228312e-05, |
|
"loss": 0.7792, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.4312141740489838, |
|
"grad_norm": 0.9758504629135132, |
|
"learning_rate": 4.0637715753276564e-05, |
|
"loss": 0.81, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.4325169359041167, |
|
"grad_norm": 0.9628814458847046, |
|
"learning_rate": 4.050602139041242e-05, |
|
"loss": 0.7766, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.4338196977592496, |
|
"grad_norm": 0.9933943748474121, |
|
"learning_rate": 4.037418692824219e-05, |
|
"loss": 0.8216, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.4351224596143825, |
|
"grad_norm": 1.0571389198303223, |
|
"learning_rate": 4.024221467376904e-05, |
|
"loss": 0.8159, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.43642522146951535, |
|
"grad_norm": 0.9927603006362915, |
|
"learning_rate": 4.011010693640743e-05, |
|
"loss": 0.7874, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.43772798332464824, |
|
"grad_norm": 0.9746401906013489, |
|
"learning_rate": 3.9977866027942596e-05, |
|
"loss": 0.8087, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.43903074517978113, |
|
"grad_norm": 0.9622447490692139, |
|
"learning_rate": 3.984549426249021e-05, |
|
"loss": 0.7683, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.44033350703491403, |
|
"grad_norm": 0.9910291433334351, |
|
"learning_rate": 3.9712993956455846e-05, |
|
"loss": 0.808, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.4416362688900469, |
|
"grad_norm": 1.0119026899337769, |
|
"learning_rate": 3.958036742849441e-05, |
|
"loss": 0.8188, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.44293903074517976, |
|
"grad_norm": 0.9805094599723816, |
|
"learning_rate": 3.9447616999469633e-05, |
|
"loss": 0.7924, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.44424179260031266, |
|
"grad_norm": 0.9824138283729553, |
|
"learning_rate": 3.9314744992413405e-05, |
|
"loss": 0.7637, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.44554455445544555, |
|
"grad_norm": 0.9920548796653748, |
|
"learning_rate": 3.9181753732485114e-05, |
|
"loss": 0.7444, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.44684731631057845, |
|
"grad_norm": 1.054882287979126, |
|
"learning_rate": 3.9048645546931025e-05, |
|
"loss": 0.8301, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.4481500781657113, |
|
"grad_norm": 0.9370349645614624, |
|
"learning_rate": 3.8915422765043496e-05, |
|
"loss": 0.7514, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.4494528400208442, |
|
"grad_norm": 0.9788942933082581, |
|
"learning_rate": 3.878208771812023e-05, |
|
"loss": 0.7609, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.4507556018759771, |
|
"grad_norm": 0.9948318004608154, |
|
"learning_rate": 3.864864273942347e-05, |
|
"loss": 0.8337, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.45205836373110997, |
|
"grad_norm": 0.9403611421585083, |
|
"learning_rate": 3.851509016413922e-05, |
|
"loss": 0.7551, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.4533611255862428, |
|
"grad_norm": 1.0166083574295044, |
|
"learning_rate": 3.838143232933631e-05, |
|
"loss": 0.777, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.4546638874413757, |
|
"grad_norm": 1.039010763168335, |
|
"learning_rate": 3.8247671573925534e-05, |
|
"loss": 0.8791, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.4559666492965086, |
|
"grad_norm": 1.0491831302642822, |
|
"learning_rate": 3.8113810238618744e-05, |
|
"loss": 0.8649, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4572694111516415, |
|
"grad_norm": 1.0406017303466797, |
|
"learning_rate": 3.7979850665887825e-05, |
|
"loss": 0.8064, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.4585721730067744, |
|
"grad_norm": 1.0099365711212158, |
|
"learning_rate": 3.784579519992377e-05, |
|
"loss": 0.8251, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.4598749348619072, |
|
"grad_norm": 0.992388129234314, |
|
"learning_rate": 3.771164618659563e-05, |
|
"loss": 0.7879, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.4611776967170401, |
|
"grad_norm": 1.0061777830123901, |
|
"learning_rate": 3.757740597340946e-05, |
|
"loss": 0.8238, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.462480458572173, |
|
"grad_norm": 0.9892385601997375, |
|
"learning_rate": 3.744307690946723e-05, |
|
"loss": 0.8531, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.4637832204273059, |
|
"grad_norm": 1.030580759048462, |
|
"learning_rate": 3.730866134542575e-05, |
|
"loss": 0.8113, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.46508598228243875, |
|
"grad_norm": 0.9647723436355591, |
|
"learning_rate": 3.717416163345552e-05, |
|
"loss": 0.7581, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.46638874413757164, |
|
"grad_norm": 1.0186620950698853, |
|
"learning_rate": 3.703958012719953e-05, |
|
"loss": 0.7787, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.46769150599270454, |
|
"grad_norm": 0.9629569053649902, |
|
"learning_rate": 3.690491918173215e-05, |
|
"loss": 0.7408, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.46899426784783743, |
|
"grad_norm": 1.0066182613372803, |
|
"learning_rate": 3.677018115351786e-05, |
|
"loss": 0.7768, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.47029702970297027, |
|
"grad_norm": 0.9587082862854004, |
|
"learning_rate": 3.663536840037001e-05, |
|
"loss": 0.7646, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.47159979155810317, |
|
"grad_norm": 0.9719291925430298, |
|
"learning_rate": 3.65004832814096e-05, |
|
"loss": 0.7632, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.47290255341323606, |
|
"grad_norm": 1.0020971298217773, |
|
"learning_rate": 3.6365528157024e-05, |
|
"loss": 0.82, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.47420531526836895, |
|
"grad_norm": 1.0055842399597168, |
|
"learning_rate": 3.623050538882556e-05, |
|
"loss": 0.853, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.47550807712350185, |
|
"grad_norm": 0.9556001424789429, |
|
"learning_rate": 3.6095417339610406e-05, |
|
"loss": 0.7644, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.4768108389786347, |
|
"grad_norm": 1.0069520473480225, |
|
"learning_rate": 3.5960266373317e-05, |
|
"loss": 0.784, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.4781136008337676, |
|
"grad_norm": 0.9717457890510559, |
|
"learning_rate": 3.58250548549848e-05, |
|
"loss": 0.7313, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.4794163626889005, |
|
"grad_norm": 0.9917356371879578, |
|
"learning_rate": 3.5689785150712915e-05, |
|
"loss": 0.751, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.48071912454403337, |
|
"grad_norm": 1.0234906673431396, |
|
"learning_rate": 3.555445962761864e-05, |
|
"loss": 0.7926, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.4820218863991662, |
|
"grad_norm": 0.953758716583252, |
|
"learning_rate": 3.541908065379603e-05, |
|
"loss": 0.7566, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.4833246482542991, |
|
"grad_norm": 0.9510905146598816, |
|
"learning_rate": 3.5283650598274535e-05, |
|
"loss": 0.7873, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.484627410109432, |
|
"grad_norm": 1.016905665397644, |
|
"learning_rate": 3.5148171830977476e-05, |
|
"loss": 0.8505, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.4859301719645649, |
|
"grad_norm": 0.9876651763916016, |
|
"learning_rate": 3.501264672268058e-05, |
|
"loss": 0.763, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.48723293381969773, |
|
"grad_norm": 1.031883716583252, |
|
"learning_rate": 3.4877077644970536e-05, |
|
"loss": 0.77, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.48853569567483063, |
|
"grad_norm": 0.9771068692207336, |
|
"learning_rate": 3.474146697020344e-05, |
|
"loss": 0.7855, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.4898384575299635, |
|
"grad_norm": 1.0575013160705566, |
|
"learning_rate": 3.460581707146331e-05, |
|
"loss": 0.822, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.4911412193850964, |
|
"grad_norm": 0.9549304842948914, |
|
"learning_rate": 3.4470130322520564e-05, |
|
"loss": 0.745, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.4924439812402293, |
|
"grad_norm": 0.9750242829322815, |
|
"learning_rate": 3.433440909779045e-05, |
|
"loss": 0.794, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.49374674309536215, |
|
"grad_norm": 0.9908477067947388, |
|
"learning_rate": 3.4198655772291545e-05, |
|
"loss": 0.761, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.49504950495049505, |
|
"grad_norm": 0.9413042068481445, |
|
"learning_rate": 3.406287272160414e-05, |
|
"loss": 0.7396, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.49635226680562794, |
|
"grad_norm": 1.0032494068145752, |
|
"learning_rate": 3.392706232182871e-05, |
|
"loss": 0.7761, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.49765502866076083, |
|
"grad_norm": 1.006235122680664, |
|
"learning_rate": 3.37912269495443e-05, |
|
"loss": 0.8058, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.4989577905158937, |
|
"grad_norm": 1.0400941371917725, |
|
"learning_rate": 3.3655368981766984e-05, |
|
"loss": 0.7932, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.5002605523710266, |
|
"grad_norm": 0.962922990322113, |
|
"learning_rate": 3.35194907959082e-05, |
|
"loss": 0.8032, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.5015633142261594, |
|
"grad_norm": 1.0155935287475586, |
|
"learning_rate": 3.338359476973322e-05, |
|
"loss": 0.7974, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.5028660760812923, |
|
"grad_norm": 1.0011403560638428, |
|
"learning_rate": 3.324768328131952e-05, |
|
"loss": 0.8059, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.5041688379364252, |
|
"grad_norm": 0.9862634539604187, |
|
"learning_rate": 3.31117587090151e-05, |
|
"loss": 0.7676, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.5054715997915581, |
|
"grad_norm": 1.0101540088653564, |
|
"learning_rate": 3.297582343139697e-05, |
|
"loss": 0.784, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.506774361646691, |
|
"grad_norm": 1.0223101377487183, |
|
"learning_rate": 3.283987982722943e-05, |
|
"loss": 0.7899, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.5080771235018239, |
|
"grad_norm": 1.0440499782562256, |
|
"learning_rate": 3.2703930275422554e-05, |
|
"loss": 0.764, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5093798853569568, |
|
"grad_norm": 0.9758248925209045, |
|
"learning_rate": 3.256797715499041e-05, |
|
"loss": 0.785, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5106826472120897, |
|
"grad_norm": 1.0492044687271118, |
|
"learning_rate": 3.2432022845009586e-05, |
|
"loss": 0.8138, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5119854090672226, |
|
"grad_norm": 0.9614241719245911, |
|
"learning_rate": 3.229606972457745e-05, |
|
"loss": 0.7831, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5132881709223553, |
|
"grad_norm": 0.9824920296669006, |
|
"learning_rate": 3.216012017277056e-05, |
|
"loss": 0.797, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5145909327774882, |
|
"grad_norm": 0.9702708125114441, |
|
"learning_rate": 3.202417656860303e-05, |
|
"loss": 0.7841, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5158936946326211, |
|
"grad_norm": 1.0034425258636475, |
|
"learning_rate": 3.18882412909849e-05, |
|
"loss": 0.7732, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.517196456487754, |
|
"grad_norm": 0.9518890976905823, |
|
"learning_rate": 3.175231671868049e-05, |
|
"loss": 0.7917, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5184992183428869, |
|
"grad_norm": 0.9872190356254578, |
|
"learning_rate": 3.1616405230266765e-05, |
|
"loss": 0.8268, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5198019801980198, |
|
"grad_norm": 0.9964843988418579, |
|
"learning_rate": 3.14805092040918e-05, |
|
"loss": 0.8018, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.5211047420531527, |
|
"grad_norm": 0.9678840637207031, |
|
"learning_rate": 3.134463101823302e-05, |
|
"loss": 0.7752, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.5224075039082856, |
|
"grad_norm": 1.0066790580749512, |
|
"learning_rate": 3.1208773050455704e-05, |
|
"loss": 0.8203, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.5237102657634184, |
|
"grad_norm": 1.0079424381256104, |
|
"learning_rate": 3.10729376781713e-05, |
|
"loss": 0.7749, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.5250130276185513, |
|
"grad_norm": 0.9970865249633789, |
|
"learning_rate": 3.093712727839586e-05, |
|
"loss": 0.7815, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.5263157894736842, |
|
"grad_norm": 0.9871413111686707, |
|
"learning_rate": 3.080134422770844e-05, |
|
"loss": 0.8036, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.5276185513288171, |
|
"grad_norm": 0.9559234380722046, |
|
"learning_rate": 3.066559090220955e-05, |
|
"loss": 0.7968, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.52892131318395, |
|
"grad_norm": 1.0165913105010986, |
|
"learning_rate": 3.052986967747944e-05, |
|
"loss": 0.7787, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.5302240750390829, |
|
"grad_norm": 0.9621928334236145, |
|
"learning_rate": 3.0394182928536692e-05, |
|
"loss": 0.7982, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.5315268368942158, |
|
"grad_norm": 0.9478201866149902, |
|
"learning_rate": 3.025853302979656e-05, |
|
"loss": 0.7643, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.5328295987493487, |
|
"grad_norm": 0.9699729681015015, |
|
"learning_rate": 3.0122922355029468e-05, |
|
"loss": 0.8126, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.5341323606044815, |
|
"grad_norm": 0.9657052755355835, |
|
"learning_rate": 2.9987353277319415e-05, |
|
"loss": 0.7492, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5354351224596143, |
|
"grad_norm": 0.9548848867416382, |
|
"learning_rate": 2.985182816902253e-05, |
|
"loss": 0.7524, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.5367378843147472, |
|
"grad_norm": 0.9573311805725098, |
|
"learning_rate": 2.9716349401725466e-05, |
|
"loss": 0.7849, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.5380406461698801, |
|
"grad_norm": 0.9521474242210388, |
|
"learning_rate": 2.9580919346203974e-05, |
|
"loss": 0.7649, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.539343408025013, |
|
"grad_norm": 0.9720420241355896, |
|
"learning_rate": 2.944554037238137e-05, |
|
"loss": 0.7711, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.5406461698801459, |
|
"grad_norm": 0.9932730197906494, |
|
"learning_rate": 2.9310214849287082e-05, |
|
"loss": 0.7859, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.5419489317352788, |
|
"grad_norm": 0.9962913990020752, |
|
"learning_rate": 2.917494514501519e-05, |
|
"loss": 0.7431, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5432516935904117, |
|
"grad_norm": 1.0048481225967407, |
|
"learning_rate": 2.9039733626682997e-05, |
|
"loss": 0.8003, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5445544554455446, |
|
"grad_norm": 1.0100042819976807, |
|
"learning_rate": 2.8904582660389588e-05, |
|
"loss": 0.7943, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5458572173006775, |
|
"grad_norm": 0.9386914372444153, |
|
"learning_rate": 2.8769494611174438e-05, |
|
"loss": 0.7061, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.5471599791558103, |
|
"grad_norm": 0.9435542225837708, |
|
"learning_rate": 2.8634471842976007e-05, |
|
"loss": 0.757, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5484627410109432, |
|
"grad_norm": 0.9891154766082764, |
|
"learning_rate": 2.8499516718590397e-05, |
|
"loss": 0.7592, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5497655028660761, |
|
"grad_norm": 0.9522406458854675, |
|
"learning_rate": 2.8364631599629983e-05, |
|
"loss": 0.7653, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.551068264721209, |
|
"grad_norm": 0.9937999248504639, |
|
"learning_rate": 2.8229818846482137e-05, |
|
"loss": 0.7801, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5523710265763419, |
|
"grad_norm": 0.9582085609436035, |
|
"learning_rate": 2.8095080818267844e-05, |
|
"loss": 0.7721, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5536737884314747, |
|
"grad_norm": 0.9662520885467529, |
|
"learning_rate": 2.7960419872800468e-05, |
|
"loss": 0.7382, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5549765502866076, |
|
"grad_norm": 0.9714727997779846, |
|
"learning_rate": 2.7825838366544486e-05, |
|
"loss": 0.7918, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.5562793121417405, |
|
"grad_norm": 0.9601700305938721, |
|
"learning_rate": 2.7691338654574235e-05, |
|
"loss": 0.7797, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.5575820739968733, |
|
"grad_norm": 1.0009185075759888, |
|
"learning_rate": 2.7556923090532765e-05, |
|
"loss": 0.7606, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.5588848358520062, |
|
"grad_norm": 1.0026593208312988, |
|
"learning_rate": 2.7422594026590544e-05, |
|
"loss": 0.7902, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.5601875977071391, |
|
"grad_norm": 0.9597018957138062, |
|
"learning_rate": 2.728835381340437e-05, |
|
"loss": 0.7998, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.561490359562272, |
|
"grad_norm": 1.0047636032104492, |
|
"learning_rate": 2.715420480007623e-05, |
|
"loss": 0.7756, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.5627931214174049, |
|
"grad_norm": 0.9273685216903687, |
|
"learning_rate": 2.702014933411218e-05, |
|
"loss": 0.7212, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.5640958832725378, |
|
"grad_norm": 0.9448891282081604, |
|
"learning_rate": 2.6886189761381257e-05, |
|
"loss": 0.7973, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.5653986451276707, |
|
"grad_norm": 0.993796706199646, |
|
"learning_rate": 2.6752328426074453e-05, |
|
"loss": 0.7812, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.5667014069828036, |
|
"grad_norm": 0.9424161911010742, |
|
"learning_rate": 2.6618567670663682e-05, |
|
"loss": 0.7371, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.5680041688379365, |
|
"grad_norm": 0.9722470641136169, |
|
"learning_rate": 2.648490983586077e-05, |
|
"loss": 0.7585, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.5693069306930693, |
|
"grad_norm": 0.9650734066963196, |
|
"learning_rate": 2.635135726057652e-05, |
|
"loss": 0.7931, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.5706096925482022, |
|
"grad_norm": 0.9851652383804321, |
|
"learning_rate": 2.621791228187977e-05, |
|
"loss": 0.7899, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.571912454403335, |
|
"grad_norm": 0.9415690898895264, |
|
"learning_rate": 2.6084577234956492e-05, |
|
"loss": 0.7576, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.5732152162584679, |
|
"grad_norm": 0.9792377352714539, |
|
"learning_rate": 2.5951354453068963e-05, |
|
"loss": 0.767, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.5745179781136008, |
|
"grad_norm": 0.956155002117157, |
|
"learning_rate": 2.5818246267514877e-05, |
|
"loss": 0.7706, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.5758207399687337, |
|
"grad_norm": 1.0018938779830933, |
|
"learning_rate": 2.5685255007586596e-05, |
|
"loss": 0.7977, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.5771235018238666, |
|
"grad_norm": 0.9325889945030212, |
|
"learning_rate": 2.5552383000530368e-05, |
|
"loss": 0.7331, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.5784262636789995, |
|
"grad_norm": 1.0071479082107544, |
|
"learning_rate": 2.541963257150559e-05, |
|
"loss": 0.8098, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.5797290255341324, |
|
"grad_norm": 0.9542084336280823, |
|
"learning_rate": 2.528700604354415e-05, |
|
"loss": 0.7935, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.5810317873892652, |
|
"grad_norm": 0.9786474704742432, |
|
"learning_rate": 2.5154505737509783e-05, |
|
"loss": 0.7787, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.5823345492443981, |
|
"grad_norm": 1.0172762870788574, |
|
"learning_rate": 2.5022133972057408e-05, |
|
"loss": 0.7901, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.583637311099531, |
|
"grad_norm": 0.9035043716430664, |
|
"learning_rate": 2.4889893063592575e-05, |
|
"loss": 0.7206, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.5849400729546639, |
|
"grad_norm": 0.9805537462234497, |
|
"learning_rate": 2.4757785326230956e-05, |
|
"loss": 0.7885, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.5862428348097968, |
|
"grad_norm": 0.9988163113594055, |
|
"learning_rate": 2.462581307175782e-05, |
|
"loss": 0.7653, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.5875455966649297, |
|
"grad_norm": 0.9464346766471863, |
|
"learning_rate": 2.4493978609587585e-05, |
|
"loss": 0.7579, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.5888483585200626, |
|
"grad_norm": 0.8832387328147888, |
|
"learning_rate": 2.4362284246723423e-05, |
|
"loss": 0.7006, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.5901511203751955, |
|
"grad_norm": 1.0256978273391724, |
|
"learning_rate": 2.423073228771687e-05, |
|
"loss": 0.8271, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.5914538822303282, |
|
"grad_norm": 0.9342496395111084, |
|
"learning_rate": 2.409932503462749e-05, |
|
"loss": 0.7142, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.5927566440854611, |
|
"grad_norm": 0.9707951545715332, |
|
"learning_rate": 2.39680647869826e-05, |
|
"loss": 0.7556, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.594059405940594, |
|
"grad_norm": 0.9290401339530945, |
|
"learning_rate": 2.383695384173706e-05, |
|
"loss": 0.7305, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.5953621677957269, |
|
"grad_norm": 0.9683957099914551, |
|
"learning_rate": 2.3705994493233018e-05, |
|
"loss": 0.7383, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.5966649296508598, |
|
"grad_norm": 0.9259218573570251, |
|
"learning_rate": 2.3575189033159826e-05, |
|
"loss": 0.7105, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.5979676915059927, |
|
"grad_norm": 0.9563105702400208, |
|
"learning_rate": 2.3444539750513887e-05, |
|
"loss": 0.7737, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.5992704533611256, |
|
"grad_norm": 0.9757603406906128, |
|
"learning_rate": 2.3314048931558614e-05, |
|
"loss": 0.7539, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.6005732152162585, |
|
"grad_norm": 0.9185714721679688, |
|
"learning_rate": 2.3183718859784438e-05, |
|
"loss": 0.6981, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.6018759770713914, |
|
"grad_norm": 1.016922116279602, |
|
"learning_rate": 2.3053551815868828e-05, |
|
"loss": 0.7765, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.6031787389265242, |
|
"grad_norm": 0.9666367769241333, |
|
"learning_rate": 2.2923550077636375e-05, |
|
"loss": 0.7383, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.6044815007816571, |
|
"grad_norm": 0.9038827419281006, |
|
"learning_rate": 2.2793715920018984e-05, |
|
"loss": 0.7251, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.60578426263679, |
|
"grad_norm": 1.0051590204238892, |
|
"learning_rate": 2.2664051615015998e-05, |
|
"loss": 0.7762, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.6070870244919229, |
|
"grad_norm": 0.9449974894523621, |
|
"learning_rate": 2.253455943165448e-05, |
|
"loss": 0.7366, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.6083897863470558, |
|
"grad_norm": 0.9895748496055603, |
|
"learning_rate": 2.2405241635949466e-05, |
|
"loss": 0.7917, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.6096925482021887, |
|
"grad_norm": 0.9719516038894653, |
|
"learning_rate": 2.2276100490864383e-05, |
|
"loss": 0.8011, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.6109953100573216, |
|
"grad_norm": 1.0024269819259644, |
|
"learning_rate": 2.2147138256271367e-05, |
|
"loss": 0.82, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.6122980719124544, |
|
"grad_norm": 0.9872387051582336, |
|
"learning_rate": 2.2018357188911808e-05, |
|
"loss": 0.8135, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.6136008337675873, |
|
"grad_norm": 1.003280520439148, |
|
"learning_rate": 2.1889759542356744e-05, |
|
"loss": 0.7901, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.6149035956227201, |
|
"grad_norm": 0.9519487023353577, |
|
"learning_rate": 2.1761347566967527e-05, |
|
"loss": 0.7363, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.616206357477853, |
|
"grad_norm": 0.955209493637085, |
|
"learning_rate": 2.16331235098564e-05, |
|
"loss": 0.7903, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.6175091193329859, |
|
"grad_norm": 0.9265815615653992, |
|
"learning_rate": 2.150508961484716e-05, |
|
"loss": 0.7099, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.6188118811881188, |
|
"grad_norm": 0.9495141506195068, |
|
"learning_rate": 2.1377248122435933e-05, |
|
"loss": 0.7583, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.6201146430432517, |
|
"grad_norm": 0.9717061519622803, |
|
"learning_rate": 2.124960126975194e-05, |
|
"loss": 0.7672, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.6214174048983846, |
|
"grad_norm": 0.969534158706665, |
|
"learning_rate": 2.112215129051835e-05, |
|
"loss": 0.762, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.6227201667535175, |
|
"grad_norm": 0.9770892858505249, |
|
"learning_rate": 2.0994900415013196e-05, |
|
"loss": 0.77, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.6240229286086504, |
|
"grad_norm": 1.0208752155303955, |
|
"learning_rate": 2.0867850870030346e-05, |
|
"loss": 0.7682, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.6253256904637832, |
|
"grad_norm": 0.9410081505775452, |
|
"learning_rate": 2.0741004878840553e-05, |
|
"loss": 0.7782, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.6266284523189161, |
|
"grad_norm": 0.9654091000556946, |
|
"learning_rate": 2.0614364661152484e-05, |
|
"loss": 0.7565, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.627931214174049, |
|
"grad_norm": 0.9786356091499329, |
|
"learning_rate": 2.0487932433073992e-05, |
|
"loss": 0.786, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.6292339760291819, |
|
"grad_norm": 1.0080342292785645, |
|
"learning_rate": 2.0361710407073226e-05, |
|
"loss": 0.7865, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.6305367378843147, |
|
"grad_norm": 0.9419269561767578, |
|
"learning_rate": 2.0235700791939983e-05, |
|
"loss": 0.726, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.6318394997394476, |
|
"grad_norm": 0.9085276126861572, |
|
"learning_rate": 2.0109905792746994e-05, |
|
"loss": 0.6675, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.6331422615945805, |
|
"grad_norm": 0.9954288601875305, |
|
"learning_rate": 1.9984327610811408e-05, |
|
"loss": 0.796, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.6344450234497134, |
|
"grad_norm": 0.9211850762367249, |
|
"learning_rate": 1.9858968443656213e-05, |
|
"loss": 0.7299, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.6357477853048463, |
|
"grad_norm": 0.952290415763855, |
|
"learning_rate": 1.973383048497185e-05, |
|
"loss": 0.7991, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.6370505471599791, |
|
"grad_norm": 0.9324260950088501, |
|
"learning_rate": 1.9608915924577718e-05, |
|
"loss": 0.751, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.638353309015112, |
|
"grad_norm": 0.9979082345962524, |
|
"learning_rate": 1.9484226948383953e-05, |
|
"loss": 0.8021, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.6396560708702449, |
|
"grad_norm": 0.9517451524734497, |
|
"learning_rate": 1.9359765738353123e-05, |
|
"loss": 0.8041, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.6409588327253778, |
|
"grad_norm": 1.0136103630065918, |
|
"learning_rate": 1.923553447246207e-05, |
|
"loss": 0.8362, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.6422615945805107, |
|
"grad_norm": 0.939146101474762, |
|
"learning_rate": 1.9111535324663782e-05, |
|
"loss": 0.7419, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.6435643564356436, |
|
"grad_norm": 0.9575389623641968, |
|
"learning_rate": 1.8987770464849367e-05, |
|
"loss": 0.7711, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.6448671182907765, |
|
"grad_norm": 1.0068771839141846, |
|
"learning_rate": 1.8864242058810046e-05, |
|
"loss": 0.7811, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.6461698801459094, |
|
"grad_norm": 0.9527457356452942, |
|
"learning_rate": 1.8740952268199326e-05, |
|
"loss": 0.7456, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.6474726420010423, |
|
"grad_norm": 0.9728114008903503, |
|
"learning_rate": 1.861790325049507e-05, |
|
"loss": 0.7742, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.648775403856175, |
|
"grad_norm": 0.9863789081573486, |
|
"learning_rate": 1.8495097158961796e-05, |
|
"loss": 0.8064, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.6500781657113079, |
|
"grad_norm": 0.9904276132583618, |
|
"learning_rate": 1.8372536142613083e-05, |
|
"loss": 0.7901, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.6513809275664408, |
|
"grad_norm": 0.9304032325744629, |
|
"learning_rate": 1.8250222346173787e-05, |
|
"loss": 0.7527, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6526836894215737, |
|
"grad_norm": 1.0106309652328491, |
|
"learning_rate": 1.8128157910042626e-05, |
|
"loss": 0.7908, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.6539864512767066, |
|
"grad_norm": 0.9775118231773376, |
|
"learning_rate": 1.800634497025474e-05, |
|
"loss": 0.7575, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.6552892131318395, |
|
"grad_norm": 0.9700849652290344, |
|
"learning_rate": 1.7884785658444217e-05, |
|
"loss": 0.7645, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.6565919749869724, |
|
"grad_norm": 1.0006121397018433, |
|
"learning_rate": 1.776348210180689e-05, |
|
"loss": 0.7776, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.6578947368421053, |
|
"grad_norm": 0.9359118938446045, |
|
"learning_rate": 1.764243642306307e-05, |
|
"loss": 0.7699, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.6591974986972381, |
|
"grad_norm": 0.9702383279800415, |
|
"learning_rate": 1.7521650740420365e-05, |
|
"loss": 0.7779, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.660500260552371, |
|
"grad_norm": 0.9199949502944946, |
|
"learning_rate": 1.740112716753669e-05, |
|
"loss": 0.7291, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.6618030224075039, |
|
"grad_norm": 0.9978870153427124, |
|
"learning_rate": 1.7280867813483194e-05, |
|
"loss": 0.8007, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.6631057842626368, |
|
"grad_norm": 1.000036597251892, |
|
"learning_rate": 1.7160874782707445e-05, |
|
"loss": 0.7971, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.6644085461177697, |
|
"grad_norm": 1.0710171461105347, |
|
"learning_rate": 1.704115017499651e-05, |
|
"loss": 0.7708, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.6657113079729026, |
|
"grad_norm": 1.030186653137207, |
|
"learning_rate": 1.6921696085440285e-05, |
|
"loss": 0.823, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.6670140698280355, |
|
"grad_norm": 0.9758132100105286, |
|
"learning_rate": 1.6802514604394815e-05, |
|
"loss": 0.7555, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.6683168316831684, |
|
"grad_norm": 0.95241379737854, |
|
"learning_rate": 1.668360781744565e-05, |
|
"loss": 0.7445, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.6696195935383012, |
|
"grad_norm": 0.9561266899108887, |
|
"learning_rate": 1.6564977805371468e-05, |
|
"loss": 0.7633, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.670922355393434, |
|
"grad_norm": 0.9512428641319275, |
|
"learning_rate": 1.644662664410754e-05, |
|
"loss": 0.8086, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.6722251172485669, |
|
"grad_norm": 0.9314407110214233, |
|
"learning_rate": 1.632855640470947e-05, |
|
"loss": 0.7567, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.6735278791036998, |
|
"grad_norm": 0.9906071424484253, |
|
"learning_rate": 1.6210769153316998e-05, |
|
"loss": 0.7736, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.6748306409588327, |
|
"grad_norm": 0.9421218633651733, |
|
"learning_rate": 1.609326695111772e-05, |
|
"loss": 0.7363, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.6761334028139656, |
|
"grad_norm": 0.9556782245635986, |
|
"learning_rate": 1.59760518543111e-05, |
|
"loss": 0.7377, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.6774361646690985, |
|
"grad_norm": 0.9987593293190002, |
|
"learning_rate": 1.5859125914072512e-05, |
|
"loss": 0.7957, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.6787389265242314, |
|
"grad_norm": 0.9779931902885437, |
|
"learning_rate": 1.5742491176517264e-05, |
|
"loss": 0.7537, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.6800416883793643, |
|
"grad_norm": 0.9649803638458252, |
|
"learning_rate": 1.5626149682664862e-05, |
|
"loss": 0.7694, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.6813444502344971, |
|
"grad_norm": 0.9432051777839661, |
|
"learning_rate": 1.551010346840329e-05, |
|
"loss": 0.7248, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.68264721208963, |
|
"grad_norm": 0.9498631954193115, |
|
"learning_rate": 1.5394354564453304e-05, |
|
"loss": 0.8328, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.6839499739447629, |
|
"grad_norm": 0.9393202662467957, |
|
"learning_rate": 1.527890499633302e-05, |
|
"loss": 0.7467, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.6852527357998958, |
|
"grad_norm": 0.9452553391456604, |
|
"learning_rate": 1.5163756784322351e-05, |
|
"loss": 0.7566, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.6865554976550287, |
|
"grad_norm": 0.94024258852005, |
|
"learning_rate": 1.5048911943427747e-05, |
|
"loss": 0.7374, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.6878582595101616, |
|
"grad_norm": 0.9636396765708923, |
|
"learning_rate": 1.4934372483346853e-05, |
|
"loss": 0.7694, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.6891610213652944, |
|
"grad_norm": 0.9581491947174072, |
|
"learning_rate": 1.482014040843342e-05, |
|
"loss": 0.7678, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.6904637832204273, |
|
"grad_norm": 0.9386518597602844, |
|
"learning_rate": 1.4706217717662188e-05, |
|
"loss": 0.7297, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.6917665450755602, |
|
"grad_norm": 0.9576655030250549, |
|
"learning_rate": 1.4592606404593866e-05, |
|
"loss": 0.7353, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.693069306930693, |
|
"grad_norm": 0.9293531775474548, |
|
"learning_rate": 1.447930845734035e-05, |
|
"loss": 0.7473, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.6943720687858259, |
|
"grad_norm": 1.092103362083435, |
|
"learning_rate": 1.436632585852982e-05, |
|
"loss": 0.8341, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.6956748306409588, |
|
"grad_norm": 0.9893858432769775, |
|
"learning_rate": 1.4253660585272107e-05, |
|
"loss": 0.8196, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.6969775924960917, |
|
"grad_norm": 0.9730793833732605, |
|
"learning_rate": 1.4141314609124138e-05, |
|
"loss": 0.7769, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.6982803543512246, |
|
"grad_norm": 0.9343295097351074, |
|
"learning_rate": 1.4029289896055306e-05, |
|
"loss": 0.7712, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.6995831162063575, |
|
"grad_norm": 0.9929169416427612, |
|
"learning_rate": 1.3917588406413202e-05, |
|
"loss": 0.7903, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.7008858780614904, |
|
"grad_norm": 0.9541279077529907, |
|
"learning_rate": 1.3806212094889223e-05, |
|
"loss": 0.7502, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.7021886399166233, |
|
"grad_norm": 0.9835225343704224, |
|
"learning_rate": 1.3695162910484382e-05, |
|
"loss": 0.7524, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.7034914017717562, |
|
"grad_norm": 0.9914178252220154, |
|
"learning_rate": 1.3584442796475251e-05, |
|
"loss": 0.7724, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.704794163626889, |
|
"grad_norm": 0.9173280000686646, |
|
"learning_rate": 1.3474053690379918e-05, |
|
"loss": 0.6813, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.7060969254820219, |
|
"grad_norm": 0.9709863662719727, |
|
"learning_rate": 1.3363997523924042e-05, |
|
"loss": 0.7565, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.7073996873371547, |
|
"grad_norm": 0.9641502499580383, |
|
"learning_rate": 1.3254276223007141e-05, |
|
"loss": 0.7395, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.7087024491922876, |
|
"grad_norm": 0.9294100999832153, |
|
"learning_rate": 1.314489170766879e-05, |
|
"loss": 0.7741, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.7100052110474205, |
|
"grad_norm": 0.9462564587593079, |
|
"learning_rate": 1.3035845892055128e-05, |
|
"loss": 0.761, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.7113079729025534, |
|
"grad_norm": 0.9625933170318604, |
|
"learning_rate": 1.2927140684385248e-05, |
|
"loss": 0.7662, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.7126107347576863, |
|
"grad_norm": 0.9940834641456604, |
|
"learning_rate": 1.2818777986917903e-05, |
|
"loss": 0.783, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.7139134966128192, |
|
"grad_norm": 0.9370533227920532, |
|
"learning_rate": 1.2710759695918178e-05, |
|
"loss": 0.7543, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.715216258467952, |
|
"grad_norm": 0.9274430871009827, |
|
"learning_rate": 1.2603087701624259e-05, |
|
"loss": 0.7507, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.7165190203230849, |
|
"grad_norm": 0.9402637481689453, |
|
"learning_rate": 1.249576388821447e-05, |
|
"loss": 0.7265, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.7178217821782178, |
|
"grad_norm": 0.9373514652252197, |
|
"learning_rate": 1.2388790133774166e-05, |
|
"loss": 0.7459, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.7191245440333507, |
|
"grad_norm": 0.9380558729171753, |
|
"learning_rate": 1.2282168310262988e-05, |
|
"loss": 0.7654, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.7204273058884836, |
|
"grad_norm": 0.9417213201522827, |
|
"learning_rate": 1.2175900283482032e-05, |
|
"loss": 0.7445, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.7217300677436165, |
|
"grad_norm": 0.9496035575866699, |
|
"learning_rate": 1.2069987913041206e-05, |
|
"loss": 0.7587, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.7230328295987494, |
|
"grad_norm": 1.001635193824768, |
|
"learning_rate": 1.1964433052326736e-05, |
|
"loss": 0.739, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.7243355914538823, |
|
"grad_norm": 0.9350429177284241, |
|
"learning_rate": 1.1859237548468664e-05, |
|
"loss": 0.7486, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.7256383533090152, |
|
"grad_norm": 0.9436745643615723, |
|
"learning_rate": 1.1754403242308564e-05, |
|
"loss": 0.7316, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.7269411151641479, |
|
"grad_norm": 0.9753758907318115, |
|
"learning_rate": 1.1649931968367345e-05, |
|
"loss": 0.7746, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.7282438770192808, |
|
"grad_norm": 0.9551537036895752, |
|
"learning_rate": 1.1545825554813115e-05, |
|
"loss": 0.7619, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.7295466388744137, |
|
"grad_norm": 0.97111976146698, |
|
"learning_rate": 1.1442085823429214e-05, |
|
"loss": 0.716, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.7308494007295466, |
|
"grad_norm": 0.9326804280281067, |
|
"learning_rate": 1.1338714589582294e-05, |
|
"loss": 0.7608, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.7321521625846795, |
|
"grad_norm": 0.9929880499839783, |
|
"learning_rate": 1.1235713662190596e-05, |
|
"loss": 0.807, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.7334549244398124, |
|
"grad_norm": 0.9865292310714722, |
|
"learning_rate": 1.113308484369228e-05, |
|
"loss": 0.8049, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.7347576862949453, |
|
"grad_norm": 0.9389541149139404, |
|
"learning_rate": 1.1030829930013894e-05, |
|
"loss": 0.7129, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.7360604481500782, |
|
"grad_norm": 0.9650609493255615, |
|
"learning_rate": 1.0928950710538904e-05, |
|
"loss": 0.7833, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.7373632100052111, |
|
"grad_norm": 0.960464358329773, |
|
"learning_rate": 1.0827448968076446e-05, |
|
"loss": 0.7684, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.7386659718603439, |
|
"grad_norm": 0.9597406983375549, |
|
"learning_rate": 1.072632647883006e-05, |
|
"loss": 0.7959, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.7399687337154768, |
|
"grad_norm": 0.990420401096344, |
|
"learning_rate": 1.0625585012366685e-05, |
|
"loss": 0.8016, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.7412714955706097, |
|
"grad_norm": 0.9574962258338928, |
|
"learning_rate": 1.0525226331585603e-05, |
|
"loss": 0.7619, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.7425742574257426, |
|
"grad_norm": 0.955730676651001, |
|
"learning_rate": 1.0425252192687666e-05, |
|
"loss": 0.7676, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.7438770192808755, |
|
"grad_norm": 0.9210131764411926, |
|
"learning_rate": 1.0325664345144543e-05, |
|
"loss": 0.7221, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.7451797811360084, |
|
"grad_norm": 0.9439947605133057, |
|
"learning_rate": 1.022646453166806e-05, |
|
"loss": 0.7459, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.7464825429911413, |
|
"grad_norm": 0.9055390357971191, |
|
"learning_rate": 1.0127654488179782e-05, |
|
"loss": 0.6872, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.7477853048462741, |
|
"grad_norm": 0.9416336417198181, |
|
"learning_rate": 1.002923594378056e-05, |
|
"loss": 0.7321, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.7490880667014069, |
|
"grad_norm": 0.9532454013824463, |
|
"learning_rate": 9.93121062072033e-06, |
|
"loss": 0.7372, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.7503908285565398, |
|
"grad_norm": 0.9642922878265381, |
|
"learning_rate": 9.833580234367957e-06, |
|
"loss": 0.7588, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.7516935904116727, |
|
"grad_norm": 1.015443205833435, |
|
"learning_rate": 9.73634649318118e-06, |
|
"loss": 0.8255, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.7529963522668056, |
|
"grad_norm": 0.9452521800994873, |
|
"learning_rate": 9.639511098676792e-06, |
|
"loss": 0.7738, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.7542991141219385, |
|
"grad_norm": 0.9970166683197021, |
|
"learning_rate": 9.54307574540079e-06, |
|
"loss": 0.7715, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.7556018759770714, |
|
"grad_norm": 0.9326934814453125, |
|
"learning_rate": 9.447042120898753e-06, |
|
"loss": 0.7268, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.7569046378322043, |
|
"grad_norm": 0.9375012516975403, |
|
"learning_rate": 9.351411905686326e-06, |
|
"loss": 0.758, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.7582073996873372, |
|
"grad_norm": 0.9717380404472351, |
|
"learning_rate": 9.256186773219795e-06, |
|
"loss": 0.7414, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.7595101615424701, |
|
"grad_norm": 0.9810713529586792, |
|
"learning_rate": 9.161368389866807e-06, |
|
"loss": 0.7543, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.7608129233976029, |
|
"grad_norm": 0.9925931692123413, |
|
"learning_rate": 9.066958414877198e-06, |
|
"loss": 0.804, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.7621156852527358, |
|
"grad_norm": 0.9940099716186523, |
|
"learning_rate": 8.972958500353953e-06, |
|
"loss": 0.7954, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.7634184471078687, |
|
"grad_norm": 0.9288432598114014, |
|
"learning_rate": 8.879370291224357e-06, |
|
"loss": 0.6913, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.7647212089630016, |
|
"grad_norm": 0.9307572841644287, |
|
"learning_rate": 8.786195425211104e-06, |
|
"loss": 0.7346, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.7660239708181344, |
|
"grad_norm": 0.9199413657188416, |
|
"learning_rate": 8.693435532803744e-06, |
|
"loss": 0.719, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.7673267326732673, |
|
"grad_norm": 0.9598354697227478, |
|
"learning_rate": 8.601092237230092e-06, |
|
"loss": 0.7243, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.7686294945284002, |
|
"grad_norm": 0.9506739974021912, |
|
"learning_rate": 8.50916715442781e-06, |
|
"loss": 0.7766, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7699322563835331, |
|
"grad_norm": 0.9429502487182617, |
|
"learning_rate": 8.417661893016186e-06, |
|
"loss": 0.753, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.771235018238666, |
|
"grad_norm": 0.9368019700050354, |
|
"learning_rate": 8.326578054267923e-06, |
|
"loss": 0.7599, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.7725377800937988, |
|
"grad_norm": 0.9346226453781128, |
|
"learning_rate": 8.23591723208117e-06, |
|
"loss": 0.7414, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.7738405419489317, |
|
"grad_norm": 0.9846335053443909, |
|
"learning_rate": 8.145681012951602e-06, |
|
"loss": 0.7406, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.7751433038040646, |
|
"grad_norm": 0.9535714983940125, |
|
"learning_rate": 8.055870975944645e-06, |
|
"loss": 0.7352, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.7764460656591975, |
|
"grad_norm": 1.0079196691513062, |
|
"learning_rate": 7.966488692667894e-06, |
|
"loss": 0.8008, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.7777488275143304, |
|
"grad_norm": 0.9309155344963074, |
|
"learning_rate": 7.877535727243531e-06, |
|
"loss": 0.7729, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.7790515893694633, |
|
"grad_norm": 0.914409875869751, |
|
"learning_rate": 7.789013636281057e-06, |
|
"loss": 0.758, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.7803543512245962, |
|
"grad_norm": 0.9628679752349854, |
|
"learning_rate": 7.700923968849942e-06, |
|
"loss": 0.738, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.7816571130797291, |
|
"grad_norm": 1.0039963722229004, |
|
"learning_rate": 7.613268266452608e-06, |
|
"loss": 0.7908, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.7829598749348619, |
|
"grad_norm": 0.9205243587493896, |
|
"learning_rate": 7.526048062997418e-06, |
|
"loss": 0.7338, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.7842626367899947, |
|
"grad_norm": 0.9211382865905762, |
|
"learning_rate": 7.439264884771809e-06, |
|
"loss": 0.7683, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.7855653986451276, |
|
"grad_norm": 0.9287407398223877, |
|
"learning_rate": 7.352920250415621e-06, |
|
"loss": 0.7318, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.7868681605002605, |
|
"grad_norm": 0.9374548196792603, |
|
"learning_rate": 7.26701567089452e-06, |
|
"loss": 0.7373, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.7881709223553934, |
|
"grad_norm": 0.9568080306053162, |
|
"learning_rate": 7.181552649473518e-06, |
|
"loss": 0.7311, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.7894736842105263, |
|
"grad_norm": 0.9446233510971069, |
|
"learning_rate": 7.096532681690725e-06, |
|
"loss": 0.7334, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.7907764460656592, |
|
"grad_norm": 0.9356010556221008, |
|
"learning_rate": 7.0119572553311395e-06, |
|
"loss": 0.7408, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.7920792079207921, |
|
"grad_norm": 0.9869657754898071, |
|
"learning_rate": 6.9278278504006025e-06, |
|
"loss": 0.7982, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.793381969775925, |
|
"grad_norm": 0.9460757374763489, |
|
"learning_rate": 6.844145939099953e-06, |
|
"loss": 0.7567, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.7946847316310578, |
|
"grad_norm": 0.9359385967254639, |
|
"learning_rate": 6.760912985799191e-06, |
|
"loss": 0.7334, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.7959874934861907, |
|
"grad_norm": 0.97806316614151, |
|
"learning_rate": 6.678130447011919e-06, |
|
"loss": 0.7616, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.7972902553413236, |
|
"grad_norm": 0.9713260531425476, |
|
"learning_rate": 6.595799771369833e-06, |
|
"loss": 0.7691, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.7985930171964565, |
|
"grad_norm": 0.9556173086166382, |
|
"learning_rate": 6.5139223995973285e-06, |
|
"loss": 0.775, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.7998957790515894, |
|
"grad_norm": 1.0102840662002563, |
|
"learning_rate": 6.432499764486378e-06, |
|
"loss": 0.7562, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.8011985409067223, |
|
"grad_norm": 0.9334094524383545, |
|
"learning_rate": 6.351533290871356e-06, |
|
"loss": 0.7451, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.8025013027618552, |
|
"grad_norm": 0.9497014880180359, |
|
"learning_rate": 6.271024395604204e-06, |
|
"loss": 0.7844, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.8038040646169881, |
|
"grad_norm": 0.9360858201980591, |
|
"learning_rate": 6.190974487529549e-06, |
|
"loss": 0.7351, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.805106826472121, |
|
"grad_norm": 0.9652137756347656, |
|
"learning_rate": 6.111384967460109e-06, |
|
"loss": 0.7304, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.8064095883272537, |
|
"grad_norm": 1.0080219507217407, |
|
"learning_rate": 6.032257228152174e-06, |
|
"loss": 0.744, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.8077123501823866, |
|
"grad_norm": 0.9796493053436279, |
|
"learning_rate": 5.953592654281195e-06, |
|
"loss": 0.7743, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.8090151120375195, |
|
"grad_norm": 0.9980567097663879, |
|
"learning_rate": 5.875392622417587e-06, |
|
"loss": 0.7472, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.8103178738926524, |
|
"grad_norm": 0.9584153890609741, |
|
"learning_rate": 5.797658501002642e-06, |
|
"loss": 0.7674, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.8116206357477853, |
|
"grad_norm": 0.9498617053031921, |
|
"learning_rate": 5.720391650324549e-06, |
|
"loss": 0.7721, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.8129233976029182, |
|
"grad_norm": 0.9590067863464355, |
|
"learning_rate": 5.643593422494659e-06, |
|
"loss": 0.7732, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.8142261594580511, |
|
"grad_norm": 0.984322726726532, |
|
"learning_rate": 5.567265161423734e-06, |
|
"loss": 0.7804, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.815528921313184, |
|
"grad_norm": 0.9647397398948669, |
|
"learning_rate": 5.491408202798478e-06, |
|
"loss": 0.7912, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.8168316831683168, |
|
"grad_norm": 0.9693639874458313, |
|
"learning_rate": 5.416023874058186e-06, |
|
"loss": 0.788, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.8181344450234497, |
|
"grad_norm": 0.9778026342391968, |
|
"learning_rate": 5.341113494371459e-06, |
|
"loss": 0.7861, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.8194372068785826, |
|
"grad_norm": 0.9577138423919678, |
|
"learning_rate": 5.266678374613173e-06, |
|
"loss": 0.7573, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.8207399687337155, |
|
"grad_norm": 0.9810266494750977, |
|
"learning_rate": 5.192719817341506e-06, |
|
"loss": 0.7461, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8220427305888484, |
|
"grad_norm": 0.9479625821113586, |
|
"learning_rate": 5.119239116775138e-06, |
|
"loss": 0.7256, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.8233454924439813, |
|
"grad_norm": 0.9626233577728271, |
|
"learning_rate": 5.0462375587706544e-06, |
|
"loss": 0.7866, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.8246482542991141, |
|
"grad_norm": 0.9677842855453491, |
|
"learning_rate": 4.973716420799968e-06, |
|
"loss": 0.7406, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.825951016154247, |
|
"grad_norm": 0.9668269753456116, |
|
"learning_rate": 4.9016769719280335e-06, |
|
"loss": 0.7611, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.8272537780093799, |
|
"grad_norm": 0.9700765609741211, |
|
"learning_rate": 4.830120472790607e-06, |
|
"loss": 0.7734, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.8285565398645127, |
|
"grad_norm": 0.9845298528671265, |
|
"learning_rate": 4.7590481755721805e-06, |
|
"loss": 0.7853, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.8298593017196456, |
|
"grad_norm": 0.9391557574272156, |
|
"learning_rate": 4.688461323984089e-06, |
|
"loss": 0.7414, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.8311620635747785, |
|
"grad_norm": 0.9318038821220398, |
|
"learning_rate": 4.618361153242729e-06, |
|
"loss": 0.7686, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.8324648254299114, |
|
"grad_norm": 0.946306049823761, |
|
"learning_rate": 4.548748890047959e-06, |
|
"loss": 0.7654, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.8337675872850443, |
|
"grad_norm": 0.9474648833274841, |
|
"learning_rate": 4.479625752561618e-06, |
|
"loss": 0.78, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.8350703491401772, |
|
"grad_norm": 0.9368411302566528, |
|
"learning_rate": 4.410992950386217e-06, |
|
"loss": 0.7208, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.8363731109953101, |
|
"grad_norm": 0.9060863852500916, |
|
"learning_rate": 4.342851684543785e-06, |
|
"loss": 0.6963, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.837675872850443, |
|
"grad_norm": 1.0060490369796753, |
|
"learning_rate": 4.2752031474548176e-06, |
|
"loss": 0.7775, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.8389786347055758, |
|
"grad_norm": 0.9650358557701111, |
|
"learning_rate": 4.208048522917433e-06, |
|
"loss": 0.7656, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.8402813965607087, |
|
"grad_norm": 0.9482013583183289, |
|
"learning_rate": 4.141388986086671e-06, |
|
"loss": 0.764, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.8415841584158416, |
|
"grad_norm": 0.9347854256629944, |
|
"learning_rate": 4.075225703453885e-06, |
|
"loss": 0.7992, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.8428869202709744, |
|
"grad_norm": 0.9543832540512085, |
|
"learning_rate": 4.009559832826393e-06, |
|
"loss": 0.7108, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.8441896821261073, |
|
"grad_norm": 0.9428550004959106, |
|
"learning_rate": 3.944392523307151e-06, |
|
"loss": 0.7159, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.8454924439812402, |
|
"grad_norm": 0.9633643627166748, |
|
"learning_rate": 3.879724915274667e-06, |
|
"loss": 0.7471, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.8467952058363731, |
|
"grad_norm": 0.9850764274597168, |
|
"learning_rate": 3.815558140363089e-06, |
|
"loss": 0.7794, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.848097967691506, |
|
"grad_norm": 0.9603021740913391, |
|
"learning_rate": 3.7518933214423196e-06, |
|
"loss": 0.7636, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.8494007295466389, |
|
"grad_norm": 0.9841204285621643, |
|
"learning_rate": 3.688731572598446e-06, |
|
"loss": 0.7387, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.8507034914017717, |
|
"grad_norm": 0.9528425335884094, |
|
"learning_rate": 3.626073999114207e-06, |
|
"loss": 0.7731, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.8520062532569046, |
|
"grad_norm": 0.9437130689620972, |
|
"learning_rate": 3.563921697449628e-06, |
|
"loss": 0.7295, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.8533090151120375, |
|
"grad_norm": 0.9948983788490295, |
|
"learning_rate": 3.5022757552228956e-06, |
|
"loss": 0.8296, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.8546117769671704, |
|
"grad_norm": 0.9456334114074707, |
|
"learning_rate": 3.4411372511912584e-06, |
|
"loss": 0.7434, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.8559145388223033, |
|
"grad_norm": 0.9342964291572571, |
|
"learning_rate": 3.3805072552322135e-06, |
|
"loss": 0.7575, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.8572173006774362, |
|
"grad_norm": 0.9293599724769592, |
|
"learning_rate": 3.320386828324716e-06, |
|
"loss": 0.752, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.8585200625325691, |
|
"grad_norm": 0.9268930554389954, |
|
"learning_rate": 3.260777022530678e-06, |
|
"loss": 0.7239, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.859822824387702, |
|
"grad_norm": 0.9146434664726257, |
|
"learning_rate": 3.2016788809765255e-06, |
|
"loss": 0.6973, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.8611255862428349, |
|
"grad_norm": 0.9759405851364136, |
|
"learning_rate": 3.143093437834927e-06, |
|
"loss": 0.7501, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.8624283480979676, |
|
"grad_norm": 0.9663141369819641, |
|
"learning_rate": 3.0850217183067487e-06, |
|
"loss": 0.7851, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.8637311099531005, |
|
"grad_norm": 0.9489357471466064, |
|
"learning_rate": 3.027464738603053e-06, |
|
"loss": 0.7422, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.8650338718082334, |
|
"grad_norm": 0.9676777720451355, |
|
"learning_rate": 2.97042350592735e-06, |
|
"loss": 0.7558, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.8663366336633663, |
|
"grad_norm": 0.9581413865089417, |
|
"learning_rate": 2.913899018458003e-06, |
|
"loss": 0.7383, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.8676393955184992, |
|
"grad_norm": 0.9642050862312317, |
|
"learning_rate": 2.857892265330682e-06, |
|
"loss": 0.7703, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.8689421573736321, |
|
"grad_norm": 0.9483092427253723, |
|
"learning_rate": 2.8024042266211144e-06, |
|
"loss": 0.7391, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.870244919228765, |
|
"grad_norm": 0.9691247940063477, |
|
"learning_rate": 2.7474358733279315e-06, |
|
"loss": 0.7394, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.8715476810838979, |
|
"grad_norm": 1.0193132162094116, |
|
"learning_rate": 2.6929881673556342e-06, |
|
"loss": 0.8215, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.8728504429390307, |
|
"grad_norm": 0.9637281894683838, |
|
"learning_rate": 2.6390620614978235e-06, |
|
"loss": 0.7648, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.8741532047941636, |
|
"grad_norm": 0.901021420955658, |
|
"learning_rate": 2.585658499420474e-06, |
|
"loss": 0.6952, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.8754559666492965, |
|
"grad_norm": 0.974763035774231, |
|
"learning_rate": 2.532778415645441e-06, |
|
"loss": 0.8254, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.8767587285044294, |
|
"grad_norm": 0.9477525353431702, |
|
"learning_rate": 2.480422735534115e-06, |
|
"loss": 0.7004, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.8780614903595623, |
|
"grad_norm": 0.9798365235328674, |
|
"learning_rate": 2.428592375271204e-06, |
|
"loss": 0.7747, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.8793642522146952, |
|
"grad_norm": 0.9412257671356201, |
|
"learning_rate": 2.377288241848741e-06, |
|
"loss": 0.7368, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.8806670140698281, |
|
"grad_norm": 1.0274877548217773, |
|
"learning_rate": 2.3265112330501605e-06, |
|
"loss": 0.7879, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.881969775924961, |
|
"grad_norm": 0.9534391164779663, |
|
"learning_rate": 2.276262237434635e-06, |
|
"loss": 0.7561, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.8832725377800938, |
|
"grad_norm": 0.9891889095306396, |
|
"learning_rate": 2.226542134321508e-06, |
|
"loss": 0.7845, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.8845752996352266, |
|
"grad_norm": 0.9614699482917786, |
|
"learning_rate": 2.1773517937748853e-06, |
|
"loss": 0.7708, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.8858780614903595, |
|
"grad_norm": 0.9503171443939209, |
|
"learning_rate": 2.1286920765884555e-06, |
|
"loss": 0.7286, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.8871808233454924, |
|
"grad_norm": 0.9154089689254761, |
|
"learning_rate": 2.0805638342703788e-06, |
|
"loss": 0.7382, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.8884835852006253, |
|
"grad_norm": 1.0028704404830933, |
|
"learning_rate": 2.0329679090284097e-06, |
|
"loss": 0.7546, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.8897863470557582, |
|
"grad_norm": 0.9367740750312805, |
|
"learning_rate": 1.9859051337551805e-06, |
|
"loss": 0.7183, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.8910891089108911, |
|
"grad_norm": 0.9662940502166748, |
|
"learning_rate": 1.9393763320135793e-06, |
|
"loss": 0.7821, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.892391870766024, |
|
"grad_norm": 1.0092779397964478, |
|
"learning_rate": 1.8933823180223646e-06, |
|
"loss": 0.829, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.8936946326211569, |
|
"grad_norm": 0.963880717754364, |
|
"learning_rate": 1.8479238966419233e-06, |
|
"loss": 0.7684, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.8949973944762898, |
|
"grad_norm": 0.9169959425926208, |
|
"learning_rate": 1.8030018633601743e-06, |
|
"loss": 0.735, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.8963001563314226, |
|
"grad_norm": 0.9603608250617981, |
|
"learning_rate": 1.7586170042786468e-06, |
|
"loss": 0.7843, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.8976029181865555, |
|
"grad_norm": 1.0876539945602417, |
|
"learning_rate": 1.7147700960987448e-06, |
|
"loss": 0.7721, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.8989056800416884, |
|
"grad_norm": 0.9588826894760132, |
|
"learning_rate": 1.6714619061081107e-06, |
|
"loss": 0.7405, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.9002084418968213, |
|
"grad_norm": 0.9661469459533691, |
|
"learning_rate": 1.6286931921672577e-06, |
|
"loss": 0.7357, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.9015112037519541, |
|
"grad_norm": 0.9673269987106323, |
|
"learning_rate": 1.586464702696251e-06, |
|
"loss": 0.7833, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.902813965607087, |
|
"grad_norm": 0.9617257714271545, |
|
"learning_rate": 1.5447771766616573e-06, |
|
"loss": 0.7495, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.9041167274622199, |
|
"grad_norm": 0.9493683576583862, |
|
"learning_rate": 1.5036313435635772e-06, |
|
"loss": 0.7559, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.9054194893173528, |
|
"grad_norm": 0.9315470457077026, |
|
"learning_rate": 1.4630279234229108e-06, |
|
"loss": 0.7869, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.9067222511724856, |
|
"grad_norm": 0.9171879291534424, |
|
"learning_rate": 1.42296762676873e-06, |
|
"loss": 0.7522, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.9080250130276185, |
|
"grad_norm": 0.9789528250694275, |
|
"learning_rate": 1.383451154625862e-06, |
|
"loss": 0.7815, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.9093277748827514, |
|
"grad_norm": 0.9797160625457764, |
|
"learning_rate": 1.344479198502621e-06, |
|
"loss": 0.7783, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.9106305367378843, |
|
"grad_norm": 0.9675379991531372, |
|
"learning_rate": 1.3060524403786963e-06, |
|
"loss": 0.7659, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.9119332985930172, |
|
"grad_norm": 0.9739587306976318, |
|
"learning_rate": 1.2681715526932263e-06, |
|
"loss": 0.7492, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.9132360604481501, |
|
"grad_norm": 0.9374343156814575, |
|
"learning_rate": 1.2308371983330434e-06, |
|
"loss": 0.757, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.914538822303283, |
|
"grad_norm": 0.9765582084655762, |
|
"learning_rate": 1.1940500306210373e-06, |
|
"loss": 0.7895, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.9158415841584159, |
|
"grad_norm": 0.978489339351654, |
|
"learning_rate": 1.1578106933047748e-06, |
|
"loss": 0.76, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.9171443460135488, |
|
"grad_norm": 0.9419280290603638, |
|
"learning_rate": 1.1221198205451804e-06, |
|
"loss": 0.7331, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.9184471078686816, |
|
"grad_norm": 0.9444454908370972, |
|
"learning_rate": 1.08697803690547e-06, |
|
"loss": 0.7567, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.9197498697238145, |
|
"grad_norm": 0.9394042491912842, |
|
"learning_rate": 1.0523859573402333e-06, |
|
"loss": 0.7127, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.9210526315789473, |
|
"grad_norm": 0.9512674808502197, |
|
"learning_rate": 1.0183441871846434e-06, |
|
"loss": 0.7307, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.9223553934340802, |
|
"grad_norm": 0.9305934309959412, |
|
"learning_rate": 9.848533221438651e-07, |
|
"loss": 0.7308, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.9236581552892131, |
|
"grad_norm": 0.9447265863418579, |
|
"learning_rate": 9.519139482826615e-07, |
|
"loss": 0.7392, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.924960917144346, |
|
"grad_norm": 0.9535877704620361, |
|
"learning_rate": 9.195266420150973e-07, |
|
"loss": 0.7627, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9262636789994789, |
|
"grad_norm": 0.9254609942436218, |
|
"learning_rate": 8.876919700944965e-07, |
|
"loss": 0.732, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.9275664408546118, |
|
"grad_norm": 0.9060986042022705, |
|
"learning_rate": 8.564104896034838e-07, |
|
"loss": 0.6953, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.9288692027097447, |
|
"grad_norm": 0.9409303069114685, |
|
"learning_rate": 8.256827479442496e-07, |
|
"loss": 0.7499, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.9301719645648775, |
|
"grad_norm": 0.9318488240242004, |
|
"learning_rate": 7.955092828289957e-07, |
|
"loss": 0.7525, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.9314747264200104, |
|
"grad_norm": 0.9338665008544922, |
|
"learning_rate": 7.658906222704817e-07, |
|
"loss": 0.7476, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.9327774882751433, |
|
"grad_norm": 0.9336937665939331, |
|
"learning_rate": 7.368272845728232e-07, |
|
"loss": 0.7333, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.9340802501302762, |
|
"grad_norm": 0.940027117729187, |
|
"learning_rate": 7.083197783223967e-07, |
|
"loss": 0.7417, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.9353830119854091, |
|
"grad_norm": 1.0003453493118286, |
|
"learning_rate": 6.803686023789557e-07, |
|
"loss": 0.7986, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.936685773840542, |
|
"grad_norm": 0.9539536833763123, |
|
"learning_rate": 6.529742458668944e-07, |
|
"loss": 0.7479, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.9379885356956749, |
|
"grad_norm": 0.9201922416687012, |
|
"learning_rate": 6.261371881666866e-07, |
|
"loss": 0.7007, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.9392912975508078, |
|
"grad_norm": 0.9739858508110046, |
|
"learning_rate": 5.998578989064994e-07, |
|
"loss": 0.7835, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.9405940594059405, |
|
"grad_norm": 0.960044801235199, |
|
"learning_rate": 5.741368379539815e-07, |
|
"loss": 0.7701, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.9418968212610734, |
|
"grad_norm": 0.9575189352035522, |
|
"learning_rate": 5.489744554081943e-07, |
|
"loss": 0.7417, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.9431995831162063, |
|
"grad_norm": 0.944108784198761, |
|
"learning_rate": 5.243711915917684e-07, |
|
"loss": 0.7382, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.9445023449713392, |
|
"grad_norm": 0.9433971643447876, |
|
"learning_rate": 5.003274770431709e-07, |
|
"loss": 0.7354, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.9458051068264721, |
|
"grad_norm": 0.9371641874313354, |
|
"learning_rate": 4.768437325091787e-07, |
|
"loss": 0.7659, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.947107868681605, |
|
"grad_norm": 0.947584867477417, |
|
"learning_rate": 4.539203689375284e-07, |
|
"loss": 0.7791, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.9484106305367379, |
|
"grad_norm": 0.9602799415588379, |
|
"learning_rate": 4.3155778746969655e-07, |
|
"loss": 0.7663, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.9497133923918708, |
|
"grad_norm": 0.9584361910820007, |
|
"learning_rate": 4.0975637943391733e-07, |
|
"loss": 0.7451, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.9510161542470037, |
|
"grad_norm": 0.9507699012756348, |
|
"learning_rate": 3.8851652633830553e-07, |
|
"loss": 0.7773, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.9523189161021365, |
|
"grad_norm": 0.9461563229560852, |
|
"learning_rate": 3.6783859986418864e-07, |
|
"loss": 0.7853, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.9536216779572694, |
|
"grad_norm": 1.0070527791976929, |
|
"learning_rate": 3.477229618596117e-07, |
|
"loss": 0.8189, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.9549244398124023, |
|
"grad_norm": 0.9603460431098938, |
|
"learning_rate": 3.2816996433298716e-07, |
|
"loss": 0.7777, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.9562272016675352, |
|
"grad_norm": 1.0003293752670288, |
|
"learning_rate": 3.091799494469535e-07, |
|
"loss": 0.7592, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.9575299635226681, |
|
"grad_norm": 0.9486605525016785, |
|
"learning_rate": 2.907532495123785e-07, |
|
"loss": 0.769, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.958832725377801, |
|
"grad_norm": 0.9506307244300842, |
|
"learning_rate": 2.7289018698254254e-07, |
|
"loss": 0.7549, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.9601354872329338, |
|
"grad_norm": 0.9510558843612671, |
|
"learning_rate": 2.5559107444750286e-07, |
|
"loss": 0.7559, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.9614382490880667, |
|
"grad_norm": 0.9573996663093567, |
|
"learning_rate": 2.388562146286124e-07, |
|
"loss": 0.7185, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.9627410109431996, |
|
"grad_norm": 0.9380077123641968, |
|
"learning_rate": 2.226859003732412e-07, |
|
"loss": 0.774, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.9640437727983324, |
|
"grad_norm": 0.9628310203552246, |
|
"learning_rate": 2.0708041464962355e-07, |
|
"loss": 0.7434, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.9653465346534653, |
|
"grad_norm": 0.9332795143127441, |
|
"learning_rate": 1.9204003054193673e-07, |
|
"loss": 0.7318, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.9666492965085982, |
|
"grad_norm": 0.9946935176849365, |
|
"learning_rate": 1.775650112455018e-07, |
|
"loss": 0.8075, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.9679520583637311, |
|
"grad_norm": 1.030559778213501, |
|
"learning_rate": 1.6365561006218332e-07, |
|
"loss": 0.7947, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.969254820218864, |
|
"grad_norm": 0.9133543968200684, |
|
"learning_rate": 1.5031207039595817e-07, |
|
"loss": 0.7355, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.9705575820739969, |
|
"grad_norm": 0.9441936016082764, |
|
"learning_rate": 1.3753462574866175e-07, |
|
"loss": 0.7574, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.9718603439291298, |
|
"grad_norm": 0.968670129776001, |
|
"learning_rate": 1.253234997158853e-07, |
|
"loss": 0.8127, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.9731631057842627, |
|
"grad_norm": 0.9429108500480652, |
|
"learning_rate": 1.1367890598308254e-07, |
|
"loss": 0.7615, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.9744658676393955, |
|
"grad_norm": 0.9607934951782227, |
|
"learning_rate": 1.026010483218137e-07, |
|
"loss": 0.7497, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.9757686294945284, |
|
"grad_norm": 0.924278736114502, |
|
"learning_rate": 9.209012058619485e-08, |
|
"loss": 0.6813, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.9770713913496613, |
|
"grad_norm": 0.9327111840248108, |
|
"learning_rate": 8.214630670949185e-08, |
|
"loss": 0.7166, |
|
"step": 750 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 767, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.153106907449131e+18, |
|
"train_batch_size": 6, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|