{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.71563981042654,
  "eval_steps": 500,
  "global_step": 1040,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1895734597156398,
      "grad_norm": 0.597112774848938,
      "learning_rate": 2.9711538461538464e-05,
      "loss": 0.435,
      "step": 10
    },
    {
      "epoch": 0.3791469194312796,
      "grad_norm": 0.16265545785427094,
      "learning_rate": 2.9423076923076923e-05,
      "loss": 0.1842,
      "step": 20
    },
    {
      "epoch": 0.5687203791469194,
      "grad_norm": 0.13190606236457825,
      "learning_rate": 2.9134615384615386e-05,
      "loss": 0.1818,
      "step": 30
    },
    {
      "epoch": 0.7582938388625592,
      "grad_norm": 0.1712672859430313,
      "learning_rate": 2.884615384615385e-05,
      "loss": 0.1685,
      "step": 40
    },
    {
      "epoch": 0.9478672985781991,
      "grad_norm": 0.17000597715377808,
      "learning_rate": 2.855769230769231e-05,
      "loss": 0.1694,
      "step": 50
    },
    {
      "epoch": 1.1374407582938388,
      "grad_norm": 0.04239616543054581,
      "learning_rate": 2.8269230769230768e-05,
      "loss": 0.1564,
      "step": 60
    },
    {
      "epoch": 1.3270142180094786,
      "grad_norm": 0.055856816470623016,
      "learning_rate": 2.798076923076923e-05,
      "loss": 0.1675,
      "step": 70
    },
    {
      "epoch": 1.5165876777251186,
      "grad_norm": 0.05924641340970993,
      "learning_rate": 2.7692307692307694e-05,
      "loss": 0.1751,
      "step": 80
    },
    {
      "epoch": 1.7061611374407581,
      "grad_norm": 0.04663912579417229,
      "learning_rate": 2.7403846153846153e-05,
      "loss": 0.1598,
      "step": 90
    },
    {
      "epoch": 1.8957345971563981,
      "grad_norm": 0.07988695800304413,
      "learning_rate": 2.7115384615384616e-05,
      "loss": 0.1489,
      "step": 100
    },
    {
      "epoch": 2.085308056872038,
      "grad_norm": 0.1717785745859146,
      "learning_rate": 2.682692307692308e-05,
      "loss": 0.1668,
      "step": 110
    },
    {
      "epoch": 2.2748815165876777,
      "grad_norm": 0.15836206078529358,
      "learning_rate": 2.6538461538461538e-05,
      "loss": 0.1543,
      "step": 120
    },
    {
      "epoch": 2.4644549763033177,
      "grad_norm": 0.023397250100970268,
      "learning_rate": 2.625e-05,
      "loss": 0.1686,
      "step": 130
    },
    {
      "epoch": 2.654028436018957,
      "grad_norm": 0.2287532091140747,
      "learning_rate": 2.5961538461538464e-05,
      "loss": 0.1645,
      "step": 140
    },
    {
      "epoch": 2.843601895734597,
      "grad_norm": 0.3427872061729431,
      "learning_rate": 2.5673076923076923e-05,
      "loss": 0.133,
      "step": 150
    },
    {
      "epoch": 3.0331753554502368,
      "grad_norm": 0.307771772146225,
      "learning_rate": 2.5384615384615386e-05,
      "loss": 0.103,
      "step": 160
    },
    {
      "epoch": 3.2227488151658767,
      "grad_norm": 0.30710339546203613,
      "learning_rate": 2.509615384615385e-05,
      "loss": 0.0924,
      "step": 170
    },
    {
      "epoch": 3.4123222748815167,
      "grad_norm": 0.20707634091377258,
      "learning_rate": 2.4807692307692305e-05,
      "loss": 0.0749,
      "step": 180
    },
    {
      "epoch": 3.6018957345971563,
      "grad_norm": 0.2535831034183502,
      "learning_rate": 2.4519230769230768e-05,
      "loss": 0.0704,
      "step": 190
    },
    {
      "epoch": 3.7914691943127963,
      "grad_norm": 0.2260940968990326,
      "learning_rate": 2.423076923076923e-05,
      "loss": 0.0688,
      "step": 200
    },
    {
      "epoch": 3.9810426540284363,
      "grad_norm": 0.21390962600708008,
      "learning_rate": 2.3942307692307694e-05,
      "loss": 0.0585,
      "step": 210
    },
    {
      "epoch": 4.170616113744076,
      "grad_norm": 0.1833769828081131,
      "learning_rate": 2.3653846153846153e-05,
      "loss": 0.054,
      "step": 220
    },
    {
      "epoch": 4.360189573459715,
      "grad_norm": 0.2922685146331787,
      "learning_rate": 2.3365384615384616e-05,
      "loss": 0.0538,
      "step": 230
    },
    {
      "epoch": 4.549763033175355,
      "grad_norm": 0.2239004522562027,
      "learning_rate": 2.307692307692308e-05,
      "loss": 0.0543,
      "step": 240
    },
    {
      "epoch": 4.739336492890995,
      "grad_norm": 0.17778703570365906,
      "learning_rate": 2.278846153846154e-05,
      "loss": 0.0561,
      "step": 250
    },
    {
      "epoch": 4.928909952606635,
      "grad_norm": 0.3559529483318329,
      "learning_rate": 2.25e-05,
      "loss": 0.0493,
      "step": 260
    },
    {
      "epoch": 5.118483412322275,
      "grad_norm": 0.12670384347438812,
      "learning_rate": 2.2211538461538464e-05,
      "loss": 0.049,
      "step": 270
    },
    {
      "epoch": 5.308056872037914,
      "grad_norm": 0.19230520725250244,
      "learning_rate": 2.1923076923076924e-05,
      "loss": 0.0477,
      "step": 280
    },
    {
      "epoch": 5.497630331753554,
      "grad_norm": 0.17324426770210266,
      "learning_rate": 2.1634615384615383e-05,
      "loss": 0.0434,
      "step": 290
    },
    {
      "epoch": 5.687203791469194,
      "grad_norm": 0.310581237077713,
      "learning_rate": 2.1346153846153846e-05,
      "loss": 0.0437,
      "step": 300
    },
    {
      "epoch": 5.876777251184834,
      "grad_norm": 0.14988450706005096,
      "learning_rate": 2.1057692307692306e-05,
      "loss": 0.0401,
      "step": 310
    },
    {
      "epoch": 6.0663507109004735,
      "grad_norm": 0.1281799077987671,
      "learning_rate": 2.076923076923077e-05,
      "loss": 0.0397,
      "step": 320
    },
    {
      "epoch": 6.2559241706161135,
      "grad_norm": 0.1899699568748474,
      "learning_rate": 2.048076923076923e-05,
      "loss": 0.0399,
      "step": 330
    },
    {
      "epoch": 6.4454976303317535,
      "grad_norm": 0.14849646389484406,
      "learning_rate": 2.0192307692307694e-05,
      "loss": 0.0394,
      "step": 340
    },
    {
      "epoch": 6.6350710900473935,
      "grad_norm": 0.1111103966832161,
      "learning_rate": 1.9903846153846154e-05,
      "loss": 0.0361,
      "step": 350
    },
    {
      "epoch": 6.8246445497630335,
      "grad_norm": 0.1525595635175705,
      "learning_rate": 1.9615384615384617e-05,
      "loss": 0.039,
      "step": 360
    },
    {
      "epoch": 7.014218009478673,
      "grad_norm": 0.2204810231924057,
      "learning_rate": 1.932692307692308e-05,
      "loss": 0.0374,
      "step": 370
    },
    {
      "epoch": 7.203791469194313,
      "grad_norm": 0.1169096827507019,
      "learning_rate": 1.903846153846154e-05,
      "loss": 0.0339,
      "step": 380
    },
    {
      "epoch": 7.393364928909953,
      "grad_norm": 0.09804651141166687,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 0.0359,
      "step": 390
    },
    {
      "epoch": 7.5829383886255926,
      "grad_norm": 0.14222651720046997,
      "learning_rate": 1.8461538461538465e-05,
      "loss": 0.0321,
      "step": 400
    },
    {
      "epoch": 7.7725118483412325,
      "grad_norm": 0.0966310054063797,
      "learning_rate": 1.817307692307692e-05,
      "loss": 0.0327,
      "step": 410
    },
    {
      "epoch": 7.9620853080568725,
      "grad_norm": 0.11295436322689056,
      "learning_rate": 1.7884615384615384e-05,
      "loss": 0.0359,
      "step": 420
    },
    {
      "epoch": 8.151658767772512,
      "grad_norm": 0.09200425446033478,
      "learning_rate": 1.7596153846153846e-05,
      "loss": 0.0333,
      "step": 430
    },
    {
      "epoch": 8.341232227488153,
      "grad_norm": 0.08960484713315964,
      "learning_rate": 1.7307692307692306e-05,
      "loss": 0.03,
      "step": 440
    },
    {
      "epoch": 8.530805687203792,
      "grad_norm": 0.1008763536810875,
      "learning_rate": 1.701923076923077e-05,
      "loss": 0.0328,
      "step": 450
    },
    {
      "epoch": 8.72037914691943,
      "grad_norm": 0.18892982602119446,
      "learning_rate": 1.673076923076923e-05,
      "loss": 0.0316,
      "step": 460
    },
    {
      "epoch": 8.909952606635072,
      "grad_norm": 0.16451339423656464,
      "learning_rate": 1.6442307692307695e-05,
      "loss": 0.0301,
      "step": 470
    },
    {
      "epoch": 9.09952606635071,
      "grad_norm": 0.14853718876838684,
      "learning_rate": 1.6153846153846154e-05,
      "loss": 0.0302,
      "step": 480
    },
    {
      "epoch": 9.289099526066352,
      "grad_norm": 0.0903206467628479,
      "learning_rate": 1.5865384615384617e-05,
      "loss": 0.0314,
      "step": 490
    },
    {
      "epoch": 9.47867298578199,
      "grad_norm": 0.16619105637073517,
      "learning_rate": 1.557692307692308e-05,
      "loss": 0.0278,
      "step": 500
    },
    {
      "epoch": 9.66824644549763,
      "grad_norm": 0.11136868596076965,
      "learning_rate": 1.528846153846154e-05,
      "loss": 0.0303,
      "step": 510
    },
    {
      "epoch": 9.85781990521327,
      "grad_norm": 0.09969611465930939,
      "learning_rate": 1.5e-05,
      "loss": 0.0299,
      "step": 520
    },
    {
      "epoch": 10.04739336492891,
      "grad_norm": 0.09576759487390518,
      "learning_rate": 1.4711538461538462e-05,
      "loss": 0.0278,
      "step": 530
    },
    {
      "epoch": 10.23696682464455,
      "grad_norm": 0.1151731088757515,
      "learning_rate": 1.4423076923076924e-05,
      "loss": 0.0269,
      "step": 540
    },
    {
      "epoch": 10.42654028436019,
      "grad_norm": 0.2266397476196289,
      "learning_rate": 1.4134615384615384e-05,
      "loss": 0.0289,
      "step": 550
    },
    {
      "epoch": 10.616113744075829,
      "grad_norm": 0.12434451282024384,
      "learning_rate": 1.3846153846153847e-05,
      "loss": 0.0261,
      "step": 560
    },
    {
      "epoch": 10.80568720379147,
      "grad_norm": 0.1091102734208107,
      "learning_rate": 1.3557692307692308e-05,
      "loss": 0.028,
      "step": 570
    },
    {
      "epoch": 10.995260663507109,
      "grad_norm": 0.08422732353210449,
      "learning_rate": 1.3269230769230769e-05,
      "loss": 0.0256,
      "step": 580
    },
    {
      "epoch": 11.184834123222748,
      "grad_norm": 0.16355375945568085,
      "learning_rate": 1.2980769230769232e-05,
      "loss": 0.0269,
      "step": 590
    },
    {
      "epoch": 11.374407582938389,
      "grad_norm": 0.26286429166793823,
      "learning_rate": 1.2692307692307693e-05,
      "loss": 0.0239,
      "step": 600
    },
    {
      "epoch": 11.563981042654028,
      "grad_norm": 0.0981736034154892,
      "learning_rate": 1.2403846153846153e-05,
      "loss": 0.0291,
      "step": 610
    },
    {
      "epoch": 11.753554502369669,
      "grad_norm": 0.11100564897060394,
      "learning_rate": 1.2115384615384615e-05,
      "loss": 0.0251,
      "step": 620
    },
    {
      "epoch": 11.943127962085308,
      "grad_norm": 0.07715968787670135,
      "learning_rate": 1.1826923076923077e-05,
      "loss": 0.0266,
      "step": 630
    },
    {
      "epoch": 12.132701421800947,
      "grad_norm": 0.12548160552978516,
      "learning_rate": 1.153846153846154e-05,
      "loss": 0.0232,
      "step": 640
    },
    {
      "epoch": 12.322274881516588,
      "grad_norm": 0.11867664009332657,
      "learning_rate": 1.125e-05,
      "loss": 0.0239,
      "step": 650
    },
    {
      "epoch": 12.511848341232227,
      "grad_norm": 0.124052494764328,
      "learning_rate": 1.0961538461538462e-05,
      "loss": 0.0258,
      "step": 660
    },
    {
      "epoch": 12.701421800947868,
      "grad_norm": 0.45495718717575073,
      "learning_rate": 1.0673076923076923e-05,
      "loss": 0.0242,
      "step": 670
    },
    {
      "epoch": 12.890995260663507,
      "grad_norm": 0.0678386464715004,
      "learning_rate": 1.0384615384615384e-05,
      "loss": 0.0242,
      "step": 680
    },
    {
      "epoch": 13.080568720379146,
      "grad_norm": 0.06402502954006195,
      "learning_rate": 1.0096153846153847e-05,
      "loss": 0.0253,
      "step": 690
    },
    {
      "epoch": 13.270142180094787,
      "grad_norm": 0.10899492353200912,
      "learning_rate": 9.807692307692308e-06,
      "loss": 0.0234,
      "step": 700
    },
    {
      "epoch": 13.459715639810426,
      "grad_norm": 0.09529020637273788,
      "learning_rate": 9.51923076923077e-06,
      "loss": 0.0227,
      "step": 710
    },
    {
      "epoch": 13.649289099526067,
      "grad_norm": 0.10726472735404968,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.0226,
      "step": 720
    },
    {
      "epoch": 13.838862559241706,
      "grad_norm": 0.05985069274902344,
      "learning_rate": 8.942307692307692e-06,
      "loss": 0.0211,
      "step": 730
    },
    {
      "epoch": 14.028436018957345,
      "grad_norm": 0.12114176154136658,
      "learning_rate": 8.653846153846153e-06,
      "loss": 0.0245,
      "step": 740
    },
    {
      "epoch": 14.218009478672986,
      "grad_norm": 0.11160324513912201,
      "learning_rate": 8.365384615384616e-06,
      "loss": 0.0196,
      "step": 750
    },
    {
      "epoch": 14.407582938388625,
      "grad_norm": 0.087327741086483,
      "learning_rate": 8.076923076923077e-06,
      "loss": 0.0193,
      "step": 760
    },
    {
      "epoch": 14.597156398104266,
      "grad_norm": 0.06679825484752655,
      "learning_rate": 7.78846153846154e-06,
      "loss": 0.0206,
      "step": 770
    },
    {
      "epoch": 14.786729857819905,
      "grad_norm": 0.14305196702480316,
      "learning_rate": 7.5e-06,
      "loss": 0.0194,
      "step": 780
    },
    {
      "epoch": 14.976303317535544,
      "grad_norm": 0.11618325114250183,
      "learning_rate": 7.211538461538462e-06,
      "loss": 0.0257,
      "step": 790
    },
    {
      "epoch": 15.165876777251185,
      "grad_norm": 0.06295182555913925,
      "learning_rate": 6.923076923076923e-06,
      "loss": 0.0184,
      "step": 800
    },
    {
      "epoch": 15.355450236966824,
      "grad_norm": 0.08175351470708847,
      "learning_rate": 6.6346153846153846e-06,
      "loss": 0.0209,
      "step": 810
    },
    {
      "epoch": 15.545023696682465,
      "grad_norm": 0.11890088021755219,
      "learning_rate": 6.3461538461538466e-06,
      "loss": 0.0215,
      "step": 820
    },
    {
      "epoch": 15.734597156398104,
      "grad_norm": 0.17722468078136444,
      "learning_rate": 6.057692307692308e-06,
      "loss": 0.0203,
      "step": 830
    },
    {
      "epoch": 15.924170616113743,
      "grad_norm": 0.11907941102981567,
      "learning_rate": 5.76923076923077e-06,
      "loss": 0.021,
      "step": 840
    },
    {
      "epoch": 16.113744075829384,
      "grad_norm": 0.1066436693072319,
      "learning_rate": 5.480769230769231e-06,
      "loss": 0.0218,
      "step": 850
    },
    {
      "epoch": 16.303317535545023,
      "grad_norm": 0.053932033479213715,
      "learning_rate": 5.192307692307692e-06,
      "loss": 0.0184,
      "step": 860
    },
    {
      "epoch": 16.492890995260662,
      "grad_norm": 0.09704640507698059,
      "learning_rate": 4.903846153846154e-06,
      "loss": 0.0191,
      "step": 870
    },
    {
      "epoch": 16.682464454976305,
      "grad_norm": 0.10457386821508408,
      "learning_rate": 4.615384615384616e-06,
      "loss": 0.0197,
      "step": 880
    },
    {
      "epoch": 16.872037914691944,
      "grad_norm": 0.08160708844661713,
      "learning_rate": 4.3269230769230765e-06,
      "loss": 0.0204,
      "step": 890
    },
    {
      "epoch": 17.061611374407583,
      "grad_norm": 0.1239229142665863,
      "learning_rate": 4.0384615384615385e-06,
      "loss": 0.0208,
      "step": 900
    },
    {
      "epoch": 17.251184834123222,
      "grad_norm": 0.0887596532702446,
      "learning_rate": 3.75e-06,
      "loss": 0.017,
      "step": 910
    },
    {
      "epoch": 17.44075829383886,
      "grad_norm": 0.09922682493925095,
      "learning_rate": 3.4615384615384617e-06,
      "loss": 0.0223,
      "step": 920
    },
    {
      "epoch": 17.6303317535545,
      "grad_norm": 0.0974949300289154,
      "learning_rate": 3.1730769230769233e-06,
      "loss": 0.0191,
      "step": 930
    },
    {
      "epoch": 17.819905213270143,
      "grad_norm": 0.10244361311197281,
      "learning_rate": 2.884615384615385e-06,
      "loss": 0.0198,
      "step": 940
    },
    {
      "epoch": 18.009478672985782,
      "grad_norm": 0.06859322637319565,
      "learning_rate": 2.596153846153846e-06,
      "loss": 0.0188,
      "step": 950
    },
    {
      "epoch": 18.19905213270142,
      "grad_norm": 0.09902096539735794,
      "learning_rate": 2.307692307692308e-06,
      "loss": 0.0196,
      "step": 960
    },
    {
      "epoch": 18.38862559241706,
      "grad_norm": 0.058132074773311615,
      "learning_rate": 2.0192307692307692e-06,
      "loss": 0.0174,
      "step": 970
    },
    {
      "epoch": 18.578199052132703,
      "grad_norm": 0.11522402614355087,
      "learning_rate": 1.7307692307692308e-06,
      "loss": 0.0186,
      "step": 980
    },
    {
      "epoch": 18.767772511848342,
      "grad_norm": 0.07329108566045761,
      "learning_rate": 1.4423076923076924e-06,
      "loss": 0.0203,
      "step": 990
    },
    {
      "epoch": 18.95734597156398,
      "grad_norm": 0.08888276666402817,
      "learning_rate": 1.153846153846154e-06,
      "loss": 0.0182,
      "step": 1000
    },
    {
      "epoch": 19.14691943127962,
      "grad_norm": 0.16812676191329956,
      "learning_rate": 8.653846153846154e-07,
      "loss": 0.0201,
      "step": 1010
    },
    {
      "epoch": 19.33649289099526,
      "grad_norm": 0.051739402115345,
      "learning_rate": 5.76923076923077e-07,
      "loss": 0.0175,
      "step": 1020
    },
    {
      "epoch": 19.5260663507109,
      "grad_norm": 0.15502548217773438,
      "learning_rate": 2.884615384615385e-07,
      "loss": 0.0211,
      "step": 1030
    },
    {
      "epoch": 19.71563981042654,
      "grad_norm": 0.10121985524892807,
      "learning_rate": 0.0,
      "loss": 0.0202,
      "step": 1040
    }
  ],
  "logging_steps": 10,
  "max_steps": 1040,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.39069062991872e+19,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}