{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.7172314864622557,
  "eval_steps": 2000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 5.03125,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 1.4133,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 9.0,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.5815,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 6.5625,
      "learning_rate": 9e-07,
      "loss": 1.3232,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.875,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.3635,
      "step": 40
    },
    {
      "epoch": 0.02,
      "grad_norm": 6.21875,
      "learning_rate": 1.5e-06,
      "loss": 1.3467,
      "step": 50
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.4375,
      "learning_rate": 1.8e-06,
      "loss": 1.3232,
      "step": 60
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.9375,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 1.3657,
      "step": 70
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.0625,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.3112,
      "step": 80
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.53125,
      "learning_rate": 2.7e-06,
      "loss": 1.3052,
      "step": 90
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.34375,
      "learning_rate": 3e-06,
      "loss": 1.3857,
      "step": 100
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.9375,
      "learning_rate": 3.3e-06,
      "loss": 1.4426,
      "step": 110
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.875,
      "learning_rate": 3.6e-06,
      "loss": 1.2005,
      "step": 120
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.65625,
      "learning_rate": 3.9e-06,
      "loss": 1.2581,
      "step": 130
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.09375,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 1.3267,
      "step": 140
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.46875,
      "learning_rate": 4.5e-06,
      "loss": 1.3335,
      "step": 150
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.59375,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.2575,
      "step": 160
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.8125,
      "learning_rate": 5.1e-06,
      "loss": 1.2955,
      "step": 170
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.1875,
      "learning_rate": 5.4e-06,
      "loss": 1.1892,
      "step": 180
    },
    {
      "epoch": 0.07,
      "grad_norm": 5.09375,
      "learning_rate": 5.7000000000000005e-06,
      "loss": 1.1546,
      "step": 190
    },
    {
      "epoch": 0.07,
      "grad_norm": 5.0,
      "learning_rate": 6e-06,
      "loss": 1.0558,
      "step": 200
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.65625,
      "learning_rate": 6.3e-06,
      "loss": 1.0267,
      "step": 210
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.3125,
      "learning_rate": 6.6e-06,
      "loss": 1.1234,
      "step": 220
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.0,
      "learning_rate": 6.900000000000001e-06,
      "loss": 1.0501,
      "step": 230
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.15625,
      "learning_rate": 7.2e-06,
      "loss": 1.1591,
      "step": 240
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.53125,
      "learning_rate": 7.5e-06,
      "loss": 0.9678,
      "step": 250
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.0625,
      "learning_rate": 7.8e-06,
      "loss": 1.1697,
      "step": 260
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.53125,
      "learning_rate": 8.1e-06,
      "loss": 0.8972,
      "step": 270
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.78125,
      "learning_rate": 8.400000000000001e-06,
      "loss": 1.0512,
      "step": 280
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.9375,
      "learning_rate": 8.7e-06,
      "loss": 0.9511,
      "step": 290
    },
    {
      "epoch": 0.11,
      "grad_norm": 4.09375,
      "learning_rate": 9e-06,
      "loss": 1.0023,
      "step": 300
    },
    {
      "epoch": 0.11,
      "grad_norm": 4.1875,
      "learning_rate": 9.3e-06,
      "loss": 1.0908,
      "step": 310
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.875,
      "learning_rate": 9.600000000000001e-06,
      "loss": 1.0276,
      "step": 320
    },
    {
      "epoch": 0.12,
      "grad_norm": 4.15625,
      "learning_rate": 9.9e-06,
      "loss": 0.9734,
      "step": 330
    },
    {
      "epoch": 0.12,
      "grad_norm": 4.625,
      "learning_rate": 1.02e-05,
      "loss": 0.8525,
      "step": 340
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.546875,
      "learning_rate": 1.05e-05,
      "loss": 1.0789,
      "step": 350
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.21875,
      "learning_rate": 1.08e-05,
      "loss": 0.981,
      "step": 360
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.5625,
      "learning_rate": 1.11e-05,
      "loss": 1.0341,
      "step": 370
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.8125,
      "learning_rate": 1.1400000000000001e-05,
      "loss": 0.8214,
      "step": 380
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.96875,
      "learning_rate": 1.1700000000000001e-05,
      "loss": 0.9675,
      "step": 390
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.125,
      "learning_rate": 1.2e-05,
      "loss": 1.2041,
      "step": 400
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.15625,
      "learning_rate": 1.2299999999999999e-05,
      "loss": 1.0061,
      "step": 410
    },
    {
      "epoch": 0.15,
      "grad_norm": 3.328125,
      "learning_rate": 1.26e-05,
      "loss": 0.8642,
      "step": 420
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.4375,
      "learning_rate": 1.29e-05,
      "loss": 1.0072,
      "step": 430
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.34375,
      "learning_rate": 1.32e-05,
      "loss": 0.9096,
      "step": 440
    },
    {
      "epoch": 0.16,
      "grad_norm": 4.125,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 1.002,
      "step": 450
    },
    {
      "epoch": 0.16,
      "grad_norm": 4.78125,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.8901,
      "step": 460
    },
    {
      "epoch": 0.17,
      "grad_norm": 3.3125,
      "learning_rate": 1.4099999999999999e-05,
      "loss": 0.8891,
      "step": 470
    },
    {
      "epoch": 0.17,
      "grad_norm": 5.09375,
      "learning_rate": 1.44e-05,
      "loss": 0.9389,
      "step": 480
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.28125,
      "learning_rate": 1.47e-05,
      "loss": 0.8838,
      "step": 490
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.765625,
      "learning_rate": 1.5e-05,
      "loss": 1.0191,
      "step": 500
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.328125,
      "learning_rate": 1.4984210526315789e-05,
      "loss": 0.9197,
      "step": 510
    },
    {
      "epoch": 0.19,
      "grad_norm": 4.90625,
      "learning_rate": 1.496842105263158e-05,
      "loss": 0.9614,
      "step": 520
    },
    {
      "epoch": 0.19,
      "grad_norm": 4.84375,
      "learning_rate": 1.4952631578947368e-05,
      "loss": 0.8612,
      "step": 530
    },
    {
      "epoch": 0.19,
      "grad_norm": 3.765625,
      "learning_rate": 1.4936842105263158e-05,
      "loss": 0.91,
      "step": 540
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.15625,
      "learning_rate": 1.4921052631578947e-05,
      "loss": 0.8851,
      "step": 550
    },
    {
      "epoch": 0.2,
      "grad_norm": 4.0625,
      "learning_rate": 1.4905263157894737e-05,
      "loss": 0.9448,
      "step": 560
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.859375,
      "learning_rate": 1.4889473684210526e-05,
      "loss": 0.8091,
      "step": 570
    },
    {
      "epoch": 0.21,
      "grad_norm": 4.09375,
      "learning_rate": 1.4873684210526315e-05,
      "loss": 0.8932,
      "step": 580
    },
    {
      "epoch": 0.21,
      "grad_norm": 5.375,
      "learning_rate": 1.4857894736842107e-05,
      "loss": 0.966,
      "step": 590
    },
    {
      "epoch": 0.22,
      "grad_norm": 3.875,
      "learning_rate": 1.4842105263157895e-05,
      "loss": 0.8036,
      "step": 600
    },
    {
      "epoch": 0.22,
      "grad_norm": 3.9375,
      "learning_rate": 1.4826315789473686e-05,
      "loss": 0.9743,
      "step": 610
    },
    {
      "epoch": 0.22,
      "grad_norm": 3.09375,
      "learning_rate": 1.4810526315789474e-05,
      "loss": 0.8383,
      "step": 620
    },
    {
      "epoch": 0.23,
      "grad_norm": 4.3125,
      "learning_rate": 1.4794736842105265e-05,
      "loss": 0.8982,
      "step": 630
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.90625,
      "learning_rate": 1.4778947368421053e-05,
      "loss": 0.8112,
      "step": 640
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.96875,
      "learning_rate": 1.4763157894736842e-05,
      "loss": 0.8426,
      "step": 650
    },
    {
      "epoch": 0.24,
      "grad_norm": 4.03125,
      "learning_rate": 1.4747368421052632e-05,
      "loss": 0.8399,
      "step": 660
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.125,
      "learning_rate": 1.4731578947368421e-05,
      "loss": 0.9043,
      "step": 670
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.5,
      "learning_rate": 1.4715789473684211e-05,
      "loss": 0.8521,
      "step": 680
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.46875,
      "learning_rate": 1.47e-05,
      "loss": 0.9079,
      "step": 690
    },
    {
      "epoch": 0.25,
      "grad_norm": 4.4375,
      "learning_rate": 1.468421052631579e-05,
      "loss": 0.7998,
      "step": 700
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.46875,
      "learning_rate": 1.4668421052631579e-05,
      "loss": 0.8204,
      "step": 710
    },
    {
      "epoch": 0.26,
      "grad_norm": 3.3125,
      "learning_rate": 1.4652631578947367e-05,
      "loss": 0.8764,
      "step": 720
    },
    {
      "epoch": 0.26,
      "grad_norm": 6.28125,
      "learning_rate": 1.4636842105263158e-05,
      "loss": 0.9226,
      "step": 730
    },
    {
      "epoch": 0.27,
      "grad_norm": 3.921875,
      "learning_rate": 1.4621052631578946e-05,
      "loss": 0.8921,
      "step": 740
    },
    {
      "epoch": 0.27,
      "grad_norm": 3.671875,
      "learning_rate": 1.4605263157894737e-05,
      "loss": 0.8764,
      "step": 750
    },
    {
      "epoch": 0.27,
      "grad_norm": 3.671875,
      "learning_rate": 1.4589473684210527e-05,
      "loss": 0.8475,
      "step": 760
    },
    {
      "epoch": 0.28,
      "grad_norm": 3.921875,
      "learning_rate": 1.4573684210526317e-05,
      "loss": 0.9297,
      "step": 770
    },
    {
      "epoch": 0.28,
      "grad_norm": 3.109375,
      "learning_rate": 1.4557894736842106e-05,
      "loss": 0.9052,
      "step": 780
    },
    {
      "epoch": 0.28,
      "grad_norm": 4.40625,
      "learning_rate": 1.4542105263157895e-05,
      "loss": 0.8695,
      "step": 790
    },
    {
      "epoch": 0.29,
      "grad_norm": 4.25,
      "learning_rate": 1.4526315789473685e-05,
      "loss": 0.9214,
      "step": 800
    },
    {
      "epoch": 0.29,
      "grad_norm": 3.25,
      "learning_rate": 1.4510526315789474e-05,
      "loss": 0.8403,
      "step": 810
    },
    {
      "epoch": 0.29,
      "grad_norm": 3.046875,
      "learning_rate": 1.4494736842105264e-05,
      "loss": 0.918,
      "step": 820
    },
    {
      "epoch": 0.3,
      "grad_norm": 5.53125,
      "learning_rate": 1.4478947368421053e-05,
      "loss": 0.8599,
      "step": 830
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.875,
      "learning_rate": 1.4463157894736843e-05,
      "loss": 0.954,
      "step": 840
    },
    {
      "epoch": 0.3,
      "grad_norm": 4.21875,
      "learning_rate": 1.4447368421052632e-05,
      "loss": 0.8568,
      "step": 850
    },
    {
      "epoch": 0.31,
      "grad_norm": 4.03125,
      "learning_rate": 1.443157894736842e-05,
      "loss": 0.9043,
      "step": 860
    },
    {
      "epoch": 0.31,
      "grad_norm": 4.0,
      "learning_rate": 1.441578947368421e-05,
      "loss": 0.9108,
      "step": 870
    },
    {
      "epoch": 0.32,
      "grad_norm": 4.28125,
      "learning_rate": 1.44e-05,
      "loss": 0.8723,
      "step": 880
    },
    {
      "epoch": 0.32,
      "grad_norm": 3.859375,
      "learning_rate": 1.438421052631579e-05,
      "loss": 0.8514,
      "step": 890
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.59375,
      "learning_rate": 1.4368421052631578e-05,
      "loss": 0.8584,
      "step": 900
    },
    {
      "epoch": 0.33,
      "grad_norm": 3.9375,
      "learning_rate": 1.4352631578947369e-05,
      "loss": 0.8763,
      "step": 910
    },
    {
      "epoch": 0.33,
      "grad_norm": 4.4375,
      "learning_rate": 1.4336842105263159e-05,
      "loss": 0.7777,
      "step": 920
    },
    {
      "epoch": 0.33,
      "grad_norm": 3.671875,
      "learning_rate": 1.4321052631578948e-05,
      "loss": 0.9353,
      "step": 930
    },
    {
      "epoch": 0.34,
      "grad_norm": 3.4375,
      "learning_rate": 1.4305263157894738e-05,
      "loss": 0.8708,
      "step": 940
    },
    {
      "epoch": 0.34,
      "grad_norm": 3.828125,
      "learning_rate": 1.4289473684210527e-05,
      "loss": 0.8801,
      "step": 950
    },
    {
      "epoch": 0.34,
      "grad_norm": 3.5,
      "learning_rate": 1.4273684210526317e-05,
      "loss": 0.8625,
      "step": 960
    },
    {
      "epoch": 0.35,
      "grad_norm": 5.03125,
      "learning_rate": 1.4257894736842106e-05,
      "loss": 0.875,
      "step": 970
    },
    {
      "epoch": 0.35,
      "grad_norm": 5.21875,
      "learning_rate": 1.4242105263157896e-05,
      "loss": 0.9445,
      "step": 980
    },
    {
      "epoch": 0.36,
      "grad_norm": 3.421875,
      "learning_rate": 1.4226315789473685e-05,
      "loss": 0.7317,
      "step": 990
    },
    {
      "epoch": 0.36,
      "grad_norm": 3.6875,
      "learning_rate": 1.4210526315789473e-05,
      "loss": 0.8303,
      "step": 1000
    },
    {
      "epoch": 0.36,
      "grad_norm": 4.40625,
      "learning_rate": 1.4194736842105264e-05,
      "loss": 0.8433,
      "step": 1010
    },
    {
      "epoch": 0.37,
      "grad_norm": 3.484375,
      "learning_rate": 1.4178947368421052e-05,
      "loss": 0.8713,
      "step": 1020
    },
    {
      "epoch": 0.37,
      "grad_norm": 3.890625,
      "learning_rate": 1.4163157894736843e-05,
      "loss": 0.8411,
      "step": 1030
    },
    {
      "epoch": 0.37,
      "grad_norm": 4.3125,
      "learning_rate": 1.4147368421052631e-05,
      "loss": 0.8757,
      "step": 1040
    },
    {
      "epoch": 0.38,
      "grad_norm": 3.3125,
      "learning_rate": 1.4131578947368422e-05,
      "loss": 0.7574,
      "step": 1050
    },
    {
      "epoch": 0.38,
      "grad_norm": 5.21875,
      "learning_rate": 1.411578947368421e-05,
      "loss": 0.8089,
      "step": 1060
    },
    {
      "epoch": 0.38,
      "grad_norm": 2.890625,
      "learning_rate": 1.4099999999999999e-05,
      "loss": 0.7259,
      "step": 1070
    },
    {
      "epoch": 0.39,
      "grad_norm": 3.125,
      "learning_rate": 1.408421052631579e-05,
      "loss": 0.7544,
      "step": 1080
    },
    {
      "epoch": 0.39,
      "grad_norm": 3.375,
      "learning_rate": 1.406842105263158e-05,
      "loss": 0.9522,
      "step": 1090
    },
    {
      "epoch": 0.39,
      "grad_norm": 4.78125,
      "learning_rate": 1.405263157894737e-05,
      "loss": 0.8574,
      "step": 1100
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.78125,
      "learning_rate": 1.4036842105263158e-05,
      "loss": 0.9539,
      "step": 1110
    },
    {
      "epoch": 0.4,
      "grad_norm": 3.34375,
      "learning_rate": 1.4021052631578949e-05,
      "loss": 0.8365,
      "step": 1120
    },
    {
      "epoch": 0.41,
      "grad_norm": 5.34375,
      "learning_rate": 1.4005263157894737e-05,
      "loss": 0.7868,
      "step": 1130
    },
    {
      "epoch": 0.41,
      "grad_norm": 3.515625,
      "learning_rate": 1.3989473684210526e-05,
      "loss": 0.9054,
      "step": 1140
    },
    {
      "epoch": 0.41,
      "grad_norm": 3.015625,
      "learning_rate": 1.3973684210526316e-05,
      "loss": 0.841,
      "step": 1150
    },
    {
      "epoch": 0.42,
      "grad_norm": 4.21875,
      "learning_rate": 1.3957894736842105e-05,
      "loss": 0.8132,
      "step": 1160
    },
    {
      "epoch": 0.42,
      "grad_norm": 3.515625,
      "learning_rate": 1.3942105263157895e-05,
      "loss": 0.8678,
      "step": 1170
    },
    {
      "epoch": 0.42,
      "grad_norm": 3.4375,
      "learning_rate": 1.3926315789473684e-05,
      "loss": 0.8434,
      "step": 1180
    },
    {
      "epoch": 0.43,
      "grad_norm": 4.3125,
      "learning_rate": 1.3910526315789474e-05,
      "loss": 0.7466,
      "step": 1190
    },
    {
      "epoch": 0.43,
      "grad_norm": 3.5625,
      "learning_rate": 1.3894736842105263e-05,
      "loss": 0.9028,
      "step": 1200
    },
    {
      "epoch": 0.43,
      "grad_norm": 3.203125,
      "learning_rate": 1.3878947368421052e-05,
      "loss": 0.9181,
      "step": 1210
    },
    {
      "epoch": 0.44,
      "grad_norm": 3.703125,
      "learning_rate": 1.3863157894736842e-05,
      "loss": 0.9427,
      "step": 1220
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.609375,
      "learning_rate": 1.384736842105263e-05,
      "loss": 0.7861,
      "step": 1230
    },
    {
      "epoch": 0.44,
      "grad_norm": 5.71875,
      "learning_rate": 1.3831578947368421e-05,
      "loss": 0.8857,
      "step": 1240
    },
    {
      "epoch": 0.45,
      "grad_norm": 3.140625,
      "learning_rate": 1.3815789473684211e-05,
      "loss": 0.8447,
      "step": 1250
    },
    {
      "epoch": 0.45,
      "grad_norm": 3.140625,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.7963,
      "step": 1260
    },
    {
      "epoch": 0.46,
      "grad_norm": 4.03125,
      "learning_rate": 1.378421052631579e-05,
      "loss": 0.7099,
      "step": 1270
    },
    {
      "epoch": 0.46,
      "grad_norm": 3.765625,
      "learning_rate": 1.3768421052631579e-05,
      "loss": 0.8737,
      "step": 1280
    },
    {
      "epoch": 0.46,
      "grad_norm": 3.921875,
      "learning_rate": 1.375263157894737e-05,
      "loss": 0.8896,
      "step": 1290
    },
    {
      "epoch": 0.47,
      "grad_norm": 3.640625,
      "learning_rate": 1.3736842105263158e-05,
      "loss": 0.8224,
      "step": 1300
    },
    {
      "epoch": 0.47,
      "grad_norm": 3.796875,
      "learning_rate": 1.3721052631578948e-05,
      "loss": 0.7736,
      "step": 1310
    },
    {
      "epoch": 0.47,
      "grad_norm": 4.65625,
      "learning_rate": 1.3705263157894737e-05,
      "loss": 0.8416,
      "step": 1320
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.828125,
      "learning_rate": 1.3689473684210527e-05,
      "loss": 0.7371,
      "step": 1330
    },
    {
      "epoch": 0.48,
      "grad_norm": 3.3125,
      "learning_rate": 1.3673684210526316e-05,
      "loss": 0.9159,
      "step": 1340
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.625,
      "learning_rate": 1.3657894736842106e-05,
      "loss": 0.8392,
      "step": 1350
    },
    {
      "epoch": 0.49,
      "grad_norm": 3.609375,
      "learning_rate": 1.3642105263157895e-05,
      "loss": 0.7774,
      "step": 1360
    },
    {
      "epoch": 0.49,
      "grad_norm": 4.1875,
      "learning_rate": 1.3626315789473684e-05,
      "loss": 0.9497,
      "step": 1370
    },
    {
      "epoch": 0.49,
      "grad_norm": 2.71875,
      "learning_rate": 1.3610526315789474e-05,
      "loss": 0.8797,
      "step": 1380
    },
    {
      "epoch": 0.5,
      "grad_norm": 2.265625,
      "learning_rate": 1.3594736842105263e-05,
      "loss": 0.8666,
      "step": 1390
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.96875,
      "learning_rate": 1.3578947368421053e-05,
      "loss": 0.9691,
      "step": 1400
    },
    {
      "epoch": 0.51,
      "grad_norm": 5.21875,
      "learning_rate": 1.3563157894736842e-05,
      "loss": 0.796,
      "step": 1410
    },
    {
      "epoch": 0.51,
      "grad_norm": 2.296875,
      "learning_rate": 1.3547368421052634e-05,
      "loss": 0.8152,
      "step": 1420
    },
    {
      "epoch": 0.51,
      "grad_norm": 4.0625,
      "learning_rate": 1.3531578947368422e-05,
      "loss": 0.8702,
      "step": 1430
    },
    {
      "epoch": 0.52,
      "grad_norm": 3.765625,
      "learning_rate": 1.3515789473684211e-05,
      "loss": 0.8582,
      "step": 1440
    },
    {
      "epoch": 0.52,
      "grad_norm": 4.03125,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 0.8282,
      "step": 1450
    },
    {
      "epoch": 0.52,
      "grad_norm": 3.859375,
      "learning_rate": 1.348421052631579e-05,
      "loss": 0.9256,
      "step": 1460
    },
    {
      "epoch": 0.53,
      "grad_norm": 4.6875,
      "learning_rate": 1.346842105263158e-05,
      "loss": 0.9328,
      "step": 1470
    },
    {
      "epoch": 0.53,
      "grad_norm": 3.5,
      "learning_rate": 1.3452631578947369e-05,
      "loss": 0.8571,
      "step": 1480
    },
    {
      "epoch": 0.53,
      "grad_norm": 3.609375,
      "learning_rate": 1.343684210526316e-05,
      "loss": 0.8293,
      "step": 1490
    },
    {
      "epoch": 0.54,
      "grad_norm": 2.96875,
      "learning_rate": 1.3421052631578948e-05,
      "loss": 0.8282,
      "step": 1500
    },
    {
      "epoch": 0.54,
      "grad_norm": 3.8125,
      "learning_rate": 1.3405263157894736e-05,
      "loss": 0.8202,
      "step": 1510
    },
    {
      "epoch": 0.55,
      "grad_norm": 3.609375,
      "learning_rate": 1.3389473684210527e-05,
      "loss": 0.9545,
      "step": 1520
    },
    {
      "epoch": 0.55,
      "grad_norm": 4.625,
      "learning_rate": 1.3373684210526315e-05,
      "loss": 0.8083,
      "step": 1530
    },
    {
      "epoch": 0.55,
      "grad_norm": 3.84375,
      "learning_rate": 1.3357894736842106e-05,
      "loss": 0.8865,
      "step": 1540
    },
    {
      "epoch": 0.56,
      "grad_norm": 3.875,
      "learning_rate": 1.3342105263157894e-05,
      "loss": 0.7867,
      "step": 1550
    },
    {
      "epoch": 0.56,
      "grad_norm": 3.203125,
      "learning_rate": 1.3326315789473685e-05,
      "loss": 0.7882,
      "step": 1560
    },
    {
      "epoch": 0.56,
      "grad_norm": 3.90625,
      "learning_rate": 1.3310526315789473e-05,
      "loss": 0.7837,
      "step": 1570
    },
    {
      "epoch": 0.57,
      "grad_norm": 5.125,
      "learning_rate": 1.3294736842105262e-05,
      "loss": 0.9198,
      "step": 1580
    },
    {
      "epoch": 0.57,
      "grad_norm": 3.4375,
      "learning_rate": 1.3278947368421054e-05,
      "loss": 0.849,
      "step": 1590
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.796875,
      "learning_rate": 1.3263157894736843e-05,
      "loss": 0.894,
      "step": 1600
    },
    {
      "epoch": 0.58,
      "grad_norm": 2.953125,
      "learning_rate": 1.3247368421052633e-05,
      "loss": 0.8318,
      "step": 1610
    },
    {
      "epoch": 0.58,
      "grad_norm": 3.703125,
      "learning_rate": 1.3231578947368422e-05,
      "loss": 0.7787,
      "step": 1620
    },
    {
      "epoch": 0.58,
      "grad_norm": 5.0,
      "learning_rate": 1.3215789473684212e-05,
      "loss": 0.8853,
      "step": 1630
    },
    {
      "epoch": 0.59,
      "grad_norm": 4.1875,
      "learning_rate": 1.32e-05,
      "loss": 0.9163,
      "step": 1640
    },
    {
      "epoch": 0.59,
      "grad_norm": 3.6875,
      "learning_rate": 1.318421052631579e-05,
      "loss": 0.8755,
      "step": 1650
    },
    {
      "epoch": 0.6,
      "grad_norm": 3.71875,
      "learning_rate": 1.316842105263158e-05,
      "loss": 0.8891,
      "step": 1660
    },
    {
      "epoch": 0.6,
      "grad_norm": 3.984375,
      "learning_rate": 1.3152631578947368e-05,
      "loss": 0.8621,
      "step": 1670
    },
    {
      "epoch": 0.6,
      "grad_norm": 3.28125,
      "learning_rate": 1.3136842105263159e-05,
      "loss": 0.8613,
      "step": 1680
    },
    {
      "epoch": 0.61,
      "grad_norm": 4.1875,
      "learning_rate": 1.3121052631578947e-05,
      "loss": 0.8886,
      "step": 1690
    },
    {
      "epoch": 0.61,
      "grad_norm": 3.53125,
      "learning_rate": 1.3105263157894738e-05,
      "loss": 0.843,
      "step": 1700
    },
    {
      "epoch": 0.61,
      "grad_norm": 3.75,
      "learning_rate": 1.3089473684210526e-05,
      "loss": 0.8877,
      "step": 1710
    },
    {
      "epoch": 0.62,
      "grad_norm": 4.1875,
      "learning_rate": 1.3073684210526315e-05,
      "loss": 0.8785,
      "step": 1720
    },
    {
      "epoch": 0.62,
      "grad_norm": 3.78125,
      "learning_rate": 1.3057894736842105e-05,
      "loss": 0.8878,
      "step": 1730
    },
    {
      "epoch": 0.62,
      "grad_norm": 3.828125,
      "learning_rate": 1.3042105263157894e-05,
      "loss": 0.8794,
      "step": 1740
    },
    {
      "epoch": 0.63,
      "grad_norm": 4.75,
      "learning_rate": 1.3026315789473684e-05,
      "loss": 0.8821,
      "step": 1750
    },
    {
      "epoch": 0.63,
      "grad_norm": 4.34375,
      "learning_rate": 1.3010526315789475e-05,
      "loss": 0.8146,
      "step": 1760
    },
    {
      "epoch": 0.63,
      "grad_norm": 3.953125,
      "learning_rate": 1.2994736842105265e-05,
      "loss": 0.8322,
      "step": 1770
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.8125,
      "learning_rate": 1.2978947368421054e-05,
      "loss": 0.8197,
      "step": 1780
    },
    {
      "epoch": 0.64,
      "grad_norm": 4.59375,
      "learning_rate": 1.2963157894736842e-05,
      "loss": 0.8883,
      "step": 1790
    },
    {
      "epoch": 0.65,
      "grad_norm": 3.609375,
      "learning_rate": 1.2947368421052633e-05,
      "loss": 0.9259,
      "step": 1800
    },
    {
      "epoch": 0.65,
      "grad_norm": 4.125,
      "learning_rate": 1.2931578947368421e-05,
      "loss": 0.7667,
      "step": 1810
    },
    {
      "epoch": 0.65,
      "grad_norm": 5.3125,
      "learning_rate": 1.2915789473684212e-05,
      "loss": 0.9434,
      "step": 1820
    },
    {
      "epoch": 0.66,
      "grad_norm": 3.3125,
      "learning_rate": 1.29e-05,
      "loss": 0.8081,
      "step": 1830
    },
    {
      "epoch": 0.66,
      "grad_norm": 3.875,
      "learning_rate": 1.288421052631579e-05,
      "loss": 0.767,
      "step": 1840
    },
    {
      "epoch": 0.66,
      "grad_norm": 4.53125,
      "learning_rate": 1.2868421052631579e-05,
      "loss": 0.8339,
      "step": 1850
    },
    {
      "epoch": 0.67,
      "grad_norm": 4.0,
      "learning_rate": 1.2852631578947368e-05,
      "loss": 0.8083,
      "step": 1860
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.828125,
      "learning_rate": 1.2836842105263158e-05,
      "loss": 0.8255,
      "step": 1870
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.65625,
      "learning_rate": 1.2821052631578947e-05,
      "loss": 0.8618,
      "step": 1880
    },
    {
      "epoch": 0.68,
      "grad_norm": 4.15625,
      "learning_rate": 1.2805263157894737e-05,
      "loss": 0.913,
      "step": 1890
    },
    {
      "epoch": 0.68,
      "grad_norm": 4.125,
      "learning_rate": 1.2789473684210526e-05,
      "loss": 0.7233,
      "step": 1900
    },
    {
      "epoch": 0.68,
      "grad_norm": 4.5625,
      "learning_rate": 1.2773684210526316e-05,
      "loss": 0.839,
      "step": 1910
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.765625,
      "learning_rate": 1.2757894736842106e-05,
      "loss": 0.9282,
      "step": 1920
    },
    {
      "epoch": 0.69,
      "grad_norm": 3.3125,
      "learning_rate": 1.2742105263157895e-05,
      "loss": 0.8012,
      "step": 1930
    },
    {
      "epoch": 0.7,
      "grad_norm": 6.3125,
      "learning_rate": 1.2726315789473685e-05,
      "loss": 0.7934,
      "step": 1940
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.921875,
      "learning_rate": 1.2710526315789474e-05,
      "loss": 0.8342,
      "step": 1950
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.96875,
      "learning_rate": 1.2694736842105264e-05,
      "loss": 0.8319,
      "step": 1960
    },
    {
      "epoch": 0.71,
      "grad_norm": 3.4375,
      "learning_rate": 1.2678947368421053e-05,
      "loss": 0.855,
      "step": 1970
    },
    {
      "epoch": 0.71,
      "grad_norm": 4.21875,
      "learning_rate": 1.2663157894736843e-05,
      "loss": 0.8794,
      "step": 1980
    },
    {
      "epoch": 0.71,
      "grad_norm": 4.03125,
      "learning_rate": 1.2647368421052632e-05,
      "loss": 0.8106,
      "step": 1990
    },
    {
      "epoch": 0.72,
      "grad_norm": 4.84375,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.7944,
      "step": 2000
    },
    {
      "epoch": 0.72,
      "eval_loss": 0.964277446269989,
      "eval_runtime": 64.3098,
      "eval_samples_per_second": 15.55,
      "eval_steps_per_second": 15.55,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 2000,
  "total_flos": 3.227844083712e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}