{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.2,
  "eval_steps": 2000,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 20.375, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 1.7815, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 19.125, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.84, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 17.0, |
|
"learning_rate": 9e-07, |
|
"loss": 2.098, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.67, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 18.0, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.8452, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.8e-06, |
|
"loss": 2.1664, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 17.25, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 1.6483, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 18.625, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.9371, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 16.75, |
|
"learning_rate": 2.7e-06, |
|
"loss": 1.8967, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 3e-06, |
|
"loss": 1.8326, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 3.3e-06, |
|
"loss": 1.9072, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 21.5, |
|
"learning_rate": 3.6e-06, |
|
"loss": 1.9562, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 21.75, |
|
"learning_rate": 3.9e-06, |
|
"loss": 1.6976, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 16.0, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 2.2788, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 17.625, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.8362, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 22.5, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.8372, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 16.75, |
|
"learning_rate": 5.1e-06, |
|
"loss": 1.9134, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 21.875, |
|
"learning_rate": 5.4e-06, |
|
"loss": 1.8973, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 5.7000000000000005e-06, |
|
"loss": 1.6868, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 19.75, |
|
"learning_rate": 6e-06, |
|
"loss": 1.6079, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 19.5, |
|
"learning_rate": 6.3e-06, |
|
"loss": 1.4638, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 18.625, |
|
"learning_rate": 6.6e-06, |
|
"loss": 1.8714, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 24.25, |
|
"learning_rate": 6.900000000000001e-06, |
|
"loss": 1.9379, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 13.0, |
|
"learning_rate": 7.2e-06, |
|
"loss": 1.364, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 17.75, |
|
"learning_rate": 7.5e-06, |
|
"loss": 1.8867, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 14.0, |
|
"learning_rate": 7.8e-06, |
|
"loss": 1.9215, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 23.5, |
|
"learning_rate": 8.1e-06, |
|
"loss": 1.8065, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 17.25, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 1.6864, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 25.25, |
|
"learning_rate": 8.7e-06, |
|
"loss": 1.6924, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 25.0, |
|
"learning_rate": 9e-06, |
|
"loss": 1.7671, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 18.375, |
|
"learning_rate": 9.3e-06, |
|
"loss": 1.8781, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 20.5, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 1.454, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 14.875, |
|
"learning_rate": 9.9e-06, |
|
"loss": 1.2016, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 23.625, |
|
"learning_rate": 1.02e-05, |
|
"loss": 1.6703, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 26.125, |
|
"learning_rate": 1.05e-05, |
|
"loss": 1.3712, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 18.375, |
|
"learning_rate": 1.08e-05, |
|
"loss": 1.3751, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.11e-05, |
|
"loss": 1.7888, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 34.25, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 1.5242, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.1700000000000001e-05, |
|
"loss": 1.5371, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 31.375, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.3818, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 13.375, |
|
"learning_rate": 1.2299999999999999e-05, |
|
"loss": 1.9301, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 17.25, |
|
"learning_rate": 1.26e-05, |
|
"loss": 1.7813, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.29e-05, |
|
"loss": 1.6075, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 25.125, |
|
"learning_rate": 1.32e-05, |
|
"loss": 1.9157, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.3500000000000001e-05, |
|
"loss": 1.6057, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 1.6342, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 13.625, |
|
"learning_rate": 1.4099999999999999e-05, |
|
"loss": 1.4012, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 14.0625, |
|
"learning_rate": 1.44e-05, |
|
"loss": 1.853, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.47e-05, |
|
"loss": 1.6546, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.5e-05, |
|
"loss": 1.3404, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.4984210526315789e-05, |
|
"loss": 1.5801, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 27.5, |
|
"learning_rate": 1.496842105263158e-05, |
|
"loss": 1.5567, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 28.25, |
|
"learning_rate": 1.4952631578947368e-05, |
|
"loss": 1.7417, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 27.0, |
|
"learning_rate": 1.4936842105263158e-05, |
|
"loss": 1.6283, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 25.5, |
|
"learning_rate": 1.4921052631578947e-05, |
|
"loss": 1.8534, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 11.625, |
|
"learning_rate": 1.4905263157894737e-05, |
|
"loss": 1.668, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 39.75, |
|
"learning_rate": 1.4889473684210526e-05, |
|
"loss": 1.4711, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.4873684210526315e-05, |
|
"loss": 1.4484, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 15.3125, |
|
"learning_rate": 1.4857894736842107e-05, |
|
"loss": 1.6434, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.4842105263157895e-05, |
|
"loss": 1.8766, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.4826315789473686e-05, |
|
"loss": 1.329, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.4810526315789474e-05, |
|
"loss": 1.5956, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 9.125, |
|
"learning_rate": 1.4794736842105265e-05, |
|
"loss": 1.5795, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 15.25, |
|
"learning_rate": 1.4778947368421053e-05, |
|
"loss": 1.7082, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 25.0, |
|
"learning_rate": 1.4763157894736842e-05, |
|
"loss": 1.7773, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 25.5, |
|
"learning_rate": 1.4747368421052632e-05, |
|
"loss": 1.3858, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.4731578947368421e-05, |
|
"loss": 1.6927, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 10.75, |
|
"learning_rate": 1.4715789473684211e-05, |
|
"loss": 1.5281, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 30.125, |
|
"learning_rate": 1.47e-05, |
|
"loss": 1.3842, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 41.5, |
|
"learning_rate": 1.468421052631579e-05, |
|
"loss": 1.7584, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.875, |
|
"learning_rate": 1.4668421052631579e-05, |
|
"loss": 1.5485, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 35.25, |
|
"learning_rate": 1.4652631578947367e-05, |
|
"loss": 1.61, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.4636842105263158e-05, |
|
"loss": 1.6709, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.4621052631578946e-05, |
|
"loss": 1.4464, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.625, |
|
"learning_rate": 1.4605263157894737e-05, |
|
"loss": 1.5036, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 22.25, |
|
"learning_rate": 1.4589473684210527e-05, |
|
"loss": 1.6983, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 32.5, |
|
"learning_rate": 1.4573684210526317e-05, |
|
"loss": 1.4551, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.625, |
|
"learning_rate": 1.4557894736842106e-05, |
|
"loss": 1.2903, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 25.875, |
|
"learning_rate": 1.4542105263157895e-05, |
|
"loss": 1.5937, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 24.125, |
|
"learning_rate": 1.4526315789473685e-05, |
|
"loss": 1.5994, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 26.375, |
|
"learning_rate": 1.4510526315789474e-05, |
|
"loss": 1.4018, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.4494736842105264e-05, |
|
"loss": 1.1078, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.4478947368421053e-05, |
|
"loss": 1.5669, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.4463157894736843e-05, |
|
"loss": 1.325, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.4447368421052632e-05, |
|
"loss": 1.3645, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 26.125, |
|
"learning_rate": 1.443157894736842e-05, |
|
"loss": 1.5002, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 22.875, |
|
"learning_rate": 1.441578947368421e-05, |
|
"loss": 1.3608, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 20.625, |
|
"learning_rate": 1.44e-05, |
|
"loss": 1.5648, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 32.0, |
|
"learning_rate": 1.438421052631579e-05, |
|
"loss": 1.1969, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.4368421052631578e-05, |
|
"loss": 1.5523, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 27.5, |
|
"learning_rate": 1.4352631578947369e-05, |
|
"loss": 1.5062, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 14.75, |
|
"learning_rate": 1.4336842105263159e-05, |
|
"loss": 1.5052, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 12.8125, |
|
"learning_rate": 1.4321052631578948e-05, |
|
"loss": 1.6696, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 18.0, |
|
"learning_rate": 1.4305263157894738e-05, |
|
"loss": 1.2593, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.4289473684210527e-05, |
|
"loss": 0.9601, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.4273684210526317e-05, |
|
"loss": 1.3436, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 29.25, |
|
"learning_rate": 1.4257894736842106e-05, |
|
"loss": 1.9493, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 14.375, |
|
"learning_rate": 1.4242105263157896e-05, |
|
"loss": 1.3317, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 12.625, |
|
"learning_rate": 1.4226315789473685e-05, |
|
"loss": 1.263, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 25.625, |
|
"learning_rate": 1.4210526315789473e-05, |
|
"loss": 1.4426, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 27.75, |
|
"learning_rate": 1.4194736842105264e-05, |
|
"loss": 1.738, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 20.875, |
|
"learning_rate": 1.4178947368421052e-05, |
|
"loss": 1.474, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 23.25, |
|
"learning_rate": 1.4163157894736843e-05, |
|
"loss": 1.8161, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.4147368421052631e-05, |
|
"loss": 1.2116, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 25.25, |
|
"learning_rate": 1.4131578947368422e-05, |
|
"loss": 1.7062, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 21.125, |
|
"learning_rate": 1.411578947368421e-05, |
|
"loss": 1.5641, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 15.625, |
|
"learning_rate": 1.4099999999999999e-05, |
|
"loss": 1.5193, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 20.75, |
|
"learning_rate": 1.408421052631579e-05, |
|
"loss": 1.4775, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.406842105263158e-05, |
|
"loss": 1.4353, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 29.375, |
|
"learning_rate": 1.405263157894737e-05, |
|
"loss": 1.6741, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 36.5, |
|
"learning_rate": 1.4036842105263158e-05, |
|
"loss": 1.4158, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.4021052631578949e-05, |
|
"loss": 1.2708, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.4005263157894737e-05, |
|
"loss": 1.1376, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.3989473684210526e-05, |
|
"loss": 1.3171, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.3973684210526316e-05, |
|
"loss": 1.5621, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.5, |
|
"learning_rate": 1.3957894736842105e-05, |
|
"loss": 1.8743, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.4375, |
|
"learning_rate": 1.3942105263157895e-05, |
|
"loss": 1.5132, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.3926315789473684e-05, |
|
"loss": 1.6475, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.25, |
|
"learning_rate": 1.3910526315789474e-05, |
|
"loss": 1.4806, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3894736842105263e-05, |
|
"loss": 1.5761, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 17.125, |
|
"learning_rate": 1.3878947368421052e-05, |
|
"loss": 1.88, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3863157894736842e-05, |
|
"loss": 1.3399, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 21.25, |
|
"learning_rate": 1.384736842105263e-05, |
|
"loss": 1.6421, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.625, |
|
"learning_rate": 1.3831578947368421e-05, |
|
"loss": 1.7212, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 17.75, |
|
"learning_rate": 1.3815789473684211e-05, |
|
"loss": 1.6537, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 1.6197, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 18.875, |
|
"learning_rate": 1.378421052631579e-05, |
|
"loss": 1.5681, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 40.75, |
|
"learning_rate": 1.3768421052631579e-05, |
|
"loss": 1.3511, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.0, |
|
"learning_rate": 1.375263157894737e-05, |
|
"loss": 1.3007, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 10.9375, |
|
"learning_rate": 1.3736842105263158e-05, |
|
"loss": 1.18, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.3721052631578948e-05, |
|
"loss": 1.6356, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.3705263157894737e-05, |
|
"loss": 1.4107, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 26.0, |
|
"learning_rate": 1.3689473684210527e-05, |
|
"loss": 1.3306, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.3673684210526316e-05, |
|
"loss": 1.5359, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.3657894736842106e-05, |
|
"loss": 1.404, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.3642105263157895e-05, |
|
"loss": 1.2873, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 1.3626315789473684e-05, |
|
"loss": 1.3952, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.3610526315789474e-05, |
|
"loss": 1.4608, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 20.875, |
|
"learning_rate": 1.3594736842105263e-05, |
|
"loss": 1.5216, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.3578947368421053e-05, |
|
"loss": 1.4667, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 31.75, |
|
"learning_rate": 1.3563157894736842e-05, |
|
"loss": 1.6675, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.3547368421052634e-05, |
|
"loss": 1.4568, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.3531578947368422e-05, |
|
"loss": 1.4352, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.3515789473684211e-05, |
|
"loss": 1.4362, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 29.5, |
|
"learning_rate": 1.3500000000000001e-05, |
|
"loss": 1.2622, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 13.75, |
|
"learning_rate": 1.348421052631579e-05, |
|
"loss": 1.4709, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.346842105263158e-05, |
|
"loss": 1.3638, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.3452631578947369e-05, |
|
"loss": 1.7496, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.343684210526316e-05, |
|
"loss": 1.7741, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3421052631578948e-05, |
|
"loss": 1.3499, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 31.625, |
|
"learning_rate": 1.3405263157894736e-05, |
|
"loss": 1.3528, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.3389473684210527e-05, |
|
"loss": 1.4893, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 12.3125, |
|
"learning_rate": 1.3373684210526315e-05, |
|
"loss": 1.4222, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.3357894736842106e-05, |
|
"loss": 1.7875, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 30.375, |
|
"learning_rate": 1.3342105263157894e-05, |
|
"loss": 1.426, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 14.25, |
|
"learning_rate": 1.3326315789473685e-05, |
|
"loss": 1.8679, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.3310526315789473e-05, |
|
"loss": 1.4155, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 29.125, |
|
"learning_rate": 1.3294736842105262e-05, |
|
"loss": 1.7515, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 18.125, |
|
"learning_rate": 1.3278947368421054e-05, |
|
"loss": 1.4494, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.3263157894736843e-05, |
|
"loss": 1.4159, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 24.75, |
|
"learning_rate": 1.3247368421052633e-05, |
|
"loss": 1.5582, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 24.25, |
|
"learning_rate": 1.3231578947368422e-05, |
|
"loss": 1.5761, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 30.375, |
|
"learning_rate": 1.3215789473684212e-05, |
|
"loss": 1.5033, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.32e-05, |
|
"loss": 1.4209, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 22.25, |
|
"learning_rate": 1.318421052631579e-05, |
|
"loss": 1.5761, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 25.0, |
|
"learning_rate": 1.316842105263158e-05, |
|
"loss": 1.4146, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 20.375, |
|
"learning_rate": 1.3152631578947368e-05, |
|
"loss": 1.2064, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.3136842105263159e-05, |
|
"loss": 1.1254, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.3121052631578947e-05, |
|
"loss": 1.5665, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.625, |
|
"learning_rate": 1.3105263157894738e-05, |
|
"loss": 1.5582, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 22.875, |
|
"learning_rate": 1.3089473684210526e-05, |
|
"loss": 1.2198, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 15.875, |
|
"learning_rate": 1.3073684210526315e-05, |
|
"loss": 1.4875, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 22.25, |
|
"learning_rate": 1.3057894736842105e-05, |
|
"loss": 1.3077, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 13.125, |
|
"learning_rate": 1.3042105263157894e-05, |
|
"loss": 1.571, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.3026315789473684e-05, |
|
"loss": 1.2261, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 12.0, |
|
"learning_rate": 1.3010526315789475e-05, |
|
"loss": 1.1795, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.2994736842105265e-05, |
|
"loss": 1.2311, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 1.2978947368421054e-05, |
|
"loss": 1.5966, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.2963157894736842e-05, |
|
"loss": 1.5122, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 10.5, |
|
"learning_rate": 1.2947368421052633e-05, |
|
"loss": 1.2532, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 1.2931578947368421e-05, |
|
"loss": 1.3394, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 19.375, |
|
"learning_rate": 1.2915789473684212e-05, |
|
"loss": 1.6067, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.29e-05, |
|
"loss": 1.4977, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.288421052631579e-05, |
|
"loss": 1.4339, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 19.625, |
|
"learning_rate": 1.2868421052631579e-05, |
|
"loss": 1.4789, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 11.125, |
|
"learning_rate": 1.2852631578947368e-05, |
|
"loss": 1.3857, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 1.2836842105263158e-05, |
|
"loss": 1.4344, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 21.5, |
|
"learning_rate": 1.2821052631578947e-05, |
|
"loss": 1.416, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.875, |
|
"learning_rate": 1.2805263157894737e-05, |
|
"loss": 1.4628, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 16.5, |
|
"learning_rate": 1.2789473684210526e-05, |
|
"loss": 1.3098, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.2773684210526316e-05, |
|
"loss": 1.4423, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 18.625, |
|
"learning_rate": 1.2757894736842106e-05, |
|
"loss": 1.1756, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.2742105263157895e-05, |
|
"loss": 1.3788, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 14.1875, |
|
"learning_rate": 1.2726315789473685e-05, |
|
"loss": 1.2287, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 24.25, |
|
"learning_rate": 1.2710526315789474e-05, |
|
"loss": 1.1927, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 14.625, |
|
"learning_rate": 1.2694736842105264e-05, |
|
"loss": 1.5478, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 1.2678947368421053e-05, |
|
"loss": 1.2426, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 30.25, |
|
"learning_rate": 1.2663157894736843e-05, |
|
"loss": 1.1653, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 21.875, |
|
"learning_rate": 1.2647368421052632e-05, |
|
"loss": 1.3693, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 1.263157894736842e-05, |
|
"loss": 1.7071, |
|
"step": 2000 |
|
}, |
|
    {
      "epoch": 0.1,
      "eval_loss": 1.4587359428405762,
      "eval_runtime": 31.2193,
      "eval_samples_per_second": 32.032,
      "eval_steps_per_second": 32.032,
      "step": 2000
    },
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 12.25, |
|
"learning_rate": 1.2615789473684211e-05, |
|
"loss": 1.1877, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 33.25, |
|
"learning_rate": 1.26e-05, |
|
"loss": 1.6576, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 17.0, |
|
"learning_rate": 1.258421052631579e-05, |
|
"loss": 1.3037, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.2568421052631579e-05, |
|
"loss": 1.2634, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.2552631578947369e-05, |
|
"loss": 1.5936, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 23.625, |
|
"learning_rate": 1.2536842105263158e-05, |
|
"loss": 1.1655, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 12.625, |
|
"learning_rate": 1.2521052631578946e-05, |
|
"loss": 1.3446, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 1.2505263157894737e-05, |
|
"loss": 1.5295, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 12.4375, |
|
"learning_rate": 1.2489473684210527e-05, |
|
"loss": 1.5361, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 1.2473684210526317e-05, |
|
"loss": 1.3638, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 15.4375, |
|
"learning_rate": 1.2457894736842106e-05, |
|
"loss": 1.472, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.2442105263157896e-05, |
|
"loss": 1.0827, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 12.0625, |
|
"learning_rate": 1.2426315789473685e-05, |
|
"loss": 1.2359, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 12.75, |
|
"learning_rate": 1.2410526315789474e-05, |
|
"loss": 1.4345, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 19.5, |
|
"learning_rate": 1.2394736842105264e-05, |
|
"loss": 1.5083, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 29.125, |
|
"learning_rate": 1.2378947368421053e-05, |
|
"loss": 1.2703, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.2363157894736843e-05, |
|
"loss": 1.2485, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 30.0, |
|
"learning_rate": 1.2347368421052631e-05, |
|
"loss": 0.992, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 27.625, |
|
"learning_rate": 1.2331578947368422e-05, |
|
"loss": 1.5069, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 23.375, |
|
"learning_rate": 1.231578947368421e-05, |
|
"loss": 1.5471, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.2299999999999999e-05, |
|
"loss": 1.3682, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 17.125, |
|
"learning_rate": 1.228421052631579e-05, |
|
"loss": 1.5041, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 15.875, |
|
"learning_rate": 1.2268421052631578e-05, |
|
"loss": 1.5487, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 29.75, |
|
"learning_rate": 1.2252631578947368e-05, |
|
"loss": 1.2019, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 12.625, |
|
"learning_rate": 1.2236842105263159e-05, |
|
"loss": 1.4014, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.2221052631578949e-05, |
|
"loss": 1.3086, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 15.5, |
|
"learning_rate": 1.2205263157894738e-05, |
|
"loss": 1.6796, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.2189473684210526e-05, |
|
"loss": 1.2801, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 9.75, |
|
"learning_rate": 1.2173684210526317e-05, |
|
"loss": 1.1822, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 27.75, |
|
"learning_rate": 1.2157894736842105e-05, |
|
"loss": 1.428, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 21.125, |
|
"learning_rate": 1.2142105263157896e-05, |
|
"loss": 1.4445, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.2126315789473684e-05, |
|
"loss": 1.745, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 1.2110526315789475e-05, |
|
"loss": 1.8124, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 31.0, |
|
"learning_rate": 1.2094736842105263e-05, |
|
"loss": 1.3533, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 11.5, |
|
"learning_rate": 1.2078947368421052e-05, |
|
"loss": 1.4255, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.2063157894736842e-05, |
|
"loss": 1.3926, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 13.75, |
|
"learning_rate": 1.2047368421052631e-05, |
|
"loss": 1.295, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.2031578947368421e-05, |
|
"loss": 1.0575, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 30.5, |
|
"learning_rate": 1.201578947368421e-05, |
|
"loss": 1.4961, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.3838, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.1984210526315789e-05, |
|
"loss": 1.5833, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.196842105263158e-05, |
|
"loss": 1.2771, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.195263157894737e-05, |
|
"loss": 1.6264, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 19.5, |
|
"learning_rate": 1.1936842105263158e-05, |
|
"loss": 1.4304, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 1.1921052631578949e-05, |
|
"loss": 1.1868, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 39.25, |
|
"learning_rate": 1.1905263157894737e-05, |
|
"loss": 1.4066, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 24.5, |
|
"learning_rate": 1.1889473684210528e-05, |
|
"loss": 1.3723, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 21.75, |
|
"learning_rate": 1.1873684210526316e-05, |
|
"loss": 1.507, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 22.625, |
|
"learning_rate": 1.1857894736842105e-05, |
|
"loss": 1.4822, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 23.25, |
|
"learning_rate": 1.1842105263157895e-05, |
|
"loss": 1.3431, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 14.25, |
|
"learning_rate": 1.1826315789473684e-05, |
|
"loss": 1.438, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 38.0, |
|
"learning_rate": 1.1810526315789474e-05, |
|
"loss": 1.3837, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.1794736842105263e-05, |
|
"loss": 1.3997, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 13.375, |
|
"learning_rate": 1.1778947368421053e-05, |
|
"loss": 1.5068, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 26.75, |
|
"learning_rate": 1.1763157894736842e-05, |
|
"loss": 1.5514, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 23.375, |
|
"learning_rate": 1.174736842105263e-05, |
|
"loss": 1.4679, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 12.6875, |
|
"learning_rate": 1.173157894736842e-05, |
|
"loss": 1.3298, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.171578947368421e-05, |
|
"loss": 1.2123, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 21.625, |
|
"learning_rate": 1.1700000000000001e-05, |
|
"loss": 1.3493, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.168421052631579e-05, |
|
"loss": 1.3984, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.166842105263158e-05, |
|
"loss": 1.435, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.1652631578947369e-05, |
|
"loss": 1.273, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.1636842105263158e-05, |
|
"loss": 1.2547, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 1.1621052631578948e-05, |
|
"loss": 1.1227, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 29.125, |
|
"learning_rate": 1.1605263157894737e-05, |
|
"loss": 1.3045, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 20.25, |
|
"learning_rate": 1.1589473684210527e-05, |
|
"loss": 1.4858, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 14.4375, |
|
"learning_rate": 1.1573684210526316e-05, |
|
"loss": 1.1192, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 22.875, |
|
"learning_rate": 1.1557894736842106e-05, |
|
"loss": 1.4049, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 14.25, |
|
"learning_rate": 1.1542105263157895e-05, |
|
"loss": 1.3724, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 24.25, |
|
"learning_rate": 1.1526315789473683e-05, |
|
"loss": 1.3053, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.1510526315789474e-05, |
|
"loss": 1.3986, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.1494736842105262e-05, |
|
"loss": 1.5444, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 16.25, |
|
"learning_rate": 1.1478947368421053e-05, |
|
"loss": 1.6835, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 19.375, |
|
"learning_rate": 1.1463157894736841e-05, |
|
"loss": 1.2312, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 13.0625, |
|
"learning_rate": 1.1447368421052632e-05, |
|
"loss": 1.0698, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.1431578947368422e-05, |
|
"loss": 1.4871, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 10.875, |
|
"learning_rate": 1.141578947368421e-05, |
|
"loss": 1.257, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 11.8125, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 1.5711, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.138421052631579e-05, |
|
"loss": 1.592, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 25.5, |
|
"learning_rate": 1.136842105263158e-05, |
|
"loss": 0.9352, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.1352631578947369e-05, |
|
"loss": 1.3347, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 26.75, |
|
"learning_rate": 1.1336842105263159e-05, |
|
"loss": 1.0488, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.1321052631578948e-05, |
|
"loss": 1.7698, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 26.0, |
|
"learning_rate": 1.1305263157894736e-05, |
|
"loss": 1.2135, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.1289473684210527e-05, |
|
"loss": 1.5619, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.1273684210526315e-05, |
|
"loss": 1.5739, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 51.25, |
|
"learning_rate": 1.1257894736842106e-05, |
|
"loss": 1.3658, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.1242105263157894e-05, |
|
"loss": 1.6634, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.1226315789473685e-05, |
|
"loss": 1.1902, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 20.375, |
|
"learning_rate": 1.1210526315789473e-05, |
|
"loss": 1.2131, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 18.875, |
|
"learning_rate": 1.1194736842105264e-05, |
|
"loss": 1.4465, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 30.75, |
|
"learning_rate": 1.1178947368421054e-05, |
|
"loss": 1.6064, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 15.1875, |
|
"learning_rate": 1.1163157894736842e-05, |
|
"loss": 1.5864, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 23.25, |
|
"learning_rate": 1.1147368421052633e-05, |
|
"loss": 1.2329, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.1131578947368421e-05, |
|
"loss": 1.3795, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 24.0, |
|
"learning_rate": 1.1115789473684212e-05, |
|
"loss": 1.4095, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.11e-05, |
|
"loss": 1.1682, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 26.0, |
|
"learning_rate": 1.108421052631579e-05, |
|
"loss": 1.1633, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.106842105263158e-05, |
|
"loss": 1.4457, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.1052631578947368e-05, |
|
"loss": 1.4336, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 26.625, |
|
"learning_rate": 1.1036842105263158e-05, |
|
"loss": 1.1143, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 16.5, |
|
"learning_rate": 1.1021052631578947e-05, |
|
"loss": 1.2105, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 38.75, |
|
"learning_rate": 1.1005263157894737e-05, |
|
"loss": 1.6449, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 23.375, |
|
"learning_rate": 1.0989473684210526e-05, |
|
"loss": 1.3151, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 15.3125, |
|
"learning_rate": 1.0973684210526316e-05, |
|
"loss": 1.6362, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.0957894736842105e-05, |
|
"loss": 1.6158, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.0942105263157894e-05, |
|
"loss": 1.6301, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.0926315789473684e-05, |
|
"loss": 1.4099, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 28.625, |
|
"learning_rate": 1.0910526315789474e-05, |
|
"loss": 1.6574, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.0894736842105265e-05, |
|
"loss": 1.4458, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 16.25, |
|
"learning_rate": 1.0878947368421053e-05, |
|
"loss": 1.4384, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.0863157894736844e-05, |
|
"loss": 1.4054, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.0847368421052632e-05, |
|
"loss": 1.1473, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.0831578947368421e-05, |
|
"loss": 1.2524, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.0815789473684211e-05, |
|
"loss": 1.1765, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.375, |
|
"learning_rate": 1.08e-05, |
|
"loss": 1.315, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 33.75, |
|
"learning_rate": 1.078421052631579e-05, |
|
"loss": 1.6893, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.375, |
|
"learning_rate": 1.0768421052631579e-05, |
|
"loss": 1.3754, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 24.125, |
|
"learning_rate": 1.075263157894737e-05, |
|
"loss": 1.0585, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 26.75, |
|
"learning_rate": 1.0736842105263158e-05, |
|
"loss": 1.5381, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 21.25, |
|
"learning_rate": 1.0721052631578947e-05, |
|
"loss": 1.2503, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 21.625, |
|
"learning_rate": 1.0705263157894737e-05, |
|
"loss": 1.4186, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.0689473684210526e-05, |
|
"loss": 1.5066, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 25.375, |
|
"learning_rate": 1.0673684210526316e-05, |
|
"loss": 1.2001, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 21.5, |
|
"learning_rate": 1.0657894736842106e-05, |
|
"loss": 1.4222, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 20.25, |
|
"learning_rate": 1.0642105263157897e-05, |
|
"loss": 1.556, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 18.125, |
|
"learning_rate": 1.0626315789473685e-05, |
|
"loss": 1.1219, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.0610526315789474e-05, |
|
"loss": 1.5552, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.75, |
|
"learning_rate": 1.0594736842105264e-05, |
|
"loss": 1.4247, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 17.0, |
|
"learning_rate": 1.0578947368421053e-05, |
|
"loss": 1.0846, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 18.0, |
|
"learning_rate": 1.0563157894736843e-05, |
|
"loss": 1.4747, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 13.5, |
|
"learning_rate": 1.0547368421052632e-05, |
|
"loss": 1.3696, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 22.625, |
|
"learning_rate": 1.0531578947368422e-05, |
|
"loss": 1.2841, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.051578947368421e-05, |
|
"loss": 1.4871, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.05e-05, |
|
"loss": 1.0924, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 21.875, |
|
"learning_rate": 1.048421052631579e-05, |
|
"loss": 1.5268, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 23.5, |
|
"learning_rate": 1.0468421052631578e-05, |
|
"loss": 1.2527, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 21.625, |
|
"learning_rate": 1.0452631578947369e-05, |
|
"loss": 1.0235, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.0436842105263157e-05, |
|
"loss": 1.3699, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 1.0421052631578948e-05, |
|
"loss": 1.2931, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 1.0405263157894736e-05, |
|
"loss": 0.9101, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.0389473684210527e-05, |
|
"loss": 1.0891, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.0373684210526317e-05, |
|
"loss": 1.2518, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 26.25, |
|
"learning_rate": 1.0357894736842106e-05, |
|
"loss": 1.4293, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 13.25, |
|
"learning_rate": 1.0342105263157896e-05, |
|
"loss": 1.2779, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.0326315789473685e-05, |
|
"loss": 1.6036, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 10.5625, |
|
"learning_rate": 1.0310526315789475e-05, |
|
"loss": 1.1933, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 24.375, |
|
"learning_rate": 1.0294736842105264e-05, |
|
"loss": 1.2207, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 15.0625, |
|
"learning_rate": 1.0278947368421052e-05, |
|
"loss": 0.9875, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 17.375, |
|
"learning_rate": 1.0263157894736843e-05, |
|
"loss": 1.6437, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 21.125, |
|
"learning_rate": 1.0247368421052631e-05, |
|
"loss": 1.5689, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.0231578947368422e-05, |
|
"loss": 1.4524, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 28.125, |
|
"learning_rate": 1.021578947368421e-05, |
|
"loss": 1.7354, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 27.75, |
|
"learning_rate": 1.02e-05, |
|
"loss": 1.5075, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.018421052631579e-05, |
|
"loss": 1.5991, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 31.5, |
|
"learning_rate": 1.0168421052631578e-05, |
|
"loss": 1.3886, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 14.375, |
|
"learning_rate": 1.0152631578947368e-05, |
|
"loss": 1.2167, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.0136842105263157e-05, |
|
"loss": 1.4397, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 21.0, |
|
"learning_rate": 1.0121052631578949e-05, |
|
"loss": 1.8334, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.0105263157894738e-05, |
|
"loss": 1.5306, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 15.5625, |
|
"learning_rate": 1.0089473684210528e-05, |
|
"loss": 1.5956, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 33.5, |
|
"learning_rate": 1.0073684210526317e-05, |
|
"loss": 1.6766, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.0057894736842105e-05, |
|
"loss": 1.2643, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 10.75, |
|
"learning_rate": 1.0042105263157896e-05, |
|
"loss": 1.4199, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 1.0026315789473684e-05, |
|
"loss": 1.3638, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 19.625, |
|
"learning_rate": 1.0010526315789474e-05, |
|
"loss": 1.1781, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 18.875, |
|
"learning_rate": 9.994736842105263e-06, |
|
"loss": 1.5178, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 20.875, |
|
"learning_rate": 9.978947368421053e-06, |
|
"loss": 1.175, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 14.5625, |
|
"learning_rate": 9.963157894736842e-06, |
|
"loss": 1.3539, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 20.75, |
|
"learning_rate": 9.94736842105263e-06, |
|
"loss": 1.1516, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 21.875, |
|
"learning_rate": 9.931578947368421e-06, |
|
"loss": 1.3705, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.125, |
|
"learning_rate": 9.91578947368421e-06, |
|
"loss": 1.1967, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 32.25, |
|
"learning_rate": 9.9e-06, |
|
"loss": 1.2262, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 13.0625, |
|
"learning_rate": 9.884210526315789e-06, |
|
"loss": 1.0924, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.5, |
|
"learning_rate": 9.868421052631579e-06, |
|
"loss": 1.4826, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 18.5, |
|
"learning_rate": 9.85263157894737e-06, |
|
"loss": 1.7201, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 22.75, |
|
"learning_rate": 9.836842105263158e-06, |
|
"loss": 1.8823, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 16.75, |
|
"learning_rate": 9.821052631578948e-06, |
|
"loss": 1.2102, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.0, |
|
"learning_rate": 9.805263157894737e-06, |
|
"loss": 0.9203, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 27.125, |
|
"learning_rate": 9.789473684210527e-06, |
|
"loss": 1.3829, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 16.0, |
|
"learning_rate": 9.773684210526316e-06, |
|
"loss": 1.4504, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.0, |
|
"learning_rate": 9.757894736842106e-06, |
|
"loss": 1.6228, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 15.0, |
|
"learning_rate": 9.742105263157895e-06, |
|
"loss": 1.1509, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 13.3125, |
|
"learning_rate": 9.726315789473684e-06, |
|
"loss": 1.4963, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 26.875, |
|
"learning_rate": 9.710526315789474e-06, |
|
"loss": 1.2939, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.625, |
|
"learning_rate": 9.694736842105263e-06, |
|
"loss": 1.396, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 17.25, |
|
"learning_rate": 9.678947368421053e-06, |
|
"loss": 1.5547, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 23.0, |
|
"learning_rate": 9.663157894736842e-06, |
|
"loss": 1.3473, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 17.5, |
|
"learning_rate": 9.647368421052632e-06, |
|
"loss": 1.3847, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 25.5, |
|
"learning_rate": 9.63157894736842e-06, |
|
"loss": 1.0258, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 20.25, |
|
"learning_rate": 9.61578947368421e-06, |
|
"loss": 1.4518, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 21.5, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 1.5955, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 16.875, |
|
"learning_rate": 9.58421052631579e-06, |
|
"loss": 1.3555, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 29.25, |
|
"learning_rate": 9.56842105263158e-06, |
|
"loss": 1.306, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 18.375, |
|
"learning_rate": 9.552631578947369e-06, |
|
"loss": 1.0536, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 16.25, |
|
"learning_rate": 9.53684210526316e-06, |
|
"loss": 1.4253, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 22.25, |
|
"learning_rate": 9.521052631578948e-06, |
|
"loss": 1.335, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 27.75, |
|
"learning_rate": 9.505263157894737e-06, |
|
"loss": 1.2626, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 32.0, |
|
"learning_rate": 9.489473684210527e-06, |
|
"loss": 1.3404, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 10.25, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 1.5403, |
|
"step": 4000 |
|
}, |
|
    {
      "epoch": 0.2,
      "eval_loss": 1.389373540878296,
      "eval_runtime": 30.7104,
      "eval_samples_per_second": 32.562,
      "eval_steps_per_second": 32.562,
      "step": 4000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 2000,
  "total_flos": 3.227844083712e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}