|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.5, |
|
"eval_steps": 2000, |
|
"global_step": 10000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 20.375, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 1.7815, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 19.125, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.84, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 17.0, |
|
"learning_rate": 9e-07, |
|
"loss": 2.098, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.67, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 18.0, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.8452, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.8e-06, |
|
"loss": 2.1664, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 17.25, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 1.6483, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 18.625, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.9371, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 16.75, |
|
"learning_rate": 2.7e-06, |
|
"loss": 1.8967, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 3e-06, |
|
"loss": 1.8326, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 3.3e-06, |
|
"loss": 1.9072, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 21.5, |
|
"learning_rate": 3.6e-06, |
|
"loss": 1.9562, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 21.75, |
|
"learning_rate": 3.9e-06, |
|
"loss": 1.6976, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 16.0, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 2.2788, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 17.625, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.8362, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 22.5, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.8372, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 16.75, |
|
"learning_rate": 5.1e-06, |
|
"loss": 1.9134, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 21.875, |
|
"learning_rate": 5.4e-06, |
|
"loss": 1.8973, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 5.7000000000000005e-06, |
|
"loss": 1.6868, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 19.75, |
|
"learning_rate": 6e-06, |
|
"loss": 1.6079, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 19.5, |
|
"learning_rate": 6.3e-06, |
|
"loss": 1.4638, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 18.625, |
|
"learning_rate": 6.6e-06, |
|
"loss": 1.8714, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 24.25, |
|
"learning_rate": 6.900000000000001e-06, |
|
"loss": 1.9379, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 13.0, |
|
"learning_rate": 7.2e-06, |
|
"loss": 1.364, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 17.75, |
|
"learning_rate": 7.5e-06, |
|
"loss": 1.8867, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 14.0, |
|
"learning_rate": 7.8e-06, |
|
"loss": 1.9215, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 23.5, |
|
"learning_rate": 8.1e-06, |
|
"loss": 1.8065, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 17.25, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 1.6864, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 25.25, |
|
"learning_rate": 8.7e-06, |
|
"loss": 1.6924, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 25.0, |
|
"learning_rate": 9e-06, |
|
"loss": 1.7671, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 18.375, |
|
"learning_rate": 9.3e-06, |
|
"loss": 1.8781, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 20.5, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 1.454, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 14.875, |
|
"learning_rate": 9.9e-06, |
|
"loss": 1.2016, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 23.625, |
|
"learning_rate": 1.02e-05, |
|
"loss": 1.6703, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 26.125, |
|
"learning_rate": 1.05e-05, |
|
"loss": 1.3712, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 18.375, |
|
"learning_rate": 1.08e-05, |
|
"loss": 1.3751, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.11e-05, |
|
"loss": 1.7888, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 34.25, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 1.5242, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.1700000000000001e-05, |
|
"loss": 1.5371, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 31.375, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.3818, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 13.375, |
|
"learning_rate": 1.2299999999999999e-05, |
|
"loss": 1.9301, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 17.25, |
|
"learning_rate": 1.26e-05, |
|
"loss": 1.7813, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.29e-05, |
|
"loss": 1.6075, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 25.125, |
|
"learning_rate": 1.32e-05, |
|
"loss": 1.9157, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.3500000000000001e-05, |
|
"loss": 1.6057, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 1.6342, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 13.625, |
|
"learning_rate": 1.4099999999999999e-05, |
|
"loss": 1.4012, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 14.0625, |
|
"learning_rate": 1.44e-05, |
|
"loss": 1.853, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.47e-05, |
|
"loss": 1.6546, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.5e-05, |
|
"loss": 1.3404, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.4984210526315789e-05, |
|
"loss": 1.5801, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 27.5, |
|
"learning_rate": 1.496842105263158e-05, |
|
"loss": 1.5567, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 28.25, |
|
"learning_rate": 1.4952631578947368e-05, |
|
"loss": 1.7417, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 27.0, |
|
"learning_rate": 1.4936842105263158e-05, |
|
"loss": 1.6283, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 25.5, |
|
"learning_rate": 1.4921052631578947e-05, |
|
"loss": 1.8534, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 11.625, |
|
"learning_rate": 1.4905263157894737e-05, |
|
"loss": 1.668, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 39.75, |
|
"learning_rate": 1.4889473684210526e-05, |
|
"loss": 1.4711, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.4873684210526315e-05, |
|
"loss": 1.4484, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 15.3125, |
|
"learning_rate": 1.4857894736842107e-05, |
|
"loss": 1.6434, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.4842105263157895e-05, |
|
"loss": 1.8766, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.4826315789473686e-05, |
|
"loss": 1.329, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.4810526315789474e-05, |
|
"loss": 1.5956, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 9.125, |
|
"learning_rate": 1.4794736842105265e-05, |
|
"loss": 1.5795, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 15.25, |
|
"learning_rate": 1.4778947368421053e-05, |
|
"loss": 1.7082, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 25.0, |
|
"learning_rate": 1.4763157894736842e-05, |
|
"loss": 1.7773, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 25.5, |
|
"learning_rate": 1.4747368421052632e-05, |
|
"loss": 1.3858, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.4731578947368421e-05, |
|
"loss": 1.6927, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 10.75, |
|
"learning_rate": 1.4715789473684211e-05, |
|
"loss": 1.5281, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 30.125, |
|
"learning_rate": 1.47e-05, |
|
"loss": 1.3842, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 41.5, |
|
"learning_rate": 1.468421052631579e-05, |
|
"loss": 1.7584, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.875, |
|
"learning_rate": 1.4668421052631579e-05, |
|
"loss": 1.5485, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 35.25, |
|
"learning_rate": 1.4652631578947367e-05, |
|
"loss": 1.61, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.4636842105263158e-05, |
|
"loss": 1.6709, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.4621052631578946e-05, |
|
"loss": 1.4464, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.625, |
|
"learning_rate": 1.4605263157894737e-05, |
|
"loss": 1.5036, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 22.25, |
|
"learning_rate": 1.4589473684210527e-05, |
|
"loss": 1.6983, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 32.5, |
|
"learning_rate": 1.4573684210526317e-05, |
|
"loss": 1.4551, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.625, |
|
"learning_rate": 1.4557894736842106e-05, |
|
"loss": 1.2903, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 25.875, |
|
"learning_rate": 1.4542105263157895e-05, |
|
"loss": 1.5937, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 24.125, |
|
"learning_rate": 1.4526315789473685e-05, |
|
"loss": 1.5994, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 26.375, |
|
"learning_rate": 1.4510526315789474e-05, |
|
"loss": 1.4018, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.4494736842105264e-05, |
|
"loss": 1.1078, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.4478947368421053e-05, |
|
"loss": 1.5669, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.4463157894736843e-05, |
|
"loss": 1.325, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.4447368421052632e-05, |
|
"loss": 1.3645, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 26.125, |
|
"learning_rate": 1.443157894736842e-05, |
|
"loss": 1.5002, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 22.875, |
|
"learning_rate": 1.441578947368421e-05, |
|
"loss": 1.3608, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 20.625, |
|
"learning_rate": 1.44e-05, |
|
"loss": 1.5648, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 32.0, |
|
"learning_rate": 1.438421052631579e-05, |
|
"loss": 1.1969, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.4368421052631578e-05, |
|
"loss": 1.5523, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 27.5, |
|
"learning_rate": 1.4352631578947369e-05, |
|
"loss": 1.5062, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 14.75, |
|
"learning_rate": 1.4336842105263159e-05, |
|
"loss": 1.5052, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 12.8125, |
|
"learning_rate": 1.4321052631578948e-05, |
|
"loss": 1.6696, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 18.0, |
|
"learning_rate": 1.4305263157894738e-05, |
|
"loss": 1.2593, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.4289473684210527e-05, |
|
"loss": 0.9601, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.4273684210526317e-05, |
|
"loss": 1.3436, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 29.25, |
|
"learning_rate": 1.4257894736842106e-05, |
|
"loss": 1.9493, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 14.375, |
|
"learning_rate": 1.4242105263157896e-05, |
|
"loss": 1.3317, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 12.625, |
|
"learning_rate": 1.4226315789473685e-05, |
|
"loss": 1.263, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 25.625, |
|
"learning_rate": 1.4210526315789473e-05, |
|
"loss": 1.4426, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 27.75, |
|
"learning_rate": 1.4194736842105264e-05, |
|
"loss": 1.738, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 20.875, |
|
"learning_rate": 1.4178947368421052e-05, |
|
"loss": 1.474, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 23.25, |
|
"learning_rate": 1.4163157894736843e-05, |
|
"loss": 1.8161, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.4147368421052631e-05, |
|
"loss": 1.2116, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 25.25, |
|
"learning_rate": 1.4131578947368422e-05, |
|
"loss": 1.7062, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 21.125, |
|
"learning_rate": 1.411578947368421e-05, |
|
"loss": 1.5641, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 15.625, |
|
"learning_rate": 1.4099999999999999e-05, |
|
"loss": 1.5193, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 20.75, |
|
"learning_rate": 1.408421052631579e-05, |
|
"loss": 1.4775, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.406842105263158e-05, |
|
"loss": 1.4353, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 29.375, |
|
"learning_rate": 1.405263157894737e-05, |
|
"loss": 1.6741, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 36.5, |
|
"learning_rate": 1.4036842105263158e-05, |
|
"loss": 1.4158, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.4021052631578949e-05, |
|
"loss": 1.2708, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.4005263157894737e-05, |
|
"loss": 1.1376, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.3989473684210526e-05, |
|
"loss": 1.3171, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.3973684210526316e-05, |
|
"loss": 1.5621, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.5, |
|
"learning_rate": 1.3957894736842105e-05, |
|
"loss": 1.8743, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.4375, |
|
"learning_rate": 1.3942105263157895e-05, |
|
"loss": 1.5132, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.3926315789473684e-05, |
|
"loss": 1.6475, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.25, |
|
"learning_rate": 1.3910526315789474e-05, |
|
"loss": 1.4806, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3894736842105263e-05, |
|
"loss": 1.5761, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 17.125, |
|
"learning_rate": 1.3878947368421052e-05, |
|
"loss": 1.88, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3863157894736842e-05, |
|
"loss": 1.3399, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 21.25, |
|
"learning_rate": 1.384736842105263e-05, |
|
"loss": 1.6421, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 19.625, |
|
"learning_rate": 1.3831578947368421e-05, |
|
"loss": 1.7212, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 17.75, |
|
"learning_rate": 1.3815789473684211e-05, |
|
"loss": 1.6537, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 1.6197, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 18.875, |
|
"learning_rate": 1.378421052631579e-05, |
|
"loss": 1.5681, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 40.75, |
|
"learning_rate": 1.3768421052631579e-05, |
|
"loss": 1.3511, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 13.0, |
|
"learning_rate": 1.375263157894737e-05, |
|
"loss": 1.3007, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 10.9375, |
|
"learning_rate": 1.3736842105263158e-05, |
|
"loss": 1.18, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.3721052631578948e-05, |
|
"loss": 1.6356, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.3705263157894737e-05, |
|
"loss": 1.4107, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 26.0, |
|
"learning_rate": 1.3689473684210527e-05, |
|
"loss": 1.3306, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.3673684210526316e-05, |
|
"loss": 1.5359, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.3657894736842106e-05, |
|
"loss": 1.404, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.3642105263157895e-05, |
|
"loss": 1.2873, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 1.3626315789473684e-05, |
|
"loss": 1.3952, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.3610526315789474e-05, |
|
"loss": 1.4608, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 20.875, |
|
"learning_rate": 1.3594736842105263e-05, |
|
"loss": 1.5216, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.3578947368421053e-05, |
|
"loss": 1.4667, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 31.75, |
|
"learning_rate": 1.3563157894736842e-05, |
|
"loss": 1.6675, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.3547368421052634e-05, |
|
"loss": 1.4568, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.3531578947368422e-05, |
|
"loss": 1.4352, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.3515789473684211e-05, |
|
"loss": 1.4362, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 29.5, |
|
"learning_rate": 1.3500000000000001e-05, |
|
"loss": 1.2622, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 13.75, |
|
"learning_rate": 1.348421052631579e-05, |
|
"loss": 1.4709, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.346842105263158e-05, |
|
"loss": 1.3638, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.3452631578947369e-05, |
|
"loss": 1.7496, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.343684210526316e-05, |
|
"loss": 1.7741, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.3421052631578948e-05, |
|
"loss": 1.3499, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 31.625, |
|
"learning_rate": 1.3405263157894736e-05, |
|
"loss": 1.3528, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.3389473684210527e-05, |
|
"loss": 1.4893, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 12.3125, |
|
"learning_rate": 1.3373684210526315e-05, |
|
"loss": 1.4222, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.3357894736842106e-05, |
|
"loss": 1.7875, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 30.375, |
|
"learning_rate": 1.3342105263157894e-05, |
|
"loss": 1.426, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 14.25, |
|
"learning_rate": 1.3326315789473685e-05, |
|
"loss": 1.8679, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.3310526315789473e-05, |
|
"loss": 1.4155, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 29.125, |
|
"learning_rate": 1.3294736842105262e-05, |
|
"loss": 1.7515, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 18.125, |
|
"learning_rate": 1.3278947368421054e-05, |
|
"loss": 1.4494, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.3263157894736843e-05, |
|
"loss": 1.4159, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 24.75, |
|
"learning_rate": 1.3247368421052633e-05, |
|
"loss": 1.5582, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 24.25, |
|
"learning_rate": 1.3231578947368422e-05, |
|
"loss": 1.5761, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 30.375, |
|
"learning_rate": 1.3215789473684212e-05, |
|
"loss": 1.5033, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.32e-05, |
|
"loss": 1.4209, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 22.25, |
|
"learning_rate": 1.318421052631579e-05, |
|
"loss": 1.5761, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 25.0, |
|
"learning_rate": 1.316842105263158e-05, |
|
"loss": 1.4146, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 20.375, |
|
"learning_rate": 1.3152631578947368e-05, |
|
"loss": 1.2064, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.3136842105263159e-05, |
|
"loss": 1.1254, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.3121052631578947e-05, |
|
"loss": 1.5665, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.625, |
|
"learning_rate": 1.3105263157894738e-05, |
|
"loss": 1.5582, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 22.875, |
|
"learning_rate": 1.3089473684210526e-05, |
|
"loss": 1.2198, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 15.875, |
|
"learning_rate": 1.3073684210526315e-05, |
|
"loss": 1.4875, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 22.25, |
|
"learning_rate": 1.3057894736842105e-05, |
|
"loss": 1.3077, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 13.125, |
|
"learning_rate": 1.3042105263157894e-05, |
|
"loss": 1.571, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.3026315789473684e-05, |
|
"loss": 1.2261, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 12.0, |
|
"learning_rate": 1.3010526315789475e-05, |
|
"loss": 1.1795, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.2994736842105265e-05, |
|
"loss": 1.2311, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 1.2978947368421054e-05, |
|
"loss": 1.5966, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.2963157894736842e-05, |
|
"loss": 1.5122, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 10.5, |
|
"learning_rate": 1.2947368421052633e-05, |
|
"loss": 1.2532, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 1.2931578947368421e-05, |
|
"loss": 1.3394, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 19.375, |
|
"learning_rate": 1.2915789473684212e-05, |
|
"loss": 1.6067, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.29e-05, |
|
"loss": 1.4977, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.288421052631579e-05, |
|
"loss": 1.4339, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 19.625, |
|
"learning_rate": 1.2868421052631579e-05, |
|
"loss": 1.4789, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 11.125, |
|
"learning_rate": 1.2852631578947368e-05, |
|
"loss": 1.3857, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 1.2836842105263158e-05, |
|
"loss": 1.4344, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 21.5, |
|
"learning_rate": 1.2821052631578947e-05, |
|
"loss": 1.416, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 23.875, |
|
"learning_rate": 1.2805263157894737e-05, |
|
"loss": 1.4628, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 16.5, |
|
"learning_rate": 1.2789473684210526e-05, |
|
"loss": 1.3098, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.2773684210526316e-05, |
|
"loss": 1.4423, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 18.625, |
|
"learning_rate": 1.2757894736842106e-05, |
|
"loss": 1.1756, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.2742105263157895e-05, |
|
"loss": 1.3788, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 14.1875, |
|
"learning_rate": 1.2726315789473685e-05, |
|
"loss": 1.2287, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 24.25, |
|
"learning_rate": 1.2710526315789474e-05, |
|
"loss": 1.1927, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 14.625, |
|
"learning_rate": 1.2694736842105264e-05, |
|
"loss": 1.5478, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 1.2678947368421053e-05, |
|
"loss": 1.2426, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 30.25, |
|
"learning_rate": 1.2663157894736843e-05, |
|
"loss": 1.1653, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 21.875, |
|
"learning_rate": 1.2647368421052632e-05, |
|
"loss": 1.3693, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 1.263157894736842e-05, |
|
"loss": 1.7071, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"eval_loss": 1.4587359428405762, |
|
"eval_runtime": 31.2193, |
|
"eval_samples_per_second": 32.032, |
|
"eval_steps_per_second": 32.032, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 12.25, |
|
"learning_rate": 1.2615789473684211e-05, |
|
"loss": 1.1877, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 33.25, |
|
"learning_rate": 1.26e-05, |
|
"loss": 1.6576, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 17.0, |
|
"learning_rate": 1.258421052631579e-05, |
|
"loss": 1.3037, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.2568421052631579e-05, |
|
"loss": 1.2634, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.2552631578947369e-05, |
|
"loss": 1.5936, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 23.625, |
|
"learning_rate": 1.2536842105263158e-05, |
|
"loss": 1.1655, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 12.625, |
|
"learning_rate": 1.2521052631578946e-05, |
|
"loss": 1.3446, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 7.34375, |
|
"learning_rate": 1.2505263157894737e-05, |
|
"loss": 1.5295, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 12.4375, |
|
"learning_rate": 1.2489473684210527e-05, |
|
"loss": 1.5361, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 1.2473684210526317e-05, |
|
"loss": 1.3638, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 15.4375, |
|
"learning_rate": 1.2457894736842106e-05, |
|
"loss": 1.472, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.2442105263157896e-05, |
|
"loss": 1.0827, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 12.0625, |
|
"learning_rate": 1.2426315789473685e-05, |
|
"loss": 1.2359, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 12.75, |
|
"learning_rate": 1.2410526315789474e-05, |
|
"loss": 1.4345, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 19.5, |
|
"learning_rate": 1.2394736842105264e-05, |
|
"loss": 1.5083, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 29.125, |
|
"learning_rate": 1.2378947368421053e-05, |
|
"loss": 1.2703, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.2363157894736843e-05, |
|
"loss": 1.2485, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 30.0, |
|
"learning_rate": 1.2347368421052631e-05, |
|
"loss": 0.992, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 27.625, |
|
"learning_rate": 1.2331578947368422e-05, |
|
"loss": 1.5069, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 23.375, |
|
"learning_rate": 1.231578947368421e-05, |
|
"loss": 1.5471, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.2299999999999999e-05, |
|
"loss": 1.3682, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 17.125, |
|
"learning_rate": 1.228421052631579e-05, |
|
"loss": 1.5041, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 15.875, |
|
"learning_rate": 1.2268421052631578e-05, |
|
"loss": 1.5487, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 29.75, |
|
"learning_rate": 1.2252631578947368e-05, |
|
"loss": 1.2019, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 12.625, |
|
"learning_rate": 1.2236842105263159e-05, |
|
"loss": 1.4014, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.2221052631578949e-05, |
|
"loss": 1.3086, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 15.5, |
|
"learning_rate": 1.2205263157894738e-05, |
|
"loss": 1.6796, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.2189473684210526e-05, |
|
"loss": 1.2801, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 9.75, |
|
"learning_rate": 1.2173684210526317e-05, |
|
"loss": 1.1822, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 27.75, |
|
"learning_rate": 1.2157894736842105e-05, |
|
"loss": 1.428, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 21.125, |
|
"learning_rate": 1.2142105263157896e-05, |
|
"loss": 1.4445, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.2126315789473684e-05, |
|
"loss": 1.745, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 1.2110526315789475e-05, |
|
"loss": 1.8124, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 31.0, |
|
"learning_rate": 1.2094736842105263e-05, |
|
"loss": 1.3533, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 11.5, |
|
"learning_rate": 1.2078947368421052e-05, |
|
"loss": 1.4255, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.2063157894736842e-05, |
|
"loss": 1.3926, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 13.75, |
|
"learning_rate": 1.2047368421052631e-05, |
|
"loss": 1.295, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.2031578947368421e-05, |
|
"loss": 1.0575, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 30.5, |
|
"learning_rate": 1.201578947368421e-05, |
|
"loss": 1.4961, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.3838, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.1984210526315789e-05, |
|
"loss": 1.5833, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.196842105263158e-05, |
|
"loss": 1.2771, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.195263157894737e-05, |
|
"loss": 1.6264, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 19.5, |
|
"learning_rate": 1.1936842105263158e-05, |
|
"loss": 1.4304, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 1.1921052631578949e-05, |
|
"loss": 1.1868, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 39.25, |
|
"learning_rate": 1.1905263157894737e-05, |
|
"loss": 1.4066, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 24.5, |
|
"learning_rate": 1.1889473684210528e-05, |
|
"loss": 1.3723, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 21.75, |
|
"learning_rate": 1.1873684210526316e-05, |
|
"loss": 1.507, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 22.625, |
|
"learning_rate": 1.1857894736842105e-05, |
|
"loss": 1.4822, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 23.25, |
|
"learning_rate": 1.1842105263157895e-05, |
|
"loss": 1.3431, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 14.25, |
|
"learning_rate": 1.1826315789473684e-05, |
|
"loss": 1.438, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 38.0, |
|
"learning_rate": 1.1810526315789474e-05, |
|
"loss": 1.3837, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.1794736842105263e-05, |
|
"loss": 1.3997, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 13.375, |
|
"learning_rate": 1.1778947368421053e-05, |
|
"loss": 1.5068, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 26.75, |
|
"learning_rate": 1.1763157894736842e-05, |
|
"loss": 1.5514, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 23.375, |
|
"learning_rate": 1.174736842105263e-05, |
|
"loss": 1.4679, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 12.6875, |
|
"learning_rate": 1.173157894736842e-05, |
|
"loss": 1.3298, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.171578947368421e-05, |
|
"loss": 1.2123, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 21.625, |
|
"learning_rate": 1.1700000000000001e-05, |
|
"loss": 1.3493, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.168421052631579e-05, |
|
"loss": 1.3984, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.166842105263158e-05, |
|
"loss": 1.435, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.1652631578947369e-05, |
|
"loss": 1.273, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.1636842105263158e-05, |
|
"loss": 1.2547, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 7.71875, |
|
"learning_rate": 1.1621052631578948e-05, |
|
"loss": 1.1227, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 29.125, |
|
"learning_rate": 1.1605263157894737e-05, |
|
"loss": 1.3045, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 20.25, |
|
"learning_rate": 1.1589473684210527e-05, |
|
"loss": 1.4858, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 14.4375, |
|
"learning_rate": 1.1573684210526316e-05, |
|
"loss": 1.1192, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 22.875, |
|
"learning_rate": 1.1557894736842106e-05, |
|
"loss": 1.4049, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 14.25, |
|
"learning_rate": 1.1542105263157895e-05, |
|
"loss": 1.3724, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 24.25, |
|
"learning_rate": 1.1526315789473683e-05, |
|
"loss": 1.3053, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 23.0, |
|
"learning_rate": 1.1510526315789474e-05, |
|
"loss": 1.3986, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.1494736842105262e-05, |
|
"loss": 1.5444, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 16.25, |
|
"learning_rate": 1.1478947368421053e-05, |
|
"loss": 1.6835, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 19.375, |
|
"learning_rate": 1.1463157894736841e-05, |
|
"loss": 1.2312, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 13.0625, |
|
"learning_rate": 1.1447368421052632e-05, |
|
"loss": 1.0698, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.1431578947368422e-05, |
|
"loss": 1.4871, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 10.875, |
|
"learning_rate": 1.141578947368421e-05, |
|
"loss": 1.257, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 11.8125, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 1.5711, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.138421052631579e-05, |
|
"loss": 1.592, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 25.5, |
|
"learning_rate": 1.136842105263158e-05, |
|
"loss": 0.9352, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.1352631578947369e-05, |
|
"loss": 1.3347, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 26.75, |
|
"learning_rate": 1.1336842105263159e-05, |
|
"loss": 1.0488, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.1321052631578948e-05, |
|
"loss": 1.7698, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 26.0, |
|
"learning_rate": 1.1305263157894736e-05, |
|
"loss": 1.2135, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.1289473684210527e-05, |
|
"loss": 1.5619, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.1273684210526315e-05, |
|
"loss": 1.5739, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 51.25, |
|
"learning_rate": 1.1257894736842106e-05, |
|
"loss": 1.3658, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.1242105263157894e-05, |
|
"loss": 1.6634, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 16.375, |
|
"learning_rate": 1.1226315789473685e-05, |
|
"loss": 1.1902, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 20.375, |
|
"learning_rate": 1.1210526315789473e-05, |
|
"loss": 1.2131, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 18.875, |
|
"learning_rate": 1.1194736842105264e-05, |
|
"loss": 1.4465, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 30.75, |
|
"learning_rate": 1.1178947368421054e-05, |
|
"loss": 1.6064, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 15.1875, |
|
"learning_rate": 1.1163157894736842e-05, |
|
"loss": 1.5864, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 23.25, |
|
"learning_rate": 1.1147368421052633e-05, |
|
"loss": 1.2329, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.1131578947368421e-05, |
|
"loss": 1.3795, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 24.0, |
|
"learning_rate": 1.1115789473684212e-05, |
|
"loss": 1.4095, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 19.0, |
|
"learning_rate": 1.11e-05, |
|
"loss": 1.1682, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 26.0, |
|
"learning_rate": 1.108421052631579e-05, |
|
"loss": 1.1633, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.106842105263158e-05, |
|
"loss": 1.4457, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.1052631578947368e-05, |
|
"loss": 1.4336, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 26.625, |
|
"learning_rate": 1.1036842105263158e-05, |
|
"loss": 1.1143, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 16.5, |
|
"learning_rate": 1.1021052631578947e-05, |
|
"loss": 1.2105, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 38.75, |
|
"learning_rate": 1.1005263157894737e-05, |
|
"loss": 1.6449, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 23.375, |
|
"learning_rate": 1.0989473684210526e-05, |
|
"loss": 1.3151, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 15.3125, |
|
"learning_rate": 1.0973684210526316e-05, |
|
"loss": 1.6362, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.0957894736842105e-05, |
|
"loss": 1.6158, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.0942105263157894e-05, |
|
"loss": 1.6301, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.0926315789473684e-05, |
|
"loss": 1.4099, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 28.625, |
|
"learning_rate": 1.0910526315789474e-05, |
|
"loss": 1.6574, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 18.75, |
|
"learning_rate": 1.0894736842105265e-05, |
|
"loss": 1.4458, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 16.25, |
|
"learning_rate": 1.0878947368421053e-05, |
|
"loss": 1.4384, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.0863157894736844e-05, |
|
"loss": 1.4054, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.0847368421052632e-05, |
|
"loss": 1.1473, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.0831578947368421e-05, |
|
"loss": 1.2524, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.0815789473684211e-05, |
|
"loss": 1.1765, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.375, |
|
"learning_rate": 1.08e-05, |
|
"loss": 1.315, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 33.75, |
|
"learning_rate": 1.078421052631579e-05, |
|
"loss": 1.6893, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.375, |
|
"learning_rate": 1.0768421052631579e-05, |
|
"loss": 1.3754, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 24.125, |
|
"learning_rate": 1.075263157894737e-05, |
|
"loss": 1.0585, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 26.75, |
|
"learning_rate": 1.0736842105263158e-05, |
|
"loss": 1.5381, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 21.25, |
|
"learning_rate": 1.0721052631578947e-05, |
|
"loss": 1.2503, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 21.625, |
|
"learning_rate": 1.0705263157894737e-05, |
|
"loss": 1.4186, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.0689473684210526e-05, |
|
"loss": 1.5066, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 25.375, |
|
"learning_rate": 1.0673684210526316e-05, |
|
"loss": 1.2001, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 21.5, |
|
"learning_rate": 1.0657894736842106e-05, |
|
"loss": 1.4222, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 20.25, |
|
"learning_rate": 1.0642105263157897e-05, |
|
"loss": 1.556, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 18.125, |
|
"learning_rate": 1.0626315789473685e-05, |
|
"loss": 1.1219, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.0610526315789474e-05, |
|
"loss": 1.5552, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 17.75, |
|
"learning_rate": 1.0594736842105264e-05, |
|
"loss": 1.4247, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 17.0, |
|
"learning_rate": 1.0578947368421053e-05, |
|
"loss": 1.0846, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 18.0, |
|
"learning_rate": 1.0563157894736843e-05, |
|
"loss": 1.4747, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 13.5, |
|
"learning_rate": 1.0547368421052632e-05, |
|
"loss": 1.3696, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 22.625, |
|
"learning_rate": 1.0531578947368422e-05, |
|
"loss": 1.2841, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.051578947368421e-05, |
|
"loss": 1.4871, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 16.875, |
|
"learning_rate": 1.05e-05, |
|
"loss": 1.0924, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 21.875, |
|
"learning_rate": 1.048421052631579e-05, |
|
"loss": 1.5268, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 23.5, |
|
"learning_rate": 1.0468421052631578e-05, |
|
"loss": 1.2527, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 21.625, |
|
"learning_rate": 1.0452631578947369e-05, |
|
"loss": 1.0235, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 20.5, |
|
"learning_rate": 1.0436842105263157e-05, |
|
"loss": 1.3699, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 8.8125, |
|
"learning_rate": 1.0421052631578948e-05, |
|
"loss": 1.2931, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 1.0405263157894736e-05, |
|
"loss": 0.9101, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.0389473684210527e-05, |
|
"loss": 1.0891, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 17.5, |
|
"learning_rate": 1.0373684210526317e-05, |
|
"loss": 1.2518, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 26.25, |
|
"learning_rate": 1.0357894736842106e-05, |
|
"loss": 1.4293, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 13.25, |
|
"learning_rate": 1.0342105263157896e-05, |
|
"loss": 1.2779, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.0326315789473685e-05, |
|
"loss": 1.6036, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 10.5625, |
|
"learning_rate": 1.0310526315789475e-05, |
|
"loss": 1.1933, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 24.375, |
|
"learning_rate": 1.0294736842105264e-05, |
|
"loss": 1.2207, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 15.0625, |
|
"learning_rate": 1.0278947368421052e-05, |
|
"loss": 0.9875, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 17.375, |
|
"learning_rate": 1.0263157894736843e-05, |
|
"loss": 1.6437, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 21.125, |
|
"learning_rate": 1.0247368421052631e-05, |
|
"loss": 1.5689, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 19.75, |
|
"learning_rate": 1.0231578947368422e-05, |
|
"loss": 1.4524, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 28.125, |
|
"learning_rate": 1.021578947368421e-05, |
|
"loss": 1.7354, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 27.75, |
|
"learning_rate": 1.02e-05, |
|
"loss": 1.5075, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 22.0, |
|
"learning_rate": 1.018421052631579e-05, |
|
"loss": 1.5991, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 31.5, |
|
"learning_rate": 1.0168421052631578e-05, |
|
"loss": 1.3886, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 14.375, |
|
"learning_rate": 1.0152631578947368e-05, |
|
"loss": 1.2167, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.0136842105263157e-05, |
|
"loss": 1.4397, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 21.0, |
|
"learning_rate": 1.0121052631578949e-05, |
|
"loss": 1.8334, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 15.75, |
|
"learning_rate": 1.0105263157894738e-05, |
|
"loss": 1.5306, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 15.5625, |
|
"learning_rate": 1.0089473684210528e-05, |
|
"loss": 1.5956, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 33.5, |
|
"learning_rate": 1.0073684210526317e-05, |
|
"loss": 1.6766, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.0057894736842105e-05, |
|
"loss": 1.2643, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 10.75, |
|
"learning_rate": 1.0042105263157896e-05, |
|
"loss": 1.4199, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 10.4375, |
|
"learning_rate": 1.0026315789473684e-05, |
|
"loss": 1.3638, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 19.625, |
|
"learning_rate": 1.0010526315789474e-05, |
|
"loss": 1.1781, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 18.875, |
|
"learning_rate": 9.994736842105263e-06, |
|
"loss": 1.5178, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 20.875, |
|
"learning_rate": 9.978947368421053e-06, |
|
"loss": 1.175, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 14.5625, |
|
"learning_rate": 9.963157894736842e-06, |
|
"loss": 1.3539, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 20.75, |
|
"learning_rate": 9.94736842105263e-06, |
|
"loss": 1.1516, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 21.875, |
|
"learning_rate": 9.931578947368421e-06, |
|
"loss": 1.3705, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.125, |
|
"learning_rate": 9.91578947368421e-06, |
|
"loss": 1.1967, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 32.25, |
|
"learning_rate": 9.9e-06, |
|
"loss": 1.2262, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 13.0625, |
|
"learning_rate": 9.884210526315789e-06, |
|
"loss": 1.0924, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.5, |
|
"learning_rate": 9.868421052631579e-06, |
|
"loss": 1.4826, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 18.5, |
|
"learning_rate": 9.85263157894737e-06, |
|
"loss": 1.7201, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 22.75, |
|
"learning_rate": 9.836842105263158e-06, |
|
"loss": 1.8823, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 16.75, |
|
"learning_rate": 9.821052631578948e-06, |
|
"loss": 1.2102, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.0, |
|
"learning_rate": 9.805263157894737e-06, |
|
"loss": 0.9203, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 27.125, |
|
"learning_rate": 9.789473684210527e-06, |
|
"loss": 1.3829, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 16.0, |
|
"learning_rate": 9.773684210526316e-06, |
|
"loss": 1.4504, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.0, |
|
"learning_rate": 9.757894736842106e-06, |
|
"loss": 1.6228, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 15.0, |
|
"learning_rate": 9.742105263157895e-06, |
|
"loss": 1.1509, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 13.3125, |
|
"learning_rate": 9.726315789473684e-06, |
|
"loss": 1.4963, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 26.875, |
|
"learning_rate": 9.710526315789474e-06, |
|
"loss": 1.2939, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 19.625, |
|
"learning_rate": 9.694736842105263e-06, |
|
"loss": 1.396, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 17.25, |
|
"learning_rate": 9.678947368421053e-06, |
|
"loss": 1.5547, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 23.0, |
|
"learning_rate": 9.663157894736842e-06, |
|
"loss": 1.3473, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 17.5, |
|
"learning_rate": 9.647368421052632e-06, |
|
"loss": 1.3847, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 25.5, |
|
"learning_rate": 9.63157894736842e-06, |
|
"loss": 1.0258, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 20.25, |
|
"learning_rate": 9.61578947368421e-06, |
|
"loss": 1.4518, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 21.5, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 1.5955, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 16.875, |
|
"learning_rate": 9.58421052631579e-06, |
|
"loss": 1.3555, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 29.25, |
|
"learning_rate": 9.56842105263158e-06, |
|
"loss": 1.306, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 18.375, |
|
"learning_rate": 9.552631578947369e-06, |
|
"loss": 1.0536, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 16.25, |
|
"learning_rate": 9.53684210526316e-06, |
|
"loss": 1.4253, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 22.25, |
|
"learning_rate": 9.521052631578948e-06, |
|
"loss": 1.335, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 27.75, |
|
"learning_rate": 9.505263157894737e-06, |
|
"loss": 1.2626, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 32.0, |
|
"learning_rate": 9.489473684210527e-06, |
|
"loss": 1.3404, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 10.25, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 1.5403, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 1.389373540878296, |
|
"eval_runtime": 30.7104, |
|
"eval_samples_per_second": 32.562, |
|
"eval_steps_per_second": 32.562, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 23.0, |
|
"learning_rate": 9.457894736842106e-06, |
|
"loss": 1.2409, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 15.125, |
|
"learning_rate": 9.442105263157894e-06, |
|
"loss": 1.2763, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 9.625, |
|
"learning_rate": 9.426315789473685e-06, |
|
"loss": 1.3625, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 10.25, |
|
"learning_rate": 9.410526315789473e-06, |
|
"loss": 1.2428, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 21.375, |
|
"learning_rate": 9.394736842105262e-06, |
|
"loss": 1.3786, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 24.5, |
|
"learning_rate": 9.378947368421052e-06, |
|
"loss": 1.5145, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 24.875, |
|
"learning_rate": 9.363157894736841e-06, |
|
"loss": 1.6081, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 20.25, |
|
"learning_rate": 9.347368421052631e-06, |
|
"loss": 1.7527, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 13.4375, |
|
"learning_rate": 9.331578947368422e-06, |
|
"loss": 1.2864, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 19.375, |
|
"learning_rate": 9.315789473684212e-06, |
|
"loss": 1.3088, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 30.375, |
|
"learning_rate": 9.3e-06, |
|
"loss": 1.3268, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 16.625, |
|
"learning_rate": 9.28421052631579e-06, |
|
"loss": 1.4926, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 33.25, |
|
"learning_rate": 9.26842105263158e-06, |
|
"loss": 1.414, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 22.125, |
|
"learning_rate": 9.252631578947368e-06, |
|
"loss": 1.7257, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 25.375, |
|
"learning_rate": 9.236842105263159e-06, |
|
"loss": 1.5941, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 14.375, |
|
"learning_rate": 9.221052631578947e-06, |
|
"loss": 1.351, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 26.375, |
|
"learning_rate": 9.205263157894738e-06, |
|
"loss": 1.2855, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 17.0, |
|
"learning_rate": 9.189473684210526e-06, |
|
"loss": 1.6742, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 18.125, |
|
"learning_rate": 9.173684210526315e-06, |
|
"loss": 1.4892, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 21.125, |
|
"learning_rate": 9.157894736842105e-06, |
|
"loss": 1.438, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 9.142105263157894e-06, |
|
"loss": 1.5795, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 24.25, |
|
"learning_rate": 9.126315789473684e-06, |
|
"loss": 1.3318, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 22.0, |
|
"learning_rate": 9.110526315789473e-06, |
|
"loss": 1.4483, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 21.375, |
|
"learning_rate": 9.094736842105263e-06, |
|
"loss": 1.547, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 12.4375, |
|
"learning_rate": 9.078947368421054e-06, |
|
"loss": 1.4173, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 21.0, |
|
"learning_rate": 9.063157894736842e-06, |
|
"loss": 1.3772, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 20.75, |
|
"learning_rate": 9.047368421052633e-06, |
|
"loss": 1.7047, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 16.125, |
|
"learning_rate": 9.031578947368421e-06, |
|
"loss": 1.1346, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 13.25, |
|
"learning_rate": 9.015789473684212e-06, |
|
"loss": 1.2722, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 21.25, |
|
"learning_rate": 9e-06, |
|
"loss": 1.611, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 19.125, |
|
"learning_rate": 8.98421052631579e-06, |
|
"loss": 1.4456, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 15.625, |
|
"learning_rate": 8.96842105263158e-06, |
|
"loss": 1.5282, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 26.375, |
|
"learning_rate": 8.952631578947368e-06, |
|
"loss": 1.1709, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 19.75, |
|
"learning_rate": 8.936842105263158e-06, |
|
"loss": 1.3028, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 13.625, |
|
"learning_rate": 8.921052631578947e-06, |
|
"loss": 1.3497, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 16.875, |
|
"learning_rate": 8.905263157894737e-06, |
|
"loss": 1.3032, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 8.889473684210526e-06, |
|
"loss": 1.282, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 22.625, |
|
"learning_rate": 8.873684210526316e-06, |
|
"loss": 1.404, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 22.125, |
|
"learning_rate": 8.857894736842105e-06, |
|
"loss": 1.2736, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 20.375, |
|
"learning_rate": 8.842105263157893e-06, |
|
"loss": 1.4249, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 33.5, |
|
"learning_rate": 8.826315789473684e-06, |
|
"loss": 1.484, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 27.25, |
|
"learning_rate": 8.810526315789474e-06, |
|
"loss": 1.5996, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 24.875, |
|
"learning_rate": 8.794736842105264e-06, |
|
"loss": 1.5918, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 18.25, |
|
"learning_rate": 8.778947368421053e-06, |
|
"loss": 1.3844, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 20.0, |
|
"learning_rate": 8.763157894736843e-06, |
|
"loss": 1.3872, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 14.375, |
|
"learning_rate": 8.747368421052632e-06, |
|
"loss": 1.181, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 26.25, |
|
"learning_rate": 8.731578947368422e-06, |
|
"loss": 1.3088, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 23.25, |
|
"learning_rate": 8.715789473684211e-06, |
|
"loss": 1.2746, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 32.0, |
|
"learning_rate": 8.7e-06, |
|
"loss": 1.2783, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 7.65625, |
|
"learning_rate": 8.68421052631579e-06, |
|
"loss": 1.3055, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 16.75, |
|
"learning_rate": 8.668421052631579e-06, |
|
"loss": 1.2688, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 28.25, |
|
"learning_rate": 8.652631578947369e-06, |
|
"loss": 1.3703, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 22.125, |
|
"learning_rate": 8.636842105263158e-06, |
|
"loss": 1.2664, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 18.875, |
|
"learning_rate": 8.621052631578948e-06, |
|
"loss": 1.3989, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 22.0, |
|
"learning_rate": 8.605263157894737e-06, |
|
"loss": 1.5021, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 18.75, |
|
"learning_rate": 8.589473684210525e-06, |
|
"loss": 1.4328, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 30.75, |
|
"learning_rate": 8.573684210526316e-06, |
|
"loss": 1.3705, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 22.625, |
|
"learning_rate": 8.557894736842104e-06, |
|
"loss": 1.0242, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 22.5, |
|
"learning_rate": 8.542105263157896e-06, |
|
"loss": 0.8372, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 17.875, |
|
"learning_rate": 8.526315789473685e-06, |
|
"loss": 1.4703, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 19.375, |
|
"learning_rate": 8.510526315789475e-06, |
|
"loss": 1.456, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 25.875, |
|
"learning_rate": 8.494736842105264e-06, |
|
"loss": 1.8036, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 17.625, |
|
"learning_rate": 8.478947368421053e-06, |
|
"loss": 1.7276, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 29.125, |
|
"learning_rate": 8.463157894736843e-06, |
|
"loss": 1.1681, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 18.875, |
|
"learning_rate": 8.447368421052632e-06, |
|
"loss": 1.5285, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 30.625, |
|
"learning_rate": 8.431578947368422e-06, |
|
"loss": 1.531, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 19.375, |
|
"learning_rate": 8.41578947368421e-06, |
|
"loss": 1.1936, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 18.75, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 1.5973, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 14.4375, |
|
"learning_rate": 8.38421052631579e-06, |
|
"loss": 1.5106, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 39.0, |
|
"learning_rate": 8.368421052631578e-06, |
|
"loss": 1.5852, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 18.75, |
|
"learning_rate": 8.352631578947369e-06, |
|
"loss": 1.2943, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 19.125, |
|
"learning_rate": 8.336842105263157e-06, |
|
"loss": 1.3911, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 26.25, |
|
"learning_rate": 8.321052631578947e-06, |
|
"loss": 1.5374, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 33.75, |
|
"learning_rate": 8.305263157894736e-06, |
|
"loss": 1.5726, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 23.375, |
|
"learning_rate": 8.289473684210526e-06, |
|
"loss": 1.3592, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 20.25, |
|
"learning_rate": 8.273684210526317e-06, |
|
"loss": 1.378, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 27.375, |
|
"learning_rate": 8.257894736842105e-06, |
|
"loss": 1.5718, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 25.25, |
|
"learning_rate": 8.242105263157896e-06, |
|
"loss": 1.5038, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 13.1875, |
|
"learning_rate": 8.226315789473684e-06, |
|
"loss": 1.4199, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 19.25, |
|
"learning_rate": 8.210526315789475e-06, |
|
"loss": 1.1204, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 14.5625, |
|
"learning_rate": 8.194736842105263e-06, |
|
"loss": 1.3619, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 21.375, |
|
"learning_rate": 8.178947368421054e-06, |
|
"loss": 1.9199, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 21.0, |
|
"learning_rate": 8.163157894736842e-06, |
|
"loss": 1.2522, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 21.75, |
|
"learning_rate": 8.147368421052631e-06, |
|
"loss": 1.4897, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 22.25, |
|
"learning_rate": 8.131578947368421e-06, |
|
"loss": 1.4521, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 22.875, |
|
"learning_rate": 8.11578947368421e-06, |
|
"loss": 1.554, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 16.5, |
|
"learning_rate": 8.1e-06, |
|
"loss": 1.562, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 16.375, |
|
"learning_rate": 8.084210526315789e-06, |
|
"loss": 1.6152, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 21.0, |
|
"learning_rate": 8.06842105263158e-06, |
|
"loss": 1.3458, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 24.375, |
|
"learning_rate": 8.052631578947368e-06, |
|
"loss": 1.2805, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 18.375, |
|
"learning_rate": 8.036842105263157e-06, |
|
"loss": 1.3182, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 33.5, |
|
"learning_rate": 8.021052631578949e-06, |
|
"loss": 1.2983, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 23.875, |
|
"learning_rate": 8.005263157894737e-06, |
|
"loss": 1.4759, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 18.5, |
|
"learning_rate": 7.989473684210528e-06, |
|
"loss": 1.5965, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 20.25, |
|
"learning_rate": 7.973684210526316e-06, |
|
"loss": 1.349, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 15.5, |
|
"learning_rate": 7.957894736842107e-06, |
|
"loss": 1.4254, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 12.0, |
|
"learning_rate": 7.942105263157895e-06, |
|
"loss": 1.2403, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 17.375, |
|
"learning_rate": 7.926315789473684e-06, |
|
"loss": 1.3203, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 17.0, |
|
"learning_rate": 7.910526315789474e-06, |
|
"loss": 1.2978, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 21.375, |
|
"learning_rate": 7.894736842105263e-06, |
|
"loss": 1.2262, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 15.875, |
|
"learning_rate": 7.878947368421053e-06, |
|
"loss": 1.4521, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 13.4375, |
|
"learning_rate": 7.863157894736842e-06, |
|
"loss": 1.713, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 22.0, |
|
"learning_rate": 7.847368421052632e-06, |
|
"loss": 1.4544, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 65.5, |
|
"learning_rate": 7.831578947368421e-06, |
|
"loss": 1.5237, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 16.625, |
|
"learning_rate": 7.81578947368421e-06, |
|
"loss": 1.3374, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 15.625, |
|
"learning_rate": 7.8e-06, |
|
"loss": 1.5768, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 11.0625, |
|
"learning_rate": 7.784210526315789e-06, |
|
"loss": 1.0055, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 19.125, |
|
"learning_rate": 7.768421052631579e-06, |
|
"loss": 1.2459, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 20.625, |
|
"learning_rate": 7.75263157894737e-06, |
|
"loss": 1.4636, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 21.0, |
|
"learning_rate": 7.73684210526316e-06, |
|
"loss": 1.3925, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 17.125, |
|
"learning_rate": 7.721052631578948e-06, |
|
"loss": 1.1989, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 27.25, |
|
"learning_rate": 7.705263157894737e-06, |
|
"loss": 1.4218, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 38.25, |
|
"learning_rate": 7.689473684210527e-06, |
|
"loss": 1.5287, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 15.3125, |
|
"learning_rate": 7.673684210526316e-06, |
|
"loss": 1.3331, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 10.0, |
|
"learning_rate": 7.657894736842106e-06, |
|
"loss": 1.562, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 24.0, |
|
"learning_rate": 7.642105263157895e-06, |
|
"loss": 1.5789, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 29.375, |
|
"learning_rate": 7.626315789473685e-06, |
|
"loss": 1.2715, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 19.0, |
|
"learning_rate": 7.610526315789474e-06, |
|
"loss": 1.5294, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 23.0, |
|
"learning_rate": 7.594736842105262e-06, |
|
"loss": 1.2915, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 19.125, |
|
"learning_rate": 7.578947368421053e-06, |
|
"loss": 1.2321, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 23.75, |
|
"learning_rate": 7.563157894736842e-06, |
|
"loss": 1.3039, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 29.375, |
|
"learning_rate": 7.5473684210526326e-06, |
|
"loss": 1.2118, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 26.125, |
|
"learning_rate": 7.531578947368421e-06, |
|
"loss": 1.3432, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 26.25, |
|
"learning_rate": 7.5157894736842115e-06, |
|
"loss": 1.2973, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 17.5, |
|
"learning_rate": 7.5e-06, |
|
"loss": 1.3631, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 19.25, |
|
"learning_rate": 7.48421052631579e-06, |
|
"loss": 1.5015, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 15.0, |
|
"learning_rate": 7.468421052631579e-06, |
|
"loss": 1.0094, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 18.5, |
|
"learning_rate": 7.452631578947369e-06, |
|
"loss": 1.4658, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 25.5, |
|
"learning_rate": 7.436842105263157e-06, |
|
"loss": 1.2255, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 22.125, |
|
"learning_rate": 7.421052631578948e-06, |
|
"loss": 1.1919, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 19.75, |
|
"learning_rate": 7.405263157894737e-06, |
|
"loss": 1.2764, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 20.875, |
|
"learning_rate": 7.389473684210527e-06, |
|
"loss": 1.5922, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 15.375, |
|
"learning_rate": 7.373684210526316e-06, |
|
"loss": 1.3, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 23.75, |
|
"learning_rate": 7.357894736842106e-06, |
|
"loss": 1.3065, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 14.25, |
|
"learning_rate": 7.342105263157895e-06, |
|
"loss": 1.35, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 7.326315789473684e-06, |
|
"loss": 0.9403, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 44.75, |
|
"learning_rate": 7.310526315789473e-06, |
|
"loss": 1.5757, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 40.25, |
|
"learning_rate": 7.2947368421052636e-06, |
|
"loss": 1.7024, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 20.625, |
|
"learning_rate": 7.278947368421053e-06, |
|
"loss": 1.7692, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 25.5, |
|
"learning_rate": 7.2631578947368426e-06, |
|
"loss": 1.4494, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 22.75, |
|
"learning_rate": 7.247368421052632e-06, |
|
"loss": 1.3622, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 8.75, |
|
"learning_rate": 7.2315789473684215e-06, |
|
"loss": 1.1007, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 16.375, |
|
"learning_rate": 7.21578947368421e-06, |
|
"loss": 1.2776, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 23.125, |
|
"learning_rate": 7.2e-06, |
|
"loss": 1.2975, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 23.0, |
|
"learning_rate": 7.184210526315789e-06, |
|
"loss": 1.3911, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 16.375, |
|
"learning_rate": 7.1684210526315795e-06, |
|
"loss": 1.0816, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 23.125, |
|
"learning_rate": 7.152631578947369e-06, |
|
"loss": 1.0682, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 14.0625, |
|
"learning_rate": 7.1368421052631585e-06, |
|
"loss": 1.2556, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 14.75, |
|
"learning_rate": 7.121052631578948e-06, |
|
"loss": 1.4108, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 24.5, |
|
"learning_rate": 7.105263157894737e-06, |
|
"loss": 1.3118, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 8.75, |
|
"learning_rate": 7.089473684210526e-06, |
|
"loss": 1.2675, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 23.0, |
|
"learning_rate": 7.073684210526316e-06, |
|
"loss": 1.4626, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 17.5, |
|
"learning_rate": 7.057894736842105e-06, |
|
"loss": 1.6127, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 24.875, |
|
"learning_rate": 7.042105263157895e-06, |
|
"loss": 1.4965, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 7.026315789473685e-06, |
|
"loss": 1.5175, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 21.625, |
|
"learning_rate": 7.010526315789474e-06, |
|
"loss": 1.181, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 21.125, |
|
"learning_rate": 6.994736842105263e-06, |
|
"loss": 1.0776, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 24.875, |
|
"learning_rate": 6.9789473684210525e-06, |
|
"loss": 1.2104, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 33.25, |
|
"learning_rate": 6.963157894736842e-06, |
|
"loss": 1.3473, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 20.0, |
|
"learning_rate": 6.9473684210526315e-06, |
|
"loss": 1.2039, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 10.125, |
|
"learning_rate": 6.931578947368421e-06, |
|
"loss": 1.1147, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 14.25, |
|
"learning_rate": 6.9157894736842105e-06, |
|
"loss": 1.0561, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 6.900000000000001e-06, |
|
"loss": 1.3285, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 68.5, |
|
"learning_rate": 6.8842105263157895e-06, |
|
"loss": 1.4085, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 26.0, |
|
"learning_rate": 6.868421052631579e-06, |
|
"loss": 1.4786, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 20.125, |
|
"learning_rate": 6.8526315789473685e-06, |
|
"loss": 1.1576, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 15.5, |
|
"learning_rate": 6.836842105263158e-06, |
|
"loss": 1.2392, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 24.125, |
|
"learning_rate": 6.8210526315789475e-06, |
|
"loss": 1.5732, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 28.0, |
|
"learning_rate": 6.805263157894737e-06, |
|
"loss": 1.6376, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 6.7894736842105264e-06, |
|
"loss": 1.4348, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 36.5, |
|
"learning_rate": 6.773684210526317e-06, |
|
"loss": 1.1923, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 16.625, |
|
"learning_rate": 6.7578947368421054e-06, |
|
"loss": 1.3055, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 16.5, |
|
"learning_rate": 6.742105263157895e-06, |
|
"loss": 1.5263, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 21.25, |
|
"learning_rate": 6.726315789473684e-06, |
|
"loss": 1.4074, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 11.0, |
|
"learning_rate": 6.710526315789474e-06, |
|
"loss": 1.2683, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 25.125, |
|
"learning_rate": 6.694736842105263e-06, |
|
"loss": 1.2666, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 17.875, |
|
"learning_rate": 6.678947368421053e-06, |
|
"loss": 1.6824, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 22.0, |
|
"learning_rate": 6.663157894736842e-06, |
|
"loss": 1.9028, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 29.0, |
|
"learning_rate": 6.647368421052631e-06, |
|
"loss": 1.5481, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 15.3125, |
|
"learning_rate": 6.631578947368421e-06, |
|
"loss": 1.4571, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 19.5, |
|
"learning_rate": 6.615789473684211e-06, |
|
"loss": 1.3493, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 18.125, |
|
"learning_rate": 6.6e-06, |
|
"loss": 1.1466, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 18.5, |
|
"learning_rate": 6.58421052631579e-06, |
|
"loss": 1.2788, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 23.5, |
|
"learning_rate": 6.568421052631579e-06, |
|
"loss": 1.2194, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 24.375, |
|
"learning_rate": 6.552631578947369e-06, |
|
"loss": 1.1311, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 6.5368421052631575e-06, |
|
"loss": 0.91, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 17.5, |
|
"learning_rate": 6.521052631578947e-06, |
|
"loss": 1.5186, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 26.0, |
|
"learning_rate": 6.505263157894737e-06, |
|
"loss": 1.2989, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 21.375, |
|
"learning_rate": 6.489473684210527e-06, |
|
"loss": 1.0283, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 25.5, |
|
"learning_rate": 6.473684210526316e-06, |
|
"loss": 1.4068, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 14.125, |
|
"learning_rate": 6.457894736842106e-06, |
|
"loss": 1.5789, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 16.625, |
|
"learning_rate": 6.442105263157895e-06, |
|
"loss": 1.1244, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 19.75, |
|
"learning_rate": 6.426315789473684e-06, |
|
"loss": 1.2412, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 10.3125, |
|
"learning_rate": 6.410526315789473e-06, |
|
"loss": 1.336, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 22.625, |
|
"learning_rate": 6.394736842105263e-06, |
|
"loss": 1.2082, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 22.375, |
|
"learning_rate": 6.378947368421053e-06, |
|
"loss": 1.3572, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 15.875, |
|
"learning_rate": 6.363157894736843e-06, |
|
"loss": 1.4054, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 25.625, |
|
"learning_rate": 6.347368421052632e-06, |
|
"loss": 1.4423, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 21.5, |
|
"learning_rate": 6.331578947368422e-06, |
|
"loss": 1.1326, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 33.25, |
|
"learning_rate": 6.31578947368421e-06, |
|
"loss": 1.2669, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"eval_loss": 1.3724089860916138, |
|
"eval_runtime": 30.7072, |
|
"eval_samples_per_second": 32.566, |
|
"eval_steps_per_second": 32.566, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 24.375, |
|
"learning_rate": 6.3e-06, |
|
"loss": 1.2768, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 40.5, |
|
"learning_rate": 6.284210526315789e-06, |
|
"loss": 1.2205, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 28.875, |
|
"learning_rate": 6.268421052631579e-06, |
|
"loss": 1.4383, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 25.125, |
|
"learning_rate": 6.252631578947368e-06, |
|
"loss": 1.3299, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 39.5, |
|
"learning_rate": 6.236842105263159e-06, |
|
"loss": 1.5192, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 19.0, |
|
"learning_rate": 6.221052631578948e-06, |
|
"loss": 1.4088, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 26.5, |
|
"learning_rate": 6.205263157894737e-06, |
|
"loss": 1.4427, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 18.25, |
|
"learning_rate": 6.189473684210526e-06, |
|
"loss": 1.2549, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 23.25, |
|
"learning_rate": 6.173684210526316e-06, |
|
"loss": 1.7159, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 15.1875, |
|
"learning_rate": 6.157894736842105e-06, |
|
"loss": 1.2445, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 20.375, |
|
"learning_rate": 6.142105263157895e-06, |
|
"loss": 0.9629, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 11.125, |
|
"learning_rate": 6.126315789473684e-06, |
|
"loss": 1.3391, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 17.75, |
|
"learning_rate": 6.1105263157894746e-06, |
|
"loss": 1.1964, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 26.875, |
|
"learning_rate": 6.094736842105263e-06, |
|
"loss": 1.1751, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 14.0625, |
|
"learning_rate": 6.078947368421053e-06, |
|
"loss": 1.2314, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 21.5, |
|
"learning_rate": 6.063157894736842e-06, |
|
"loss": 1.3675, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 30.75, |
|
"learning_rate": 6.047368421052632e-06, |
|
"loss": 1.0896, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 18.25, |
|
"learning_rate": 6.031578947368421e-06, |
|
"loss": 1.5274, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 17.25, |
|
"learning_rate": 6.015789473684211e-06, |
|
"loss": 1.2702, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 21.5, |
|
"learning_rate": 6e-06, |
|
"loss": 1.5554, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 21.0, |
|
"learning_rate": 5.98421052631579e-06, |
|
"loss": 1.5431, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 13.1875, |
|
"learning_rate": 5.968421052631579e-06, |
|
"loss": 1.6954, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 5.952631578947369e-06, |
|
"loss": 1.5684, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 11.8125, |
|
"learning_rate": 5.936842105263158e-06, |
|
"loss": 1.2937, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 14.1875, |
|
"learning_rate": 5.921052631578948e-06, |
|
"loss": 1.4135, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 19.625, |
|
"learning_rate": 5.905263157894737e-06, |
|
"loss": 1.1792, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 18.0, |
|
"learning_rate": 5.889473684210527e-06, |
|
"loss": 1.4643, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 20.5, |
|
"learning_rate": 5.873684210526315e-06, |
|
"loss": 1.4952, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 22.0, |
|
"learning_rate": 5.857894736842105e-06, |
|
"loss": 1.4588, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 20.5, |
|
"learning_rate": 5.842105263157895e-06, |
|
"loss": 1.5374, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 26.625, |
|
"learning_rate": 5.8263157894736846e-06, |
|
"loss": 1.4717, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 20.0, |
|
"learning_rate": 5.810526315789474e-06, |
|
"loss": 1.1332, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 5.7947368421052635e-06, |
|
"loss": 1.4775, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 22.5, |
|
"learning_rate": 5.778947368421053e-06, |
|
"loss": 1.7034, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 18.875, |
|
"learning_rate": 5.763157894736842e-06, |
|
"loss": 1.3331, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 10.3125, |
|
"learning_rate": 5.747368421052631e-06, |
|
"loss": 1.1408, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 21.875, |
|
"learning_rate": 5.731578947368421e-06, |
|
"loss": 1.6183, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 27.375, |
|
"learning_rate": 5.715789473684211e-06, |
|
"loss": 1.282, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 17.0, |
|
"learning_rate": 5.7000000000000005e-06, |
|
"loss": 1.4119, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 19.125, |
|
"learning_rate": 5.68421052631579e-06, |
|
"loss": 1.3382, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 17.0, |
|
"learning_rate": 5.6684210526315795e-06, |
|
"loss": 1.6847, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 18.75, |
|
"learning_rate": 5.652631578947368e-06, |
|
"loss": 1.3082, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 29.875, |
|
"learning_rate": 5.636842105263158e-06, |
|
"loss": 1.3222, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 13.125, |
|
"learning_rate": 5.621052631578947e-06, |
|
"loss": 1.4203, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 22.5, |
|
"learning_rate": 5.605263157894737e-06, |
|
"loss": 1.3574, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 44.0, |
|
"learning_rate": 5.589473684210527e-06, |
|
"loss": 1.421, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 28.625, |
|
"learning_rate": 5.573684210526316e-06, |
|
"loss": 1.5993, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 19.375, |
|
"learning_rate": 5.557894736842106e-06, |
|
"loss": 1.2983, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 23.625, |
|
"learning_rate": 5.542105263157895e-06, |
|
"loss": 1.3245, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 27.125, |
|
"learning_rate": 5.526315789473684e-06, |
|
"loss": 1.3728, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 17.75, |
|
"learning_rate": 5.5105263157894735e-06, |
|
"loss": 1.2081, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 27.875, |
|
"learning_rate": 5.494736842105263e-06, |
|
"loss": 1.2897, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 17.75, |
|
"learning_rate": 5.4789473684210525e-06, |
|
"loss": 1.373, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 19.5, |
|
"learning_rate": 5.463157894736842e-06, |
|
"loss": 1.4809, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 43.25, |
|
"learning_rate": 5.447368421052632e-06, |
|
"loss": 1.4633, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 37.75, |
|
"learning_rate": 5.431578947368422e-06, |
|
"loss": 1.3994, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 25.625, |
|
"learning_rate": 5.4157894736842105e-06, |
|
"loss": 1.3844, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 15.125, |
|
"learning_rate": 5.4e-06, |
|
"loss": 1.3222, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 21.5, |
|
"learning_rate": 5.3842105263157895e-06, |
|
"loss": 1.2739, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 76.0, |
|
"learning_rate": 5.368421052631579e-06, |
|
"loss": 1.6814, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 15.25, |
|
"learning_rate": 5.3526315789473684e-06, |
|
"loss": 1.3229, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 21.25, |
|
"learning_rate": 5.336842105263158e-06, |
|
"loss": 1.7966, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 28.625, |
|
"learning_rate": 5.321052631578948e-06, |
|
"loss": 1.2918, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 20.75, |
|
"learning_rate": 5.305263157894737e-06, |
|
"loss": 1.2418, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 10.125, |
|
"learning_rate": 5.289473684210526e-06, |
|
"loss": 1.0768, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 21.875, |
|
"learning_rate": 5.273684210526316e-06, |
|
"loss": 0.9163, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 21.375, |
|
"learning_rate": 5.257894736842105e-06, |
|
"loss": 1.3902, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 24.0, |
|
"learning_rate": 5.242105263157895e-06, |
|
"loss": 1.2549, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 29.25, |
|
"learning_rate": 5.226315789473684e-06, |
|
"loss": 1.4466, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 20.375, |
|
"learning_rate": 5.210526315789474e-06, |
|
"loss": 1.2282, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 28.5, |
|
"learning_rate": 5.194736842105263e-06, |
|
"loss": 1.5241, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 18.75, |
|
"learning_rate": 5.178947368421053e-06, |
|
"loss": 1.4206, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 20.375, |
|
"learning_rate": 5.163157894736842e-06, |
|
"loss": 1.5183, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 31.25, |
|
"learning_rate": 5.147368421052632e-06, |
|
"loss": 1.1815, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 24.375, |
|
"learning_rate": 5.131578947368421e-06, |
|
"loss": 1.2572, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 14.375, |
|
"learning_rate": 5.115789473684211e-06, |
|
"loss": 1.4552, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 19.25, |
|
"learning_rate": 5.1e-06, |
|
"loss": 1.5616, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 35.5, |
|
"learning_rate": 5.084210526315789e-06, |
|
"loss": 1.37, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 15.625, |
|
"learning_rate": 5.0684210526315784e-06, |
|
"loss": 1.6033, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 22.875, |
|
"learning_rate": 5.052631578947369e-06, |
|
"loss": 1.3356, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 35.5, |
|
"learning_rate": 5.036842105263158e-06, |
|
"loss": 1.5591, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 28.875, |
|
"learning_rate": 5.021052631578948e-06, |
|
"loss": 1.2637, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 20.125, |
|
"learning_rate": 5.005263157894737e-06, |
|
"loss": 1.2172, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 20.75, |
|
"learning_rate": 4.989473684210527e-06, |
|
"loss": 1.3636, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 16.625, |
|
"learning_rate": 4.973684210526315e-06, |
|
"loss": 1.3217, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 23.75, |
|
"learning_rate": 4.957894736842105e-06, |
|
"loss": 1.4014, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 15.5, |
|
"learning_rate": 4.942105263157894e-06, |
|
"loss": 1.4636, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 19.0, |
|
"learning_rate": 4.926315789473685e-06, |
|
"loss": 1.4897, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 24.5, |
|
"learning_rate": 4.910526315789474e-06, |
|
"loss": 1.2786, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 20.25, |
|
"learning_rate": 4.894736842105264e-06, |
|
"loss": 1.4247, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 22.25, |
|
"learning_rate": 4.878947368421053e-06, |
|
"loss": 1.2238, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 23.75, |
|
"learning_rate": 4.863157894736842e-06, |
|
"loss": 1.5511, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 14.1875, |
|
"learning_rate": 4.847368421052631e-06, |
|
"loss": 1.3549, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 22.875, |
|
"learning_rate": 4.831578947368421e-06, |
|
"loss": 1.2673, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 19.0, |
|
"learning_rate": 4.81578947368421e-06, |
|
"loss": 1.4216, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 17.875, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.1628, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 17.375, |
|
"learning_rate": 4.78421052631579e-06, |
|
"loss": 1.2581, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 18.375, |
|
"learning_rate": 4.76842105263158e-06, |
|
"loss": 1.3978, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 20.25, |
|
"learning_rate": 4.752631578947368e-06, |
|
"loss": 1.1552, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 19.75, |
|
"learning_rate": 4.736842105263158e-06, |
|
"loss": 1.5662, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 25.375, |
|
"learning_rate": 4.721052631578947e-06, |
|
"loss": 1.1584, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 17.25, |
|
"learning_rate": 4.705263157894737e-06, |
|
"loss": 1.4341, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 33.5, |
|
"learning_rate": 4.689473684210526e-06, |
|
"loss": 1.3088, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 17.5, |
|
"learning_rate": 4.673684210526316e-06, |
|
"loss": 1.4855, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 33.25, |
|
"learning_rate": 4.657894736842106e-06, |
|
"loss": 1.2726, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 24.75, |
|
"learning_rate": 4.642105263157895e-06, |
|
"loss": 1.2899, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 22.875, |
|
"learning_rate": 4.626315789473684e-06, |
|
"loss": 1.4073, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 16.5, |
|
"learning_rate": 4.610526315789474e-06, |
|
"loss": 0.8528, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 18.25, |
|
"learning_rate": 4.594736842105263e-06, |
|
"loss": 1.6042, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 21.625, |
|
"learning_rate": 4.578947368421053e-06, |
|
"loss": 1.3084, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 31.5, |
|
"learning_rate": 4.563157894736842e-06, |
|
"loss": 1.3855, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 21.125, |
|
"learning_rate": 4.547368421052632e-06, |
|
"loss": 1.1414, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 18.5, |
|
"learning_rate": 4.531578947368421e-06, |
|
"loss": 1.4506, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 13.8125, |
|
"learning_rate": 4.515789473684211e-06, |
|
"loss": 1.1001, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 19.0, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.2032, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 24.25, |
|
"learning_rate": 4.48421052631579e-06, |
|
"loss": 1.2787, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 17.625, |
|
"learning_rate": 4.468421052631579e-06, |
|
"loss": 1.5579, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 28.5, |
|
"learning_rate": 4.452631578947369e-06, |
|
"loss": 1.3317, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 19.625, |
|
"learning_rate": 4.436842105263158e-06, |
|
"loss": 1.459, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 25.5, |
|
"learning_rate": 4.421052631578947e-06, |
|
"loss": 1.1764, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 19.5, |
|
"learning_rate": 4.405263157894737e-06, |
|
"loss": 1.5463, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 22.25, |
|
"learning_rate": 4.3894736842105266e-06, |
|
"loss": 1.4262, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 7.6875, |
|
"learning_rate": 4.373684210526316e-06, |
|
"loss": 1.2997, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 13.875, |
|
"learning_rate": 4.3578947368421055e-06, |
|
"loss": 1.5425, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.342105263157895e-06, |
|
"loss": 1.2184, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 30.0, |
|
"learning_rate": 4.3263157894736845e-06, |
|
"loss": 1.2771, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 20.125, |
|
"learning_rate": 4.310526315789474e-06, |
|
"loss": 1.3415, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 9.0625, |
|
"learning_rate": 4.294736842105263e-06, |
|
"loss": 1.0402, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 21.0, |
|
"learning_rate": 4.278947368421052e-06, |
|
"loss": 1.4532, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 25.75, |
|
"learning_rate": 4.2631578947368425e-06, |
|
"loss": 1.5666, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 32.25, |
|
"learning_rate": 4.247368421052632e-06, |
|
"loss": 1.1727, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 15.5625, |
|
"learning_rate": 4.2315789473684215e-06, |
|
"loss": 1.0616, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 19.5, |
|
"learning_rate": 4.215789473684211e-06, |
|
"loss": 1.4833, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 20.25, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 1.1481, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 25.25, |
|
"learning_rate": 4.184210526315789e-06, |
|
"loss": 1.5027, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 17.75, |
|
"learning_rate": 4.168421052631579e-06, |
|
"loss": 1.4471, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 31.625, |
|
"learning_rate": 4.152631578947368e-06, |
|
"loss": 1.2989, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 25.0, |
|
"learning_rate": 4.136842105263158e-06, |
|
"loss": 1.2379, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 25.0, |
|
"learning_rate": 4.121052631578948e-06, |
|
"loss": 1.2729, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 33.0, |
|
"learning_rate": 4.105263157894737e-06, |
|
"loss": 1.3349, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 20.75, |
|
"learning_rate": 4.089473684210527e-06, |
|
"loss": 1.6087, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 18.25, |
|
"learning_rate": 4.0736842105263155e-06, |
|
"loss": 1.3135, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 24.125, |
|
"learning_rate": 4.057894736842105e-06, |
|
"loss": 1.5614, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 20.5, |
|
"learning_rate": 4.0421052631578945e-06, |
|
"loss": 1.4008, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 25.0, |
|
"learning_rate": 4.026315789473684e-06, |
|
"loss": 1.2597, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 25.5, |
|
"learning_rate": 4.010526315789474e-06, |
|
"loss": 1.2063, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 17.625, |
|
"learning_rate": 3.994736842105264e-06, |
|
"loss": 1.1178, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 47.25, |
|
"learning_rate": 3.978947368421053e-06, |
|
"loss": 1.7909, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 20.5, |
|
"learning_rate": 3.963157894736842e-06, |
|
"loss": 1.5256, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 17.75, |
|
"learning_rate": 3.9473684210526315e-06, |
|
"loss": 1.14, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 20.625, |
|
"learning_rate": 3.931578947368421e-06, |
|
"loss": 1.2839, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 22.125, |
|
"learning_rate": 3.9157894736842104e-06, |
|
"loss": 1.3771, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 10.625, |
|
"learning_rate": 3.9e-06, |
|
"loss": 1.3, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 11.0, |
|
"learning_rate": 3.884210526315789e-06, |
|
"loss": 0.8222, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 16.375, |
|
"learning_rate": 3.86842105263158e-06, |
|
"loss": 1.5128, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 12.0625, |
|
"learning_rate": 3.852631578947368e-06, |
|
"loss": 1.4502, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 16.375, |
|
"learning_rate": 3.836842105263158e-06, |
|
"loss": 1.0498, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 16.5, |
|
"learning_rate": 3.821052631578947e-06, |
|
"loss": 1.2714, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 24.5, |
|
"learning_rate": 3.805263157894737e-06, |
|
"loss": 1.0995, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 31.5, |
|
"learning_rate": 3.7894736842105264e-06, |
|
"loss": 1.3249, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 21.5, |
|
"learning_rate": 3.7736842105263163e-06, |
|
"loss": 1.4243, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 29.625, |
|
"learning_rate": 3.7578947368421058e-06, |
|
"loss": 1.4749, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 31.625, |
|
"learning_rate": 3.742105263157895e-06, |
|
"loss": 1.3472, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 13.125, |
|
"learning_rate": 3.7263157894736843e-06, |
|
"loss": 1.2162, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 23.125, |
|
"learning_rate": 3.710526315789474e-06, |
|
"loss": 1.539, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 21.625, |
|
"learning_rate": 3.6947368421052633e-06, |
|
"loss": 1.4024, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 15.6875, |
|
"learning_rate": 3.678947368421053e-06, |
|
"loss": 1.2368, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 43.0, |
|
"learning_rate": 3.663157894736842e-06, |
|
"loss": 1.3536, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 15.5625, |
|
"learning_rate": 3.6473684210526318e-06, |
|
"loss": 1.4893, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 12.25, |
|
"learning_rate": 3.6315789473684213e-06, |
|
"loss": 1.197, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 23.375, |
|
"learning_rate": 3.6157894736842108e-06, |
|
"loss": 1.3298, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 14.75, |
|
"learning_rate": 3.6e-06, |
|
"loss": 0.8856, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 27.875, |
|
"learning_rate": 3.5842105263157898e-06, |
|
"loss": 1.3668, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 13.0, |
|
"learning_rate": 3.5684210526315792e-06, |
|
"loss": 1.6462, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 19.625, |
|
"learning_rate": 3.5526315789473683e-06, |
|
"loss": 1.3835, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 14.1875, |
|
"learning_rate": 3.536842105263158e-06, |
|
"loss": 1.3977, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 27.75, |
|
"learning_rate": 3.5210526315789473e-06, |
|
"loss": 1.5368, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 12.25, |
|
"learning_rate": 3.505263157894737e-06, |
|
"loss": 1.781, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 22.5, |
|
"learning_rate": 3.4894736842105263e-06, |
|
"loss": 1.6009, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 45.25, |
|
"learning_rate": 3.4736842105263158e-06, |
|
"loss": 0.9749, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 23.875, |
|
"learning_rate": 3.4578947368421053e-06, |
|
"loss": 1.4825, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 17.125, |
|
"learning_rate": 3.4421052631578947e-06, |
|
"loss": 1.4344, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 21.5, |
|
"learning_rate": 3.4263157894736842e-06, |
|
"loss": 1.2112, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 12.25, |
|
"learning_rate": 3.4105263157894737e-06, |
|
"loss": 1.1979, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 7.8125, |
|
"learning_rate": 3.3947368421052632e-06, |
|
"loss": 1.1706, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 10.8125, |
|
"learning_rate": 3.3789473684210527e-06, |
|
"loss": 1.4322, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 25.875, |
|
"learning_rate": 3.363157894736842e-06, |
|
"loss": 1.2244, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 18.875, |
|
"learning_rate": 3.3473684210526317e-06, |
|
"loss": 1.4572, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 20.375, |
|
"learning_rate": 3.331578947368421e-06, |
|
"loss": 1.3651, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 18.625, |
|
"learning_rate": 3.3157894736842107e-06, |
|
"loss": 1.1065, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 41.75, |
|
"learning_rate": 3.3e-06, |
|
"loss": 1.3439, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 14.25, |
|
"learning_rate": 3.2842105263157897e-06, |
|
"loss": 1.2267, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 37.25, |
|
"learning_rate": 3.2684210526315787e-06, |
|
"loss": 1.2631, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 19.125, |
|
"learning_rate": 3.2526315789473686e-06, |
|
"loss": 1.3705, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 19.75, |
|
"learning_rate": 3.236842105263158e-06, |
|
"loss": 1.0897, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 25.125, |
|
"learning_rate": 3.2210526315789476e-06, |
|
"loss": 1.3476, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 19.75, |
|
"learning_rate": 3.2052631578947367e-06, |
|
"loss": 1.5058, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 23.625, |
|
"learning_rate": 3.1894736842105266e-06, |
|
"loss": 1.6535, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 11.5625, |
|
"learning_rate": 3.173684210526316e-06, |
|
"loss": 1.0905, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 20.125, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 1.2503, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"eval_loss": 1.3293771743774414, |
|
"eval_runtime": 30.7768, |
|
"eval_samples_per_second": 32.492, |
|
"eval_steps_per_second": 32.492, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 22.625, |
|
"learning_rate": 3.1421052631578947e-06, |
|
"loss": 1.3055, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 23.75, |
|
"learning_rate": 3.126315789473684e-06, |
|
"loss": 1.3945, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 34.5, |
|
"learning_rate": 3.110526315789474e-06, |
|
"loss": 1.4867, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 25.125, |
|
"learning_rate": 3.094736842105263e-06, |
|
"loss": 1.208, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 24.0, |
|
"learning_rate": 3.0789473684210526e-06, |
|
"loss": 1.1104, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 21.5, |
|
"learning_rate": 3.063157894736842e-06, |
|
"loss": 1.4004, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 19.375, |
|
"learning_rate": 3.0473684210526316e-06, |
|
"loss": 1.4243, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 26.375, |
|
"learning_rate": 3.031578947368421e-06, |
|
"loss": 1.422, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 20.875, |
|
"learning_rate": 3.0157894736842106e-06, |
|
"loss": 1.6564, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 24.75, |
|
"learning_rate": 3e-06, |
|
"loss": 1.3558, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 24.625, |
|
"learning_rate": 2.9842105263157896e-06, |
|
"loss": 1.3871, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 37.0, |
|
"learning_rate": 2.968421052631579e-06, |
|
"loss": 1.8112, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 11.625, |
|
"learning_rate": 2.9526315789473685e-06, |
|
"loss": 1.0642, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 18.0, |
|
"learning_rate": 2.9368421052631576e-06, |
|
"loss": 1.2618, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 20.625, |
|
"learning_rate": 2.9210526315789475e-06, |
|
"loss": 1.1913, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 17.25, |
|
"learning_rate": 2.905263157894737e-06, |
|
"loss": 1.3142, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 28.75, |
|
"learning_rate": 2.8894736842105265e-06, |
|
"loss": 1.6175, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 29.25, |
|
"learning_rate": 2.8736842105263156e-06, |
|
"loss": 1.4108, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 11.125, |
|
"learning_rate": 2.8578947368421055e-06, |
|
"loss": 1.6615, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 26.375, |
|
"learning_rate": 2.842105263157895e-06, |
|
"loss": 1.2572, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 12.0625, |
|
"learning_rate": 2.826315789473684e-06, |
|
"loss": 1.3535, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 34.75, |
|
"learning_rate": 2.8105263157894735e-06, |
|
"loss": 1.137, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 29.375, |
|
"learning_rate": 2.7947368421052635e-06, |
|
"loss": 1.5349, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 23.625, |
|
"learning_rate": 2.778947368421053e-06, |
|
"loss": 1.6624, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 16.625, |
|
"learning_rate": 2.763157894736842e-06, |
|
"loss": 1.3632, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 22.5, |
|
"learning_rate": 2.7473684210526315e-06, |
|
"loss": 1.4244, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 19.5, |
|
"learning_rate": 2.731578947368421e-06, |
|
"loss": 1.1061, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 2.715789473684211e-06, |
|
"loss": 1.2288, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 13.8125, |
|
"learning_rate": 2.7e-06, |
|
"loss": 1.1793, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 15.125, |
|
"learning_rate": 2.6842105263157895e-06, |
|
"loss": 1.4269, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 18.125, |
|
"learning_rate": 2.668421052631579e-06, |
|
"loss": 1.2604, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 31.375, |
|
"learning_rate": 2.6526315789473685e-06, |
|
"loss": 1.3263, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 30.5, |
|
"learning_rate": 2.636842105263158e-06, |
|
"loss": 1.2611, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 23.25, |
|
"learning_rate": 2.6210526315789474e-06, |
|
"loss": 1.2557, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 12.75, |
|
"learning_rate": 2.605263157894737e-06, |
|
"loss": 1.3194, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 22.5, |
|
"learning_rate": 2.5894736842105264e-06, |
|
"loss": 0.818, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 21.5, |
|
"learning_rate": 2.573684210526316e-06, |
|
"loss": 1.3757, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 19.0, |
|
"learning_rate": 2.5578947368421054e-06, |
|
"loss": 1.4837, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 24.0, |
|
"learning_rate": 2.5421052631578945e-06, |
|
"loss": 1.3053, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 2.5263157894736844e-06, |
|
"loss": 1.1291, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 19.625, |
|
"learning_rate": 2.510526315789474e-06, |
|
"loss": 1.3663, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 37.0, |
|
"learning_rate": 2.4947368421052634e-06, |
|
"loss": 1.1908, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 28.125, |
|
"learning_rate": 2.4789473684210524e-06, |
|
"loss": 1.3792, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 18.125, |
|
"learning_rate": 2.4631578947368424e-06, |
|
"loss": 1.2192, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 63.5, |
|
"learning_rate": 2.447368421052632e-06, |
|
"loss": 1.3404, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 22.375, |
|
"learning_rate": 2.431578947368421e-06, |
|
"loss": 1.5099, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 22.25, |
|
"learning_rate": 2.4157894736842104e-06, |
|
"loss": 1.3896, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 23.5, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.2236, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 47.5, |
|
"learning_rate": 2.38421052631579e-06, |
|
"loss": 1.0464, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 17.125, |
|
"learning_rate": 2.368421052631579e-06, |
|
"loss": 1.4431, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 14.875, |
|
"learning_rate": 2.3526315789473684e-06, |
|
"loss": 1.446, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 20.125, |
|
"learning_rate": 2.336842105263158e-06, |
|
"loss": 1.197, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 15.3125, |
|
"learning_rate": 2.3210526315789473e-06, |
|
"loss": 1.4146, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 15.0625, |
|
"learning_rate": 2.305263157894737e-06, |
|
"loss": 1.0645, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 29.125, |
|
"learning_rate": 2.2894736842105263e-06, |
|
"loss": 1.3384, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 24.75, |
|
"learning_rate": 2.273684210526316e-06, |
|
"loss": 1.3842, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 18.125, |
|
"learning_rate": 2.2578947368421053e-06, |
|
"loss": 1.8319, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 16.5, |
|
"learning_rate": 2.242105263157895e-06, |
|
"loss": 1.2403, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 23.0, |
|
"learning_rate": 2.2263157894736843e-06, |
|
"loss": 1.1136, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 23.0, |
|
"learning_rate": 2.2105263157894734e-06, |
|
"loss": 1.3816, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 27.625, |
|
"learning_rate": 2.1947368421052633e-06, |
|
"loss": 1.188, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 36.75, |
|
"learning_rate": 2.1789473684210528e-06, |
|
"loss": 1.1371, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 23.625, |
|
"learning_rate": 2.1631578947368423e-06, |
|
"loss": 1.254, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 27.25, |
|
"learning_rate": 2.1473684210526313e-06, |
|
"loss": 1.2861, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 16.25, |
|
"learning_rate": 2.1315789473684212e-06, |
|
"loss": 1.4082, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 23.5, |
|
"learning_rate": 2.1157894736842107e-06, |
|
"loss": 1.2964, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 1.272, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 34.5, |
|
"learning_rate": 2.0842105263157893e-06, |
|
"loss": 1.3916, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 13.5, |
|
"learning_rate": 2.068421052631579e-06, |
|
"loss": 1.1691, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 21.375, |
|
"learning_rate": 2.0526315789473687e-06, |
|
"loss": 1.1987, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 25.0, |
|
"learning_rate": 2.0368421052631578e-06, |
|
"loss": 1.4224, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 15.5, |
|
"learning_rate": 2.0210526315789473e-06, |
|
"loss": 1.2615, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 18.75, |
|
"learning_rate": 2.005263157894737e-06, |
|
"loss": 1.236, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 21.5, |
|
"learning_rate": 1.9894736842105267e-06, |
|
"loss": 1.3128, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.9736842105263157e-06, |
|
"loss": 1.4185, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 24.0, |
|
"learning_rate": 1.9578947368421052e-06, |
|
"loss": 1.5233, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 37.25, |
|
"learning_rate": 1.9421052631578947e-06, |
|
"loss": 1.5503, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 16.75, |
|
"learning_rate": 1.926315789473684e-06, |
|
"loss": 1.34, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 19.25, |
|
"learning_rate": 1.9105263157894737e-06, |
|
"loss": 1.2821, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 24.75, |
|
"learning_rate": 1.8947368421052632e-06, |
|
"loss": 1.3295, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 17.25, |
|
"learning_rate": 1.8789473684210529e-06, |
|
"loss": 1.2632, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 32.0, |
|
"learning_rate": 1.8631578947368422e-06, |
|
"loss": 1.4812, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 22.625, |
|
"learning_rate": 1.8473684210526317e-06, |
|
"loss": 1.3081, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.831578947368421e-06, |
|
"loss": 1.3388, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 20.25, |
|
"learning_rate": 1.8157894736842106e-06, |
|
"loss": 1.5395, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 36.0, |
|
"learning_rate": 1.8e-06, |
|
"loss": 1.497, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 16.5, |
|
"learning_rate": 1.7842105263157896e-06, |
|
"loss": 1.6087, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 13.625, |
|
"learning_rate": 1.768421052631579e-06, |
|
"loss": 1.1516, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 18.5, |
|
"learning_rate": 1.7526315789473686e-06, |
|
"loss": 1.2703, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 20.25, |
|
"learning_rate": 1.7368421052631579e-06, |
|
"loss": 1.2513, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 26.75, |
|
"learning_rate": 1.7210526315789474e-06, |
|
"loss": 1.6455, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 27.5, |
|
"learning_rate": 1.7052631578947369e-06, |
|
"loss": 1.3645, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 15.0, |
|
"learning_rate": 1.6894736842105264e-06, |
|
"loss": 1.2663, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 24.625, |
|
"learning_rate": 1.6736842105263158e-06, |
|
"loss": 1.1592, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 27.125, |
|
"learning_rate": 1.6578947368421053e-06, |
|
"loss": 1.526, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 12.6875, |
|
"learning_rate": 1.6421052631578948e-06, |
|
"loss": 1.5893, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 26.375, |
|
"learning_rate": 1.6263157894736843e-06, |
|
"loss": 1.4342, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.6105263157894738e-06, |
|
"loss": 1.5345, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 19.375, |
|
"learning_rate": 1.5947368421052633e-06, |
|
"loss": 1.6273, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 26.875, |
|
"learning_rate": 1.5789473684210526e-06, |
|
"loss": 1.5424, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.563157894736842e-06, |
|
"loss": 1.0732, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.5473684210526316e-06, |
|
"loss": 1.5322, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.531578947368421e-06, |
|
"loss": 1.08, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 19.875, |
|
"learning_rate": 1.5157894736842105e-06, |
|
"loss": 1.1508, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 25.75, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.488, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 12.875, |
|
"learning_rate": 1.4842105263157895e-06, |
|
"loss": 1.1849, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 11.375, |
|
"learning_rate": 1.4684210526315788e-06, |
|
"loss": 1.1724, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 22.5, |
|
"learning_rate": 1.4526315789473685e-06, |
|
"loss": 1.493, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.4368421052631578e-06, |
|
"loss": 1.3404, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 21.625, |
|
"learning_rate": 1.4210526315789475e-06, |
|
"loss": 1.6391, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.4052631578947368e-06, |
|
"loss": 1.4825, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 22.375, |
|
"learning_rate": 1.3894736842105265e-06, |
|
"loss": 1.1878, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 20.125, |
|
"learning_rate": 1.3736842105263158e-06, |
|
"loss": 1.3874, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 17.125, |
|
"learning_rate": 1.3578947368421055e-06, |
|
"loss": 1.5342, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 21.375, |
|
"learning_rate": 1.3421052631578947e-06, |
|
"loss": 1.6216, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 19.625, |
|
"learning_rate": 1.3263157894736842e-06, |
|
"loss": 1.0346, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 16.625, |
|
"learning_rate": 1.3105263157894737e-06, |
|
"loss": 1.7393, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 27.125, |
|
"learning_rate": 1.2947368421052632e-06, |
|
"loss": 1.0278, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 14.5, |
|
"learning_rate": 1.2789473684210527e-06, |
|
"loss": 1.3802, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 22.75, |
|
"learning_rate": 1.2631578947368422e-06, |
|
"loss": 1.4699, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 13.6875, |
|
"learning_rate": 1.2473684210526317e-06, |
|
"loss": 1.3465, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 17.625, |
|
"learning_rate": 1.2315789473684212e-06, |
|
"loss": 1.5725, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 26.125, |
|
"learning_rate": 1.2157894736842105e-06, |
|
"loss": 1.3008, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 39.75, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.2357, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 28.25, |
|
"learning_rate": 1.1842105263157894e-06, |
|
"loss": 1.1002, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 27.75, |
|
"learning_rate": 1.168421052631579e-06, |
|
"loss": 1.2126, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 22.125, |
|
"learning_rate": 1.1526315789473684e-06, |
|
"loss": 1.5985, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 23.25, |
|
"learning_rate": 1.136842105263158e-06, |
|
"loss": 1.4414, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 40.75, |
|
"learning_rate": 1.1210526315789474e-06, |
|
"loss": 1.5736, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 24.0, |
|
"learning_rate": 1.1052631578947367e-06, |
|
"loss": 1.2877, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 14.0625, |
|
"learning_rate": 1.0894736842105264e-06, |
|
"loss": 1.3472, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.0736842105263157e-06, |
|
"loss": 1.2715, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 16.0, |
|
"learning_rate": 1.0578947368421054e-06, |
|
"loss": 1.1682, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 28.125, |
|
"learning_rate": 1.0421052631578946e-06, |
|
"loss": 1.4274, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 8.25, |
|
"learning_rate": 1.0263157894736843e-06, |
|
"loss": 1.4196, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 23.5, |
|
"learning_rate": 1.0105263157894736e-06, |
|
"loss": 1.2992, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 30.875, |
|
"learning_rate": 9.947368421052633e-07, |
|
"loss": 1.6002, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 20.75, |
|
"learning_rate": 9.789473684210526e-07, |
|
"loss": 1.5259, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 21.375, |
|
"learning_rate": 9.63157894736842e-07, |
|
"loss": 0.9239, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 41.75, |
|
"learning_rate": 9.473684210526316e-07, |
|
"loss": 1.4224, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 19.25, |
|
"learning_rate": 9.315789473684211e-07, |
|
"loss": 1.3847, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 17.625, |
|
"learning_rate": 9.157894736842105e-07, |
|
"loss": 1.4742, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 23.25, |
|
"learning_rate": 9e-07, |
|
"loss": 1.2696, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 18.75, |
|
"learning_rate": 8.842105263157895e-07, |
|
"loss": 1.4461, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 27.625, |
|
"learning_rate": 8.684210526315789e-07, |
|
"loss": 1.3385, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 20.875, |
|
"learning_rate": 8.526315789473684e-07, |
|
"loss": 1.2778, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 27.125, |
|
"learning_rate": 8.368421052631579e-07, |
|
"loss": 1.5869, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 19.875, |
|
"learning_rate": 8.210526315789474e-07, |
|
"loss": 1.1702, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 26.375, |
|
"learning_rate": 8.052631578947369e-07, |
|
"loss": 1.093, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 11.375, |
|
"learning_rate": 7.894736842105263e-07, |
|
"loss": 1.0597, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 22.5, |
|
"learning_rate": 7.736842105263158e-07, |
|
"loss": 1.033, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 19.75, |
|
"learning_rate": 7.578947368421053e-07, |
|
"loss": 1.2779, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 20.875, |
|
"learning_rate": 7.421052631578948e-07, |
|
"loss": 1.0749, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 26.625, |
|
"learning_rate": 7.263157894736843e-07, |
|
"loss": 1.5332, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 11.25, |
|
"learning_rate": 7.105263157894737e-07, |
|
"loss": 1.1336, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 18.875, |
|
"learning_rate": 6.947368421052632e-07, |
|
"loss": 1.4246, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 12.8125, |
|
"learning_rate": 6.789473684210527e-07, |
|
"loss": 1.2568, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 19.0, |
|
"learning_rate": 6.631578947368421e-07, |
|
"loss": 1.3689, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 17.5, |
|
"learning_rate": 6.473684210526316e-07, |
|
"loss": 1.0736, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 22.625, |
|
"learning_rate": 6.315789473684211e-07, |
|
"loss": 1.3696, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 23.375, |
|
"learning_rate": 6.157894736842106e-07, |
|
"loss": 1.3506, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.4374, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 22.0, |
|
"learning_rate": 5.842105263157895e-07, |
|
"loss": 1.4159, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 25.5, |
|
"learning_rate": 5.68421052631579e-07, |
|
"loss": 1.4591, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 40.5, |
|
"learning_rate": 5.526315789473683e-07, |
|
"loss": 1.1688, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 24.0, |
|
"learning_rate": 5.368421052631578e-07, |
|
"loss": 1.5717, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 9.0, |
|
"learning_rate": 5.210526315789473e-07, |
|
"loss": 1.387, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 25.5, |
|
"learning_rate": 5.052631578947368e-07, |
|
"loss": 1.5485, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 24.375, |
|
"learning_rate": 4.894736842105263e-07, |
|
"loss": 1.4857, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 19.375, |
|
"learning_rate": 4.736842105263158e-07, |
|
"loss": 1.1609, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 18.625, |
|
"learning_rate": 4.5789473684210523e-07, |
|
"loss": 1.4044, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 20.625, |
|
"learning_rate": 4.421052631578947e-07, |
|
"loss": 1.405, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 26.5, |
|
"learning_rate": 4.263157894736842e-07, |
|
"loss": 1.2905, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 13.4375, |
|
"learning_rate": 4.105263157894737e-07, |
|
"loss": 1.4932, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 18.75, |
|
"learning_rate": 3.9473684210526315e-07, |
|
"loss": 1.4775, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 23.0, |
|
"learning_rate": 3.7894736842105264e-07, |
|
"loss": 1.3495, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 15.4375, |
|
"learning_rate": 3.6315789473684213e-07, |
|
"loss": 1.1849, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 27.125, |
|
"learning_rate": 3.473684210526316e-07, |
|
"loss": 1.2486, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 31.75, |
|
"learning_rate": 3.3157894736842106e-07, |
|
"loss": 1.3132, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 52.25, |
|
"learning_rate": 3.1578947368421055e-07, |
|
"loss": 1.0418, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 13.0625, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 1.1115, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 18.0, |
|
"learning_rate": 2.842105263157895e-07, |
|
"loss": 1.418, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 54.5, |
|
"learning_rate": 2.684210526315789e-07, |
|
"loss": 1.6085, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 25.875, |
|
"learning_rate": 2.526315789473684e-07, |
|
"loss": 1.4588, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 17.75, |
|
"learning_rate": 2.368421052631579e-07, |
|
"loss": 1.6215, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 15.875, |
|
"learning_rate": 2.2105263157894736e-07, |
|
"loss": 1.4949, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 13.5, |
|
"learning_rate": 2.0526315789473685e-07, |
|
"loss": 1.1319, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 21.75, |
|
"learning_rate": 1.8947368421052632e-07, |
|
"loss": 1.2823, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 31.0, |
|
"learning_rate": 1.736842105263158e-07, |
|
"loss": 1.4937, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 12.25, |
|
"learning_rate": 1.5789473684210527e-07, |
|
"loss": 0.9809, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 45.25, |
|
"learning_rate": 1.4210526315789474e-07, |
|
"loss": 1.2351, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 23.875, |
|
"learning_rate": 1.263157894736842e-07, |
|
"loss": 1.2427, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 19.125, |
|
"learning_rate": 1.1052631578947368e-07, |
|
"loss": 1.6262, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 22.125, |
|
"learning_rate": 9.473684210526316e-08, |
|
"loss": 1.1822, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 23.0, |
|
"learning_rate": 7.894736842105264e-08, |
|
"loss": 1.3516, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 20.25, |
|
"learning_rate": 6.31578947368421e-08, |
|
"loss": 1.3155, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 19.125, |
|
"learning_rate": 4.736842105263158e-08, |
|
"loss": 1.4484, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 24.25, |
|
"learning_rate": 3.157894736842105e-08, |
|
"loss": 1.4975, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 1.5789473684210525e-08, |
|
"loss": 1.4104, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 16.875, |
|
"learning_rate": 0.0, |
|
"loss": 1.4947, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 1.391784906387329, |
|
"eval_runtime": 30.8802, |
|
"eval_samples_per_second": 32.383, |
|
"eval_steps_per_second": 32.383, |
|
"step": 10000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 10000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 2000, |
|
"total_flos": 8.06961020928e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |