{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 49.994350282485875,
  "global_step": 4400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.11, "learning_rate": 3.0000000000000004e-07, "loss": 3.5742, "step": 10},
    {"epoch": 0.23, "learning_rate": 6.000000000000001e-07, "loss": 3.4353, "step": 20},
    {"epoch": 0.34, "learning_rate": 9e-07, "loss": 3.2438, "step": 30},
    {"epoch": 0.45, "learning_rate": 1.2000000000000002e-06, "loss": 3.0976, "step": 40},
    {"epoch": 0.56, "learning_rate": 1.5e-06, "loss": 2.8575, "step": 50},
    {"epoch": 0.68, "learning_rate": 1.8e-06, "loss": 2.6829, "step": 60},
    {"epoch": 0.79, "learning_rate": 2.1000000000000002e-06, "loss": 2.4912, "step": 70},
    {"epoch": 0.9, "learning_rate": 2.4000000000000003e-06, "loss": 2.4056, "step": 80},
    {"epoch": 1.02, "learning_rate": 2.7e-06, "loss": 2.4176, "step": 90},
    {"epoch": 1.14, "learning_rate": 3e-06, "loss": 2.2921, "step": 100},
    {"epoch": 1.25, "learning_rate": 3.3e-06, "loss": 2.2252, "step": 110},
    {"epoch": 1.36, "learning_rate": 3.6e-06, "loss": 2.2169, "step": 120},
    {"epoch": 1.47, "learning_rate": 3.9e-06, "loss": 2.156, "step": 130},
    {"epoch": 1.59, "learning_rate": 4.2000000000000004e-06, "loss": 2.194, "step": 140},
    {"epoch": 1.7, "learning_rate": 4.5e-06, "loss": 2.1136, "step": 150},
    {"epoch": 1.81, "learning_rate": 4.800000000000001e-06, "loss": 2.1497, "step": 160},
    {"epoch": 1.93, "learning_rate": 5.1e-06, "loss": 2.1208, "step": 170},
    {"epoch": 2.05, "learning_rate": 5.4e-06, "loss": 2.2234, "step": 180},
    {"epoch": 2.16, "learning_rate": 5.7000000000000005e-06, "loss": 2.0122, "step": 190},
    {"epoch": 2.27, "learning_rate": 6e-06, "loss": 2.0509, "step": 200},
    {"epoch": 2.38, "learning_rate": 6.3e-06, "loss": 2.0077, "step": 210},
    {"epoch": 2.5, "learning_rate": 6.6e-06, "loss": 2.0297, "step": 220},
    {"epoch": 2.61, "learning_rate": 6.900000000000001e-06, "loss": 2.0226, "step": 230},
    {"epoch": 2.72, "learning_rate": 7.2e-06, "loss": 2.0274, "step": 240},
    {"epoch": 2.84, "learning_rate": 7.5e-06, "loss": 2.0505, "step": 250},
    {"epoch": 2.95, "learning_rate": 7.8e-06, "loss": 2.0163, "step": 260},
    {"epoch": 3.07, "learning_rate": 8.1e-06, "loss": 2.0764, "step": 270},
    {"epoch": 3.18, "learning_rate": 8.400000000000001e-06, "loss": 1.9411, "step": 280},
    {"epoch": 3.29, "learning_rate": 8.7e-06, "loss": 1.9308, "step": 290},
    {"epoch": 3.41, "learning_rate": 9e-06, "loss": 1.9507, "step": 300},
    {"epoch": 3.52, "learning_rate": 9.3e-06, "loss": 1.9036, "step": 310},
    {"epoch": 3.63, "learning_rate": 9.600000000000001e-06, "loss": 1.9728, "step": 320},
    {"epoch": 3.75, "learning_rate": 9.9e-06, "loss": 1.9105, "step": 330},
    {"epoch": 3.86, "learning_rate": 1.02e-05, "loss": 1.957, "step": 340},
    {"epoch": 3.97, "learning_rate": 1.05e-05, "loss": 1.9615, "step": 350},
    {"epoch": 4.09, "learning_rate": 1.08e-05, "loss": 1.9444, "step": 360},
    {"epoch": 4.2, "learning_rate": 1.11e-05, "loss": 1.9014, "step": 370},
    {"epoch": 4.32, "learning_rate": 1.1400000000000001e-05, "loss": 1.8752, "step": 380},
    {"epoch": 4.43, "learning_rate": 1.1700000000000001e-05, "loss": 1.8787, "step": 390},
    {"epoch": 4.54, "learning_rate": 1.2e-05, "loss": 1.8929, "step": 400},
    {"epoch": 4.66, "learning_rate": 1.2299999999999999e-05, "loss": 1.9223, "step": 410},
    {"epoch": 4.77, "learning_rate": 1.26e-05, "loss": 1.8559, "step": 420},
    {"epoch": 4.88, "learning_rate": 1.29e-05, "loss": 1.8533, "step": 430},
    {"epoch": 4.99, "learning_rate": 1.32e-05, "loss": 1.8577, "step": 440},
    {"epoch": 5.11, "learning_rate": 1.3500000000000001e-05, "loss": 1.9091, "step": 450},
    {"epoch": 5.23, "learning_rate": 1.3800000000000002e-05, "loss": 1.8096, "step": 460},
    {"epoch": 5.34, "learning_rate": 1.4099999999999999e-05, "loss": 1.8333, "step": 470},
    {"epoch": 5.45, "learning_rate": 1.44e-05, "loss": 1.8166, "step": 480},
    {"epoch": 5.56, "learning_rate": 1.47e-05, "loss": 1.8077, "step": 490},
    {"epoch": 5.68, "learning_rate": 1.5e-05, "loss": 1.8163, "step": 500},
    {"epoch": 5.79, "learning_rate": 1.53e-05, "loss": 1.8269, "step": 510},
    {"epoch": 5.9, "learning_rate": 1.56e-05, "loss": 1.8201, "step": 520},
    {"epoch": 6.02, "learning_rate": 1.59e-05, "loss": 1.936, "step": 530},
    {"epoch": 6.14, "learning_rate": 1.62e-05, "loss": 1.7812, "step": 540},
    {"epoch": 6.25, "learning_rate": 1.65e-05, "loss": 1.7872, "step": 550},
    {"epoch": 6.36, "learning_rate": 1.6800000000000002e-05, "loss": 1.782, "step": 560},
    {"epoch": 6.47, "learning_rate": 1.71e-05, "loss": 1.783, "step": 570},
    {"epoch": 6.59, "learning_rate": 1.74e-05, "loss": 1.7638, "step": 580},
    {"epoch": 6.7, "learning_rate": 1.77e-05, "loss": 1.7805, "step": 590},
    {"epoch": 6.81, "learning_rate": 1.8e-05, "loss": 1.7768, "step": 600},
    {"epoch": 6.93, "learning_rate": 1.83e-05, "loss": 1.7864, "step": 610},
    {"epoch": 7.05, "learning_rate": 1.86e-05, "loss": 1.8437, "step": 620},
    {"epoch": 7.16, "learning_rate": 1.8900000000000002e-05, "loss": 1.718, "step": 630},
    {"epoch": 7.27, "learning_rate": 1.9200000000000003e-05, "loss": 1.7334, "step": 640},
    {"epoch": 7.38, "learning_rate": 1.95e-05, "loss": 1.7445, "step": 650},
    {"epoch": 7.5, "learning_rate": 1.98e-05, "loss": 1.7553, "step": 660},
    {"epoch": 7.61, "learning_rate": 2.01e-05, "loss": 1.7567, "step": 670},
    {"epoch": 7.72, "learning_rate": 2.04e-05, "loss": 1.7498, "step": 680},
    {"epoch": 7.84, "learning_rate": 2.07e-05, "loss": 1.7273, "step": 690},
    {"epoch": 7.95, "learning_rate": 2.1e-05, "loss": 1.7588, "step": 700},
    {"epoch": 8.07, "learning_rate": 2.13e-05, "loss": 1.8182, "step": 710},
    {"epoch": 8.18, "learning_rate": 2.16e-05, "loss": 1.7256, "step": 720},
    {"epoch": 8.29, "learning_rate": 2.19e-05, "loss": 1.7198, "step": 730},
    {"epoch": 8.41, "learning_rate": 2.22e-05, "loss": 1.6916, "step": 740},
    {"epoch": 8.52, "learning_rate": 2.25e-05, "loss": 1.7189, "step": 750},
    {"epoch": 8.63, "learning_rate": 2.2800000000000002e-05, "loss": 1.7172, "step": 760},
    {"epoch": 8.75, "learning_rate": 2.3100000000000002e-05, "loss": 1.7219, "step": 770},
    {"epoch": 8.86, "learning_rate": 2.3400000000000003e-05, "loss": 1.7425, "step": 780},
    {"epoch": 8.97, "learning_rate": 2.37e-05, "loss": 1.7384, "step": 790},
    {"epoch": 9.09, "learning_rate": 2.4e-05, "loss": 1.7836, "step": 800},
    {"epoch": 9.2, "learning_rate": 2.43e-05, "loss": 1.6577, "step": 810},
    {"epoch": 9.32, "learning_rate": 2.4599999999999998e-05, "loss": 1.6644, "step": 820},
    {"epoch": 9.43, "learning_rate": 2.49e-05, "loss": 1.6861, "step": 830},
    {"epoch": 9.54, "learning_rate": 2.52e-05, "loss": 1.6939, "step": 840},
    {"epoch": 9.66, "learning_rate": 2.55e-05, "loss": 1.7055, "step": 850},
    {"epoch": 9.77, "learning_rate": 2.58e-05, "loss": 1.7117, "step": 860},
    {"epoch": 9.88, "learning_rate": 2.61e-05, "loss": 1.6968, "step": 870},
    {"epoch": 9.99, "learning_rate": 2.64e-05, "loss": 1.7166, "step": 880},
    {"epoch": 10.11, "learning_rate": 2.6700000000000002e-05, "loss": 1.7732, "step": 890},
    {"epoch": 10.23, "learning_rate": 2.7000000000000002e-05, "loss": 1.6664, "step": 900},
    {"epoch": 10.34, "learning_rate": 2.7300000000000003e-05, "loss": 1.6757, "step": 910},
    {"epoch": 10.45, "learning_rate": 2.7600000000000003e-05, "loss": 1.6687, "step": 920},
    {"epoch": 10.56, "learning_rate": 2.79e-05, "loss": 1.6693, "step": 930},
    {"epoch": 10.68, "learning_rate": 2.8199999999999998e-05, "loss": 1.6738, "step": 940},
    {"epoch": 10.79, "learning_rate": 2.8499999999999998e-05, "loss": 1.6807, "step": 950},
    {"epoch": 10.9, "learning_rate": 2.88e-05, "loss": 1.6875, "step": 960},
    {"epoch": 11.02, "learning_rate": 2.91e-05, "loss": 1.7664, "step": 970},
    {"epoch": 11.14, "learning_rate": 2.94e-05, "loss": 1.6674, "step": 980},
    {"epoch": 11.25, "learning_rate": 2.97e-05, "loss": 1.6369, "step": 990},
    {"epoch": 11.36, "learning_rate": 3e-05, "loss": 1.6675, "step": 1000},
    {"epoch": 11.36, "eval_denotation_accuracy": 0.18014835747085836, "eval_loss": 2.312751293182373, "eval_runtime": 1826.7928, "eval_samples_per_second": 1.55, "eval_steps_per_second": 0.097, "step": 1000},
    {"epoch": 11.47, "learning_rate": 2.9911764705882352e-05, "loss": 1.6501, "step": 1010},
    {"epoch": 11.59, "learning_rate": 2.9823529411764707e-05, "loss": 1.6657, "step": 1020},
    {"epoch": 11.7, "learning_rate": 2.973529411764706e-05, "loss": 1.6592, "step": 1030},
    {"epoch": 11.81, "learning_rate": 2.9647058823529414e-05, "loss": 1.6575, "step": 1040},
    {"epoch": 11.93, "learning_rate": 2.9558823529411766e-05, "loss": 1.6638, "step": 1050},
    {"epoch": 12.05, "learning_rate": 2.9470588235294117e-05, "loss": 1.7401, "step": 1060},
    {"epoch": 12.16, "learning_rate": 2.938235294117647e-05, "loss": 1.6335, "step": 1070},
    {"epoch": 12.27, "learning_rate": 2.9294117647058824e-05, "loss": 1.6446, "step": 1080},
    {"epoch": 12.38, "learning_rate": 2.9205882352941175e-05, "loss": 1.6233, "step": 1090},
    {"epoch": 12.5, "learning_rate": 2.911764705882353e-05, "loss": 1.6528, "step": 1100},
    {"epoch": 12.61, "learning_rate": 2.9029411764705882e-05, "loss": 1.6488, "step": 1110},
    {"epoch": 12.72, "learning_rate": 2.8941176470588237e-05, "loss": 1.644, "step": 1120},
    {"epoch": 12.84, "learning_rate": 2.885294117647059e-05, "loss": 1.6455, "step": 1130},
    {"epoch": 12.95, "learning_rate": 2.8764705882352943e-05, "loss": 1.6705, "step": 1140},
    {"epoch": 13.07, "learning_rate": 2.8676470588235295e-05, "loss": 1.7129, "step": 1150},
    {"epoch": 13.18, "learning_rate": 2.8588235294117647e-05, "loss": 1.6363, "step": 1160},
    {"epoch": 13.29, "learning_rate": 2.8499999999999998e-05, "loss": 1.6237, "step": 1170},
    {"epoch": 13.41, "learning_rate": 2.8411764705882353e-05, "loss": 1.6011, "step": 1180},
    {"epoch": 13.52, "learning_rate": 2.8323529411764705e-05, "loss": 1.6302, "step": 1190},
    {"epoch": 13.63, "learning_rate": 2.823529411764706e-05, "loss": 1.6277, "step": 1200},
    {"epoch": 13.75, "learning_rate": 2.814705882352941e-05, "loss": 1.6095, "step": 1210},
    {"epoch": 13.86, "learning_rate": 2.8058823529411766e-05, "loss": 1.6188, "step": 1220},
    {"epoch": 13.97, "learning_rate": 2.7970588235294118e-05, "loss": 1.6085, "step": 1230},
    {"epoch": 14.09, "learning_rate": 2.7882352941176473e-05, "loss": 1.6951, "step": 1240},
    {"epoch": 14.2, "learning_rate": 2.7794117647058824e-05, "loss": 1.6059, "step": 1250},
    {"epoch": 14.32, "learning_rate": 2.770588235294118e-05, "loss": 1.6191, "step": 1260},
    {"epoch": 14.43, "learning_rate": 2.7617647058823528e-05, "loss": 1.6178, "step": 1270},
    {"epoch": 14.54, "learning_rate": 2.7529411764705883e-05, "loss": 1.6226, "step": 1280},
    {"epoch": 14.66, "learning_rate": 2.7441176470588234e-05, "loss": 1.6099, "step": 1290},
    {"epoch": 14.77, "learning_rate": 2.735294117647059e-05, "loss": 1.5953, "step": 1300},
    {"epoch": 14.88, "learning_rate": 2.726470588235294e-05, "loss": 1.6067, "step": 1310},
    {"epoch": 14.99, "learning_rate": 2.7176470588235296e-05, "loss": 1.6062, "step": 1320},
    {"epoch": 15.11, "learning_rate": 2.7088235294117647e-05, "loss": 1.6788, "step": 1330},
    {"epoch": 15.23, "learning_rate": 2.7000000000000002e-05, "loss": 1.5883, "step": 1340},
    {"epoch": 15.34, "learning_rate": 2.6911764705882354e-05, "loss": 1.582, "step": 1350},
    {"epoch": 15.45, "learning_rate": 2.682352941176471e-05, "loss": 1.5865, "step": 1360},
    {"epoch": 15.56, "learning_rate": 2.673529411764706e-05, "loss": 1.5797, "step": 1370},
    {"epoch": 15.68, "learning_rate": 2.6647058823529412e-05, "loss": 1.5991, "step": 1380},
    {"epoch": 15.79, "learning_rate": 2.6558823529411764e-05, "loss": 1.604, "step": 1390},
    {"epoch": 15.9, "learning_rate": 2.647058823529412e-05, "loss": 1.5845, "step": 1400},
    {"epoch": 16.02, "learning_rate": 2.638235294117647e-05, "loss": 1.661, "step": 1410},
    {"epoch": 16.14, "learning_rate": 2.6294117647058825e-05, "loss": 1.5842, "step": 1420},
    {"epoch": 16.25, "learning_rate": 2.6205882352941177e-05, "loss": 1.576, "step": 1430},
    {"epoch": 16.36, "learning_rate": 2.6117647058823532e-05, "loss": 1.5771, "step": 1440},
    {"epoch": 16.47, "learning_rate": 2.6029411764705883e-05, "loss": 1.5764, "step": 1450},
    {"epoch": 16.59, "learning_rate": 2.594117647058824e-05, "loss": 1.586, "step": 1460},
    {"epoch": 16.7, "learning_rate": 2.585294117647059e-05, "loss": 1.5773, "step": 1470},
    {"epoch": 16.81, "learning_rate": 2.576470588235294e-05, "loss": 1.5814, "step": 1480},
    {"epoch": 16.93, "learning_rate": 2.5676470588235293e-05, "loss": 1.5812, "step": 1490},
    {"epoch": 17.05, "learning_rate": 2.5588235294117648e-05, "loss": 1.6478, "step": 1500},
    {"epoch": 17.16, "learning_rate": 2.55e-05, "loss": 1.5619, "step": 1510},
    {"epoch": 17.27, "learning_rate": 2.5411764705882355e-05, "loss": 1.577, "step": 1520},
    {"epoch": 17.38, "learning_rate": 2.5323529411764706e-05, "loss": 1.5569, "step": 1530},
    {"epoch": 17.5, "learning_rate": 2.523529411764706e-05, "loss": 1.5831, "step": 1540},
    {"epoch": 17.61, "learning_rate": 2.5147058823529413e-05, "loss": 1.5613, "step": 1550},
    {"epoch": 17.72, "learning_rate": 2.5058823529411768e-05, "loss": 1.5594, "step": 1560},
    {"epoch": 17.84, "learning_rate": 2.497058823529412e-05, "loss": 1.5723, "step": 1570},
    {"epoch": 17.95, "learning_rate": 2.488235294117647e-05, "loss": 1.576, "step": 1580},
    {"epoch": 18.07, "learning_rate": 2.4794117647058823e-05, "loss": 1.6366, "step": 1590},
    {"epoch": 18.18, "learning_rate": 2.4705882352941174e-05, "loss": 1.5548, "step": 1600},
    {"epoch": 18.29, "learning_rate": 2.461764705882353e-05, "loss": 1.5511, "step": 1610},
    {"epoch": 18.41, "learning_rate": 2.452941176470588e-05, "loss": 1.5625, "step": 1620},
    {"epoch": 18.52, "learning_rate": 2.4441176470588236e-05, "loss": 1.5676, "step": 1630},
    {"epoch": 18.63, "learning_rate": 2.4352941176470587e-05, "loss": 1.5635, "step": 1640},
    {"epoch": 18.75, "learning_rate": 2.4264705882352942e-05, "loss": 1.5552, "step": 1650},
    {"epoch": 18.86, "learning_rate": 2.4176470588235294e-05, "loss": 1.5499, "step": 1660},
    {"epoch": 18.97, "learning_rate": 2.408823529411765e-05, "loss": 1.5592, "step": 1670},
    {"epoch": 19.09, "learning_rate": 2.4e-05, "loss": 1.6249, "step": 1680},
    {"epoch": 19.2, "learning_rate": 2.3911764705882356e-05, "loss": 1.5408, "step": 1690},
    {"epoch": 19.32, "learning_rate": 2.3823529411764704e-05, "loss": 1.5567, "step": 1700},
    {"epoch": 19.43, "learning_rate": 2.373529411764706e-05, "loss": 1.5508, "step": 1710},
    {"epoch": 19.54, "learning_rate": 2.364705882352941e-05, "loss": 1.5482, "step": 1720},
    {"epoch": 19.66, "learning_rate": 2.3558823529411765e-05, "loss": 1.541, "step": 1730},
    {"epoch": 19.77, "learning_rate": 2.3470588235294117e-05, "loss": 1.551, "step": 1740},
    {"epoch": 19.88, "learning_rate": 2.3382352941176472e-05, "loss": 1.5503, "step": 1750},
    {"epoch": 19.99, "learning_rate": 2.3294117647058824e-05, "loss": 1.5499, "step": 1760},
    {"epoch": 20.11, "learning_rate": 2.320588235294118e-05, "loss": 1.6213, "step": 1770},
    {"epoch": 20.23, "learning_rate": 2.311764705882353e-05, "loss": 1.5295, "step": 1780},
    {"epoch": 20.34, "learning_rate": 2.3029411764705885e-05, "loss": 1.5382, "step": 1790},
    {"epoch": 20.45, "learning_rate": 2.2941176470588233e-05, "loss": 1.5405, "step": 1800},
    {"epoch": 20.56, "learning_rate": 2.2852941176470588e-05, "loss": 1.5342, "step": 1810},
    {"epoch": 20.68, "learning_rate": 2.276470588235294e-05, "loss": 1.5444, "step": 1820},
    {"epoch": 20.79, "learning_rate": 2.2676470588235295e-05, "loss": 1.5444, "step": 1830},
    {"epoch": 20.9, "learning_rate": 2.2588235294117646e-05, "loss": 1.5363, "step": 1840},
    {"epoch": 21.02, "learning_rate": 2.25e-05, "loss": 1.6267, "step": 1850},
    {"epoch": 21.14, "learning_rate": 2.2411764705882353e-05, "loss": 1.5449, "step": 1860},
    {"epoch": 21.25, "learning_rate": 2.2323529411764708e-05, "loss": 1.5241, "step": 1870},
    {"epoch": 21.36, "learning_rate": 2.223529411764706e-05, "loss": 1.5301, "step": 1880},
    {"epoch": 21.47, "learning_rate": 2.2147058823529415e-05, "loss": 1.5385, "step": 1890},
    {"epoch": 21.59, "learning_rate": 2.2058823529411766e-05, "loss": 1.5206, "step": 1900},
    {"epoch": 21.7, "learning_rate": 2.1970588235294118e-05, "loss": 1.5306, "step": 1910},
    {"epoch": 21.81, "learning_rate": 2.188235294117647e-05, "loss": 1.5302, "step": 1920},
    {"epoch": 21.93, "learning_rate": 2.1794117647058824e-05, "loss": 1.5401, "step": 1930},
    {"epoch": 22.05, "learning_rate": 2.1705882352941176e-05, "loss": 1.6063, "step": 1940},
    {"epoch": 22.16, "learning_rate": 2.161764705882353e-05, "loss": 1.5251, "step": 1950},
    {"epoch": 22.27, "learning_rate": 2.1529411764705882e-05, "loss": 1.5197, "step": 1960},
    {"epoch": 22.38, "learning_rate": 2.1441176470588237e-05, "loss": 1.524, "step": 1970},
    {"epoch": 22.5, "learning_rate": 2.135294117647059e-05, "loss": 1.5233, "step": 1980},
    {"epoch": 22.61, "learning_rate": 2.1264705882352944e-05, "loss": 1.5256, "step": 1990},
    {"epoch": 22.72, "learning_rate": 2.1176470588235296e-05, "loss": 1.5264, "step": 2000},
    {"epoch": 22.72, "eval_denotation_accuracy": 0.48675379724478984, "eval_loss": 2.4037857055664062, "eval_runtime": 1745.3919, "eval_samples_per_second": 1.622, "eval_steps_per_second": 0.101, "step": 2000},
    {"epoch": 22.84, "learning_rate": 2.1088235294117647e-05, "loss": 1.527, "step": 2010},
    {"epoch": 22.95, "learning_rate": 2.1e-05, "loss": 1.5229, "step": 2020},
    {"epoch": 23.07, "learning_rate": 2.0911764705882354e-05, "loss": 1.6007, "step": 2030},
    {"epoch": 23.18, "learning_rate": 2.0823529411764705e-05, "loss": 1.5107, "step": 2040},
    {"epoch": 23.29, "learning_rate": 2.073529411764706e-05, "loss": 1.5148, "step": 2050},
    {"epoch": 23.41, "learning_rate": 2.0647058823529412e-05, "loss": 1.5174, "step": 2060},
    {"epoch": 23.52, "learning_rate": 2.0558823529411767e-05, "loss": 1.5141, "step": 2070},
    {"epoch": 23.63, "learning_rate": 2.047058823529412e-05, "loss": 1.5166, "step": 2080},
    {"epoch": 23.75, "learning_rate": 2.0382352941176474e-05, "loss": 1.5197, "step": 2090},
    {"epoch": 23.86, "learning_rate": 2.0294117647058825e-05, "loss": 1.5154, "step": 2100},
    {"epoch": 23.97, "learning_rate": 2.020588235294118e-05, "loss": 1.5155, "step": 2110},
    {"epoch": 24.09, "learning_rate": 2.011764705882353e-05, "loss": 1.5843, "step": 2120},
    {"epoch": 24.2, "learning_rate": 2.0029411764705883e-05, "loss": 1.5217, "step": 2130},
    {"epoch": 24.32, "learning_rate": 1.9941176470588235e-05, "loss": 1.5099, "step": 2140},
    {"epoch": 24.43, "learning_rate": 1.9852941176470586e-05, "loss": 1.5118, "step": 2150},
    {"epoch": 24.54, "learning_rate": 1.976470588235294e-05, "loss": 1.5216, "step": 2160},
    {"epoch": 24.66, "learning_rate": 1.9676470588235293e-05, "loss": 1.5088, "step": 2170},
    {"epoch": 24.77, "learning_rate": 1.9588235294117648e-05, "loss": 1.5125, "step": 2180},
    {"epoch": 24.88, "learning_rate": 1.95e-05, "loss": 1.517, "step": 2190},
    {"epoch": 24.99, "learning_rate": 1.9411764705882355e-05, "loss": 1.5136, "step": 2200},
    {"epoch": 25.11, "learning_rate": 1.9323529411764706e-05, "loss": 1.5812, "step": 2210},
    {"epoch": 25.23, "learning_rate": 1.923529411764706e-05, "loss": 1.513, "step": 2220},
    {"epoch": 25.34, "learning_rate": 1.914705882352941e-05, "loss": 1.5135, "step": 2230},
    {"epoch": 25.45, "learning_rate": 1.9058823529411764e-05, "loss": 1.5066, "step": 2240},
    {"epoch": 25.56, "learning_rate": 1.8970588235294116e-05, "loss": 1.5087, "step": 2250},
    {"epoch": 25.68, "learning_rate": 1.888235294117647e-05, "loss": 1.5013, "step": 2260},
    {"epoch": 25.79, "learning_rate": 1.8794117647058823e-05, "loss": 1.5059, "step": 2270},
    {"epoch": 25.9, "learning_rate": 1.8705882352941178e-05, "loss": 1.5057, "step": 2280},
    {"epoch": 26.02, "learning_rate": 1.861764705882353e-05, "loss": 1.579, "step": 2290},
    {"epoch": 26.14, "learning_rate": 1.8529411764705884e-05, "loss": 1.5009, "step": 2300},
    {"epoch": 26.25, "learning_rate": 1.8441176470588236e-05, "loss": 1.4947, "step": 2310},
    {"epoch": 26.36, "learning_rate": 1.835294117647059e-05, "loss": 1.502, "step": 2320},
    {"epoch": 26.47, "learning_rate": 1.826470588235294e-05, "loss": 1.5006, "step": 2330},
    {"epoch": 26.59, "learning_rate": 1.8176470588235294e-05, "loss": 1.4958, "step": 2340},
    {"epoch": 26.7, "learning_rate": 1.8088235294117645e-05, "loss": 1.5004, "step": 2350},
    {"epoch": 26.81, "learning_rate": 1.8e-05, "loss": 1.497, "step": 2360},
    {"epoch": 26.93, "learning_rate": 1.7911764705882352e-05, "loss": 1.5019, "step": 2370},
    {"epoch": 27.05, "learning_rate": 1.7823529411764707e-05, "loss": 1.5746, "step": 2380},
    {"epoch": 27.16, "learning_rate": 1.773529411764706e-05, "loss": 1.4954, "step": 2390},
    {"epoch": 27.27, "learning_rate": 1.7647058823529414e-05, "loss": 1.4968, "step": 2400},
    {"epoch": 27.38, "learning_rate": 1.7558823529411765e-05, "loss": 1.4959, "step": 2410},
    {"epoch": 27.5, "learning_rate": 1.747058823529412e-05, "loss": 1.5, "step": 2420},
    {"epoch": 27.61, "learning_rate": 1.7382352941176472e-05, "loss": 1.4945, "step": 2430},
    {"epoch": 27.72, "learning_rate": 1.7294117647058823e-05, "loss": 1.493, "step": 2440},
    {"epoch": 27.84, "learning_rate": 1.7205882352941175e-05, "loss": 1.5007, "step": 2450},
    {"epoch": 27.95, "learning_rate": 1.711764705882353e-05, "loss": 1.5059, "step": 2460},
    {"epoch": 28.07, "learning_rate": 1.702941176470588e-05, "loss": 1.5709, "step": 2470},
    {"epoch": 28.18, "learning_rate": 1.6941176470588237e-05, "loss": 1.4905, "step": 2480},
    {"epoch": 28.29, "learning_rate": 1.6852941176470588e-05, "loss": 1.4894, "step": 2490},
    {"epoch": 28.41, "learning_rate": 1.6764705882352943e-05, "loss": 1.5007, "step": 2500},
    {"epoch": 28.52, "learning_rate": 1.6676470588235295e-05, "loss": 1.4908, "step": 2510},
    {"epoch": 28.63, "learning_rate": 1.658823529411765e-05, "loss": 1.4912, "step": 2520},
    {"epoch": 28.75, "learning_rate": 1.65e-05, "loss": 1.493, "step": 2530},
    {"epoch": 28.86, "learning_rate": 1.6411764705882356e-05, "loss": 1.4957, "step": 2540},
    {"epoch": 28.97, "learning_rate": 1.6323529411764704e-05, "loss": 1.4919, "step": 2550},
    {"epoch": 29.09, "learning_rate": 1.623529411764706e-05, "loss": 1.5667, "step": 2560},
    {"epoch": 29.2, "learning_rate": 1.614705882352941e-05, "loss": 1.4908, "step": 2570},
    {"epoch": 29.32, "learning_rate": 1.6058823529411766e-05, "loss": 1.4893, "step": 2580},
    {"epoch": 29.43, "learning_rate": 1.5970588235294118e-05, "loss": 1.4898, "step": 2590},
    {"epoch": 29.54, "learning_rate": 1.5882352941176473e-05, "loss": 1.4929, "step": 2600},
    {"epoch": 29.66, "learning_rate": 1.5794117647058824e-05, "loss": 1.4858, "step": 2610},
    {"epoch": 29.77, "learning_rate": 1.570588235294118e-05, "loss": 1.4946, "step": 2620},
    {"epoch": 29.88, "learning_rate": 1.561764705882353e-05, "loss": 1.491, "step": 2630},
    {"epoch": 29.99, "learning_rate": 1.5529411764705886e-05, "loss": 1.4873, "step": 2640},
    {"epoch": 30.11, "learning_rate": 1.5441176470588234e-05, "loss": 1.5672, "step": 2650},
    {"epoch": 30.23, "learning_rate": 1.535294117647059e-05, "loss": 1.4865, "step": 2660},
    {"epoch": 30.34, "learning_rate": 1.526470588235294e-05, "loss": 1.4895, "step": 2670},
    {"epoch": 30.45, "learning_rate": 1.5176470588235294e-05, "loss": 1.485, "step": 2680},
    {"epoch": 30.56, "learning_rate": 1.5088235294117647e-05, "loss": 1.4892, "step": 2690},
    {"epoch": 30.68, "learning_rate": 1.5e-05, "loss": 1.4854, "step": 2700},
    {"epoch": 30.79, "learning_rate": 1.4911764705882354e-05, "loss": 1.4862, "step": 2710},
    {"epoch": 30.9, "learning_rate": 1.4823529411764707e-05, "loss": 1.4886, "step": 2720},
    {"epoch": 31.02, "learning_rate": 1.4735294117647059e-05, "loss": 1.5558, "step": 2730},
    {"epoch": 31.14, "learning_rate": 1.4647058823529412e-05, "loss": 1.4916, "step": 2740},
    {"epoch": 31.25, "learning_rate": 1.4558823529411765e-05, "loss": 1.4892, "step": 2750},
    {"epoch": 31.36, "learning_rate": 1.4470588235294118e-05, "loss": 1.4861, "step": 2760},
    {"epoch": 31.47, "learning_rate": 1.4382352941176472e-05, "loss": 1.4841, "step": 2770},
    {"epoch": 31.59, "learning_rate": 1.4294117647058823e-05, "loss": 1.4834, "step": 2780},
    {"epoch": 31.7, "learning_rate": 1.4205882352941177e-05, "loss": 1.4842, "step": 2790},
    {"epoch": 31.81, "learning_rate": 1.411764705882353e-05, "loss": 1.4834, "step": 2800},
    {"epoch": 31.93, "learning_rate": 1.4029411764705883e-05, "loss": 1.483, "step": 2810},
    {"epoch": 32.05, "learning_rate": 1.3941176470588236e-05, "loss": 1.5567, "step": 2820},
    {"epoch": 32.16, "learning_rate": 1.385294117647059e-05, "loss": 1.4841, "step": 2830},
    {"epoch": 32.27, "learning_rate": 1.3764705882352941e-05, "loss": 1.4858, "step": 2840},
    {"epoch": 32.38, "learning_rate": 1.3676470588235295e-05, "loss": 1.483, "step": 2850},
    {"epoch": 32.5, "learning_rate": 1.3588235294117648e-05, "loss": 1.4781, "step": 2860},
    {"epoch": 32.61, "learning_rate": 1.3500000000000001e-05, "loss": 1.4818, "step": 2870},
    {"epoch": 32.72, "learning_rate": 1.3411764705882354e-05, "loss": 1.4802, "step": 2880},
    {"epoch": 32.84, "learning_rate": 1.3323529411764706e-05, "loss": 1.4835, "step": 2890},
    {"epoch": 32.95, "learning_rate": 1.323529411764706e-05, "loss": 1.486, "step": 2900},
    {"epoch": 33.07, "learning_rate": 1.3147058823529413e-05, "loss": 1.5547, "step": 2910},
    {"epoch": 33.18, "learning_rate": 1.3058823529411766e-05, "loss": 1.4813, "step": 2920},
    {"epoch": 33.29, "learning_rate": 1.297058823529412e-05, "loss": 1.4857, "step": 2930},
    {"epoch": 33.41, "learning_rate": 1.288235294117647e-05, "loss": 1.4853, "step": 2940},
    {"epoch": 33.52, "learning_rate": 1.2794117647058824e-05, "loss": 1.4772, "step": 2950},
    {"epoch": 33.63, "learning_rate": 1.2705882352941177e-05, "loss": 1.4826, "step": 2960},
    {"epoch": 33.75, "learning_rate": 1.261764705882353e-05, "loss": 1.484, "step": 2970},
    {"epoch": 33.86, "learning_rate": 1.2529411764705884e-05, "loss": 1.4864, "step": 2980},
    {"epoch": 33.97, "learning_rate": 1.2441176470588236e-05, "loss": 1.4808, "step": 2990},
    {"epoch": 34.09, "learning_rate": 1.2352941176470587e-05, "loss": 1.5574, "step": 3000},
    {"epoch": 34.09, "eval_denotation_accuracy": 0.42352525609325326, "eval_loss": 2.4281280040740967, "eval_runtime": 1744.8445, "eval_samples_per_second": 1.622, "eval_steps_per_second": 0.101, "step": 3000},
    {"epoch": 34.2, "learning_rate": 1.226470588235294e-05, "loss": 1.4832, "step": 3010},
    {"epoch": 34.32, "learning_rate": 1.2176470588235294e-05, "loss": 1.4815, "step": 3020},
    {"epoch": 34.43, "learning_rate": 1.2088235294117647e-05, "loss": 1.4813, "step": 3030},
    {"epoch": 34.54, "learning_rate": 1.2e-05, "loss": 1.4826, "step": 3040},
    {"epoch": 34.66, "learning_rate": 1.1911764705882352e-05, "loss": 1.4826, "step": 3050},
    {"epoch": 34.77, "learning_rate": 1.1823529411764705e-05, "loss": 1.4812, "step": 3060},
    {"epoch": 34.88, "learning_rate": 1.1735294117647058e-05, "loss": 1.48, "step": 3070},
    {"epoch": 34.99, "learning_rate": 1.1647058823529412e-05, "loss": 1.4818, "step": 3080},
    {"epoch": 35.11, "learning_rate": 1.1558823529411765e-05, "loss": 1.5577, "step": 3090},
    {"epoch": 35.23, "learning_rate": 1.1470588235294117e-05, "loss": 1.4789, "step": 3100},
    {"epoch": 35.34, "learning_rate": 1.138235294117647e-05, "loss": 1.477, "step": 3110},
    {"epoch": 35.45, "learning_rate": 1.1294117647058823e-05, "loss": 1.4778, "step": 3120},
    {"epoch": 35.56, "learning_rate": 1.1205882352941177e-05, "loss": 1.476, "step": 3130},
    {"epoch": 35.68, "learning_rate": 1.111764705882353e-05, "loss": 1.4799, "step": 3140},
    {"epoch": 35.79, "learning_rate": 1.1029411764705883e-05, "loss": 1.4747, "step": 3150},
    {"epoch": 35.9, "learning_rate": 1.0941176470588235e-05, "loss": 1.4748, "step": 3160},
    {"epoch": 36.02, "learning_rate": 1.0852941176470588e-05, "loss": 1.5561, "step": 3170},
    {"epoch": 36.14, "learning_rate": 1.0764705882352941e-05, "loss": 1.4779, "step": 3180},
    {"epoch": 36.25, "learning_rate": 1.0676470588235295e-05, "loss": 1.4774, "step": 3190},
    {"epoch": 36.36, "learning_rate": 1.0588235294117648e-05, "loss": 1.4743, "step": 3200},
    {"epoch": 36.47, "learning_rate": 1.05e-05, "loss": 1.4765, "step": 3210},
    {"epoch": 36.59, "learning_rate": 1.0411764705882353e-05, "loss": 1.4755, "step": 3220},
    {"epoch": 36.7, "learning_rate": 1.0323529411764706e-05, "loss": 1.4796, "step": 3230},
    {"epoch": 36.81, "learning_rate": 1.023529411764706e-05, "loss": 1.4723, "step": 3240},
    {"epoch": 36.93, "learning_rate": 1.0147058823529413e-05, "loss": 1.4781, "step": 3250},
    {"epoch": 37.05, "learning_rate": 1.0058823529411764e-05, "loss": 1.5533, "step": 3260},
    {"epoch": 37.16, "learning_rate": 9.970588235294117e-06, "loss": 1.4733, "step": 3270},
    {"epoch": 37.27, "learning_rate": 9.88235294117647e-06, "loss": 1.4761, "step": 3280},
    {"epoch": 37.38, "learning_rate": 9.794117647058824e-06, "loss": 1.4738, "step": 3290},
    {"epoch": 37.5, "learning_rate": 9.705882352941177e-06, "loss": 1.4731, "step": 3300},
    {"epoch": 37.61, "learning_rate": 9.61764705882353e-06, "loss": 1.4733, "step": 3310},
    {"epoch": 37.72, "learning_rate": 9.529411764705882e-06, "loss": 1.4738, "step": 3320},
    {"epoch": 37.84, "learning_rate": 9.441176470588235e-06, "loss": 1.4742, "step": 3330},
    {"epoch": 37.95, "learning_rate": 9.352941176470589e-06, "loss": 1.4721, "step": 3340},
    {"epoch": 38.07, "learning_rate": 9.264705882352942e-06, "loss": 1.5433, "step": 3350},
    {"epoch": 38.18, "learning_rate": 9.176470588235295e-06, "loss": 1.4746, "step": 3360},
    {"epoch": 38.29, "learning_rate": 9.088235294117647e-06, "loss": 1.4754, "step": 3370},
    {"epoch": 38.41, "learning_rate": 9e-06, "loss": 1.4758, "step": 3380},
    {"epoch": 38.52, "learning_rate": 8.911764705882354e-06, "loss": 1.4705, "step": 3390},
    {"epoch": 38.63, "learning_rate": 8.823529411764707e-06, "loss": 1.4709, "step": 3400},
    {"epoch": 38.75, "learning_rate": 8.73529411764706e-06, "loss": 1.4717, "step": 3410},
    {"epoch": 38.86, "learning_rate": 8.647058823529412e-06, "loss": 1.4741, "step": 3420},
    {"epoch": 38.97, "learning_rate": 8.558823529411765e-06, "loss": 1.47, "step": 3430},
    {"epoch": 39.09, "learning_rate": 8.470588235294118e-06, "loss": 1.543, "step": 3440},
    {"epoch": 39.2, "learning_rate": 8.382352941176472e-06, "loss": 1.4719, "step": 3450},
    {"epoch": 39.32, "learning_rate": 8.294117647058825e-06, "loss": 1.4715, "step": 3460},
    {"epoch": 39.43, "learning_rate": 8.205882352941178e-06, "loss": 1.4681, "step": 3470},
    {"epoch": 39.54, "learning_rate": 8.11764705882353e-06, "loss": 1.4733, "step": 3480},
    {"epoch": 39.66, "learning_rate": 8.029411764705883e-06, "loss": 1.4716, "step": 3490},
    {"epoch": 39.77, "learning_rate": 7.941176470588236e-06, "loss": 1.4694, "step": 3500},
    {"epoch": 39.88, "learning_rate": 7.85294117647059e-06, "loss": 1.4726, "step": 3510},
    {"epoch": 39.99, "learning_rate": 7.764705882352943e-06, "loss": 1.4738, "step": 3520},
    {"epoch": 40.11, "learning_rate": 7.676470588235294e-06, "loss": 1.5443, "step": 3530},
    {"epoch": 40.23, "learning_rate": 7.588235294117647e-06, "loss": 1.4713, "step": 3540},
    {"epoch": 40.34, "learning_rate": 7.5e-06, "loss": 1.4673, "step": 3550},
    {"epoch": 40.45, "learning_rate": 7.4117647058823535e-06, "loss": 1.4705, "step": 3560},
    {"epoch": 40.56, "learning_rate": 7.323529411764706e-06, "loss": 1.4695, "step": 3570},
    {"epoch": 40.68, "learning_rate": 7.235294117647059e-06, "loss": 1.4678, "step": 3580},
    {"epoch": 40.79, "learning_rate": 7.147058823529412e-06, "loss": 1.4679, "step": 3590},
    {"epoch": 40.9, "learning_rate": 7.058823529411765e-06, "loss": 1.4693, "step": 3600},
    {"epoch": 41.02, "learning_rate": 6.970588235294118e-06, "loss": 1.5428, "step": 3610},
    {"epoch": 41.14, "learning_rate": 6.882352941176471e-06, "loss": 1.4741, "step": 3620},
    {"epoch": 41.25, "learning_rate": 6.794117647058824e-06, "loss": 1.4677, "step": 3630},
    {"epoch": 41.36, "learning_rate": 6.705882352941177e-06, "loss": 1.4695, "step": 3640},
    {"epoch": 41.47, "learning_rate": 6.61764705882353e-06, "loss": 1.4659, "step": 3650},
    {"epoch": 41.59, "learning_rate": 6.529411764705883e-06, "loss": 1.4693, "step": 3660},
    {"epoch": 41.7, "learning_rate": 6.441176470588235e-06, "loss": 1.469, "step": 3670},
    {"epoch": 41.81, "learning_rate": 6.352941176470589e-06, "loss": 1.466, "step": 3680},
    {"epoch": 41.93, "learning_rate": 6.264705882352942e-06, "loss": 1.4688, "step": 3690},
    {"epoch": 42.05, "learning_rate": 6.176470588235294e-06, "loss": 1.5432, "step": 3700},
    {"epoch": 42.16, "learning_rate": 6.088235294117647e-06, "loss": 1.4697, "step": 3710},
    {"epoch": 42.27, "learning_rate": 6e-06, "loss": 1.4696, "step": 3720},
    {"epoch": 42.38, "learning_rate": 5.911764705882353e-06, "loss": 1.4661, "step": 3730},
    {"epoch": 42.5, "learning_rate": 5.823529411764706e-06, "loss": 1.4701, "step": 3740},
    {"epoch": 42.61, "learning_rate": 5.735294117647058e-06, "loss": 1.4664, "step": 3750},
    {"epoch": 42.72, "learning_rate": 5.647058823529412e-06, "loss": 1.4661, "step": 3760},
    {"epoch": 42.84, "learning_rate": 5.558823529411765e-06, "loss": 1.4668, "step": 3770},
    {"epoch": 42.95, "learning_rate": 5.470588235294117e-06, "loss": 1.4688, "step": 3780},
    {"epoch": 43.07, "learning_rate": 5.382352941176471e-06, "loss": 1.5398, "step": 3790},
    {"epoch": 43.18, "learning_rate": 5.294117647058824e-06, "loss": 1.4654, "step": 3800},
    {"epoch": 43.29, "learning_rate": 5.205882352941176e-06, "loss": 1.4637, "step": 3810},
    {"epoch": 43.41, "learning_rate": 5.11764705882353e-06, "loss": 1.4706, "step": 3820},
    {"epoch": 43.52, "learning_rate": 5.029411764705882e-06, "loss": 1.4638, "step": 3830},
    {"epoch": 43.63, "learning_rate": 4.941176470588235e-06, "loss": 1.4671, "step": 3840},
    {"epoch": 43.75, "learning_rate": 4.852941176470589e-06, "loss": 1.4681, "step": 3850},
    {"epoch": 43.86, "learning_rate": 4.764705882352941e-06, "loss": 1.467, "step": 3860},
    {"epoch": 43.97, "learning_rate": 4.676470588235294e-06, "loss": 1.4666, "step": 3870},
    {"epoch": 44.09, "learning_rate": 4.588235294117648e-06, "loss": 1.5401, "step": 3880},
    {"epoch": 44.2, "learning_rate": 4.5e-06, "loss": 1.4642, "step": 3890},
    {"epoch": 44.32, "learning_rate": 4.411764705882353e-06, "loss": 1.466, "step": 3900},
    {"epoch": 44.43, "learning_rate": 4.323529411764706e-06, "loss": 1.4647, "step": 3910},
    {"epoch": 44.54, "learning_rate": 4.235294117647059e-06, "loss": 1.4689, "step": 3920},
    {"epoch": 44.66, "learning_rate": 4.147058823529412e-06, "loss": 1.4642, "step": 3930},
    {"epoch": 44.77, "learning_rate": 4.058823529411765e-06, "loss": 1.4696, "step": 3940},
    {"epoch": 44.88, "learning_rate": 3.970588235294118e-06, "loss": 1.4666, "step": 3950},
    {"epoch": 44.99, "learning_rate": 3.8823529411764714e-06, "loss": 1.4678, "step": 3960},
    {"epoch": 45.11, "learning_rate": 3.7941176470588235e-06, "loss": 1.5383, "step": 3970},
    {"epoch": 45.23, "learning_rate": 3.7058823529411767e-06, "loss": 1.4664, "step": 3980},
    {"epoch": 45.34, "learning_rate": 3.6176470588235296e-06, "loss": 1.4645, "step": 3990},
    {"epoch": 45.45, "learning_rate": 3.5294117647058825e-06, "loss": 1.4656, "step": 4000},
    {"epoch": 45.45, "eval_denotation_accuracy": 0.4959378311550689, "eval_loss": 2.4232473373413086, "eval_runtime": 1762.2635, "eval_samples_per_second": 1.606, "eval_steps_per_second": 0.1, "step": 4000},
    {"epoch": 45.56, "learning_rate": 3.4411764705882353e-06, "loss": 1.4646, "step": 4010},
    {"epoch": 45.68, "learning_rate": 3.3529411764705886e-06, "loss": 1.4656, "step": 4020},
    {"epoch": 45.79, "learning_rate": 3.2647058823529415e-06, "loss": 1.4653, "step": 4030},
    {"epoch": 45.9, "learning_rate": 3.1764705882352943e-06, "loss": 1.4659, "step": 4040},
    {"epoch": 46.02, "learning_rate": 3.088235294117647e-06, "loss": 1.5391, "step": 4050},
    {"epoch": 46.14, "learning_rate": 3e-06, "loss": 1.4667, "step": 4060},
    {"epoch": 46.25, "learning_rate": 2.911764705882353e-06, "loss": 1.4653, "step": 4070},
    {"epoch": 46.36, "learning_rate": 2.823529411764706e-06, "loss": 1.4646, "step": 4080},
    {"epoch": 46.47, "learning_rate": 2.7352941176470587e-06, "loss": 1.4662, "step": 4090},
    {"epoch": 46.59, "learning_rate": 2.647058823529412e-06, "loss": 1.4678, "step": 4100},
    {"epoch": 46.7, "learning_rate": 2.558823529411765e-06, "loss": 1.4651, "step": 4110},
    {"epoch": 46.81, "learning_rate": 2.4705882352941177e-06, "loss": 1.4628, "step": 4120},
    {"epoch": 46.93, "learning_rate": 2.3823529411764705e-06, "loss": 1.4637, "step": 4130},
    {"epoch": 47.05, "learning_rate": 2.294117647058824e-06, "loss": 1.5399, "step": 4140},
    {"epoch": 47.16, "learning_rate": 2.2058823529411767e-06, "loss": 1.4615, "step": 4150},
    {"epoch": 47.27, "learning_rate": 2.1176470588235296e-06, "loss": 1.4641, "step": 4160},
    {"epoch": 47.38, "learning_rate": 2.0294117647058824e-06, "loss": 1.4653, "step": 4170},
    {"epoch": 47.5, "learning_rate": 1.9411764705882357e-06, "loss": 1.4626, "step": 4180},
    {"epoch": 47.61, "learning_rate": 1.8529411764705884e-06, "loss": 1.4606, "step": 4190},
    {"epoch": 47.72, "learning_rate": 1.7647058823529412e-06, "loss": 1.4667, "step": 4200},
    {"epoch": 47.84, "learning_rate": 1.6764705882352943e-06, "loss": 1.4606, "step": 4210},
    {"epoch": 47.95, "learning_rate": 1.5882352941176472e-06, "loss": 1.4652, "step": 4220},
    {"epoch": 48.07, "learning_rate": 1.5e-06, "loss": 1.5363, "step": 4230},
    {"epoch": 48.18, "learning_rate": 1.411764705882353e-06, "loss": 1.4629, "step": 4240},
    {"epoch": 48.29, "learning_rate": 1.323529411764706e-06, "loss": 1.4609, "step": 4250},
    {"epoch": 48.41, "learning_rate": 1.2352941176470588e-06, "loss": 1.4615, "step": 4260},
    {"epoch": 48.52, "learning_rate": 1.147058823529412e-06, "loss": 1.4631, "step": 4270},
    {"epoch": 48.63, "learning_rate": 1.0588235294117648e-06, "loss": 1.4639, "step": 4280},
    {"epoch": 48.75, "learning_rate": 9.705882352941179e-07, "loss": 1.4647, "step": 4290},
    {"epoch": 48.86, "learning_rate": 8.823529411764706e-07, "loss": 1.4614, "step": 4300},
    {"epoch": 48.97, "learning_rate": 7.941176470588236e-07, "loss": 1.4641, "step": 4310},
    {"epoch": 49.09, "learning_rate": 7.058823529411765e-07, "loss": 1.537, "step": 4320},
    {"epoch": 49.2, "learning_rate": 6.176470588235294e-07, "loss": 1.4627, "step": 4330},
    {"epoch": 49.32, "learning_rate": 5.294117647058824e-07, "loss": 1.4662, "step": 4340},
    {"epoch": 49.43, "learning_rate": 4.411764705882353e-07, "loss": 1.4648, "step": 4350},
    {"epoch": 49.54, "learning_rate": 3.529411764705882e-07, "loss": 1.4633, "step": 4360},
    {"epoch": 49.66, "learning_rate": 2.647058823529412e-07, "loss": 1.4631, "step": 4370},
    {"epoch": 49.77, "learning_rate": 1.764705882352941e-07, "loss": 1.4604, "step": 4380},
    {"epoch": 49.88, "learning_rate": 8.823529411764706e-08, "loss": 1.4621, "step": 4390},
    {"epoch": 49.99, "learning_rate": 0.0, "loss": 1.4644, "step": 4400},
    {"epoch": 49.99, "step": 4400, "total_flos": 1.218502107627307e+18, "train_loss": 1.6152976189960133, "train_runtime": 26687.1063, "train_samples_per_second": 21.211, "train_steps_per_second": 0.165}
  ],
  "max_steps": 4400,
  "num_train_epochs": 50,
  "total_flos": 1.218502107627307e+18,
  "trial_name": null,
  "trial_params": null
}