{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 1382,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.7341040462427746e-06,
      "loss": 6.1875,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.468208092485549e-06,
      "loss": 5.6434,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.202312138728324e-06,
      "loss": 5.1445,
      "step": 30
    },
    {
      "epoch": 0.06,
      "learning_rate": 6.936416184971098e-06,
      "loss": 4.9082,
      "step": 40
    },
    {
      "epoch": 0.07,
      "learning_rate": 8.670520231213873e-06,
      "loss": 4.6742,
      "step": 50
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.0404624277456647e-05,
      "loss": 4.0891,
      "step": 60
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.2138728323699422e-05,
      "loss": 3.7152,
      "step": 70
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.3872832369942197e-05,
      "loss": 3.3334,
      "step": 80
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.560693641618497e-05,
      "loss": 2.2468,
      "step": 90
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.7341040462427746e-05,
      "loss": 1.7937,
      "step": 100
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9075144508670522e-05,
      "loss": 1.8321,
      "step": 110
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.0809248554913295e-05,
      "loss": 1.4568,
      "step": 120
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.2543352601156068e-05,
      "loss": 1.3031,
      "step": 130
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.4277456647398844e-05,
      "loss": 1.0809,
      "step": 140
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.601156069364162e-05,
      "loss": 1.4456,
      "step": 150
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.7745664739884393e-05,
      "loss": 1.5199,
      "step": 160
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.9479768786127166e-05,
      "loss": 0.9764,
      "step": 170
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.121387283236994e-05,
      "loss": 1.2064,
      "step": 180
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.294797687861272e-05,
      "loss": 1.0915,
      "step": 190
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.468208092485549e-05,
      "loss": 1.3032,
      "step": 200
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.6416184971098265e-05,
      "loss": 1.0709,
      "step": 210
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.8150289017341044e-05,
      "loss": 1.6387,
      "step": 220
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.988439306358381e-05,
      "loss": 1.2317,
      "step": 230
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.161849710982659e-05,
      "loss": 1.0637,
      "step": 240
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.335260115606937e-05,
      "loss": 1.0287,
      "step": 250
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.5086705202312136e-05,
      "loss": 1.074,
      "step": 260
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.6820809248554915e-05,
      "loss": 0.868,
      "step": 270
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.855491329479769e-05,
      "loss": 0.8482,
      "step": 280
    },
    {
      "epoch": 0.42,
      "learning_rate": 5.028901734104046e-05,
      "loss": 1.2787,
      "step": 290
    },
    {
      "epoch": 0.43,
      "learning_rate": 5.202312138728324e-05,
      "loss": 1.1221,
      "step": 300
    },
    {
      "epoch": 0.45,
      "learning_rate": 5.3757225433526014e-05,
      "loss": 0.9836,
      "step": 310
    },
    {
      "epoch": 0.46,
      "learning_rate": 5.5491329479768787e-05,
      "loss": 1.5154,
      "step": 320
    },
    {
      "epoch": 0.48,
      "learning_rate": 5.7225433526011566e-05,
      "loss": 0.9228,
      "step": 330
    },
    {
      "epoch": 0.49,
      "learning_rate": 5.895953757225433e-05,
      "loss": 0.5593,
      "step": 340
    },
    {
      "epoch": 0.51,
      "learning_rate": 5.976833976833977e-05,
      "loss": 0.8908,
      "step": 350
    },
    {
      "epoch": 0.52,
      "learning_rate": 5.9189189189189195e-05,
      "loss": 0.9526,
      "step": 360
    },
    {
      "epoch": 0.54,
      "learning_rate": 5.861003861003862e-05,
      "loss": 1.2775,
      "step": 370
    },
    {
      "epoch": 0.55,
      "learning_rate": 5.8030888030888026e-05,
      "loss": 1.1524,
      "step": 380
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.745173745173745e-05,
      "loss": 1.2414,
      "step": 390
    },
    {
      "epoch": 0.58,
      "learning_rate": 5.687258687258687e-05,
      "loss": 1.1137,
      "step": 400
    },
    {
      "epoch": 0.59,
      "learning_rate": 5.629343629343629e-05,
      "loss": 0.9212,
      "step": 410
    },
    {
      "epoch": 0.61,
      "learning_rate": 5.5714285714285715e-05,
      "loss": 1.0503,
      "step": 420
    },
    {
      "epoch": 0.62,
      "learning_rate": 5.513513513513514e-05,
      "loss": 0.9738,
      "step": 430
    },
    {
      "epoch": 0.64,
      "learning_rate": 5.455598455598455e-05,
      "loss": 1.2703,
      "step": 440
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.3976833976833975e-05,
      "loss": 1.086,
      "step": 450
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.33976833976834e-05,
      "loss": 0.8879,
      "step": 460
    },
    {
      "epoch": 0.68,
      "learning_rate": 5.281853281853282e-05,
      "loss": 0.8404,
      "step": 470
    },
    {
      "epoch": 0.69,
      "learning_rate": 5.223938223938224e-05,
      "loss": 0.775,
      "step": 480
    },
    {
      "epoch": 0.71,
      "learning_rate": 5.166023166023166e-05,
      "loss": 1.2602,
      "step": 490
    },
    {
      "epoch": 0.72,
      "learning_rate": 5.108108108108108e-05,
      "loss": 1.2931,
      "step": 500
    },
    {
      "epoch": 0.74,
      "learning_rate": 5.05019305019305e-05,
      "loss": 1.1383,
      "step": 510
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.9922779922779924e-05,
      "loss": 0.7263,
      "step": 520
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.9343629343629346e-05,
      "loss": 0.5444,
      "step": 530
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.876447876447877e-05,
      "loss": 0.8266,
      "step": 540
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.8185328185328184e-05,
      "loss": 1.0101,
      "step": 550
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.7606177606177606e-05,
      "loss": 0.8983,
      "step": 560
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.702702702702703e-05,
      "loss": 0.5349,
      "step": 570
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.644787644787645e-05,
      "loss": 0.7786,
      "step": 580
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.586872586872587e-05,
      "loss": 0.8275,
      "step": 590
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.5289575289575295e-05,
      "loss": 0.8844,
      "step": 600
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.471042471042471e-05,
      "loss": 1.1885,
      "step": 610
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.413127413127413e-05,
      "loss": 0.8453,
      "step": 620
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.3552123552123555e-05,
      "loss": 0.7368,
      "step": 630
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.297297297297298e-05,
      "loss": 1.0366,
      "step": 640
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.23938223938224e-05,
      "loss": 0.4579,
      "step": 650
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.181467181467182e-05,
      "loss": 0.873,
      "step": 660
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.123552123552124e-05,
      "loss": 1.0861,
      "step": 670
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.065637065637066e-05,
      "loss": 0.7572,
      "step": 680
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.0077220077220075e-05,
      "loss": 0.7174,
      "step": 690
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.94980694980695e-05,
      "loss": 0.4577,
      "step": 700
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.891891891891892e-05,
      "loss": 0.6622,
      "step": 710
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.833976833976834e-05,
      "loss": 0.3438,
      "step": 720
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.776061776061776e-05,
      "loss": 0.7274,
      "step": 730
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.718146718146718e-05,
      "loss": 0.6925,
      "step": 740
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.66023166023166e-05,
      "loss": 0.6156,
      "step": 750
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.6023166023166024e-05,
      "loss": 0.8509,
      "step": 760
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.5444015444015446e-05,
      "loss": 0.5898,
      "step": 770
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.486486486486486e-05,
      "loss": 0.7896,
      "step": 780
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.4285714285714284e-05,
      "loss": 0.6095,
      "step": 790
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.3706563706563706e-05,
      "loss": 0.7593,
      "step": 800
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.312741312741313e-05,
      "loss": 0.7934,
      "step": 810
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.254826254826255e-05,
      "loss": 0.6189,
      "step": 820
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.196911196911197e-05,
      "loss": 0.8726,
      "step": 830
    },
    {
      "epoch": 1.22,
      "learning_rate": 3.138996138996139e-05,
      "loss": 1.0715,
      "step": 840
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.081081081081081e-05,
      "loss": 0.8481,
      "step": 850
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.0231660231660233e-05,
      "loss": 0.906,
      "step": 860
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.9652509652509655e-05,
      "loss": 0.727,
      "step": 870
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.9073359073359074e-05,
      "loss": 1.1817,
      "step": 880
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8494208494208496e-05,
      "loss": 0.8072,
      "step": 890
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.7915057915057918e-05,
      "loss": 0.9308,
      "step": 900
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.7335907335907337e-05,
      "loss": 0.2952,
      "step": 910
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.6756756756756756e-05,
      "loss": 0.7024,
      "step": 920
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.6177606177606178e-05,
      "loss": 0.6619,
      "step": 930
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.5598455598455597e-05,
      "loss": 0.5304,
      "step": 940
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.501930501930502e-05,
      "loss": 0.4559,
      "step": 950
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.444015444015444e-05,
      "loss": 0.7422,
      "step": 960
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.386100386100386e-05,
      "loss": 0.7257,
      "step": 970
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.3281853281853282e-05,
      "loss": 0.6573,
      "step": 980
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.2702702702702705e-05,
      "loss": 0.7282,
      "step": 990
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.2123552123552123e-05,
      "loss": 0.6065,
      "step": 1000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.1544401544401546e-05,
      "loss": 0.5585,
      "step": 1010
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.0965250965250968e-05,
      "loss": 0.8342,
      "step": 1020
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.0386100386100387e-05,
      "loss": 0.7599,
      "step": 1030
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.980694980694981e-05,
      "loss": 0.7969,
      "step": 1040
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.922779922779923e-05,
      "loss": 0.5474,
      "step": 1050
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.864864864864865e-05,
      "loss": 0.8421,
      "step": 1060
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.806949806949807e-05,
      "loss": 0.565,
      "step": 1070
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.7490347490347488e-05,
      "loss": 0.3672,
      "step": 1080
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.691119691119691e-05,
      "loss": 0.505,
      "step": 1090
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.6332046332046332e-05,
      "loss": 0.5057,
      "step": 1100
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.575289575289575e-05,
      "loss": 0.7104,
      "step": 1110
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.5173745173745173e-05,
      "loss": 0.6253,
      "step": 1120
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.4594594594594596e-05,
      "loss": 0.9309,
      "step": 1130
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.4015444015444016e-05,
      "loss": 0.7422,
      "step": 1140
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.3436293436293437e-05,
      "loss": 0.6471,
      "step": 1150
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.2857142857142857e-05,
      "loss": 0.7338,
      "step": 1160
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.227799227799228e-05,
      "loss": 0.874,
      "step": 1170
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.1698841698841698e-05,
      "loss": 0.7624,
      "step": 1180
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.1119691119691119e-05,
      "loss": 0.7777,
      "step": 1190
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.0540540540540541e-05,
      "loss": 0.8091,
      "step": 1200
    },
    {
      "epoch": 1.75,
      "learning_rate": 9.961389961389962e-06,
      "loss": 0.9177,
      "step": 1210
    },
    {
      "epoch": 1.77,
      "learning_rate": 9.382239382239382e-06,
      "loss": 1.0223,
      "step": 1220
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.803088803088804e-06,
      "loss": 0.2531,
      "step": 1230
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.223938223938225e-06,
      "loss": 0.6171,
      "step": 1240
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.644787644787645e-06,
      "loss": 0.6996,
      "step": 1250
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.065637065637066e-06,
      "loss": 0.5205,
      "step": 1260
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.486486486486487e-06,
      "loss": 0.65,
      "step": 1270
    },
    {
      "epoch": 1.85,
      "learning_rate": 5.907335907335907e-06,
      "loss": 0.5435,
      "step": 1280
    },
    {
      "epoch": 1.87,
      "learning_rate": 5.3281853281853285e-06,
      "loss": 0.5957,
      "step": 1290
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.749034749034749e-06,
      "loss": 0.5854,
      "step": 1300
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.1698841698841696e-06,
      "loss": 0.779,
      "step": 1310
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.5907335907335905e-06,
      "loss": 0.4551,
      "step": 1320
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.0115830115830115e-06,
      "loss": 0.7719,
      "step": 1330
    },
    {
      "epoch": 1.94,
      "learning_rate": 2.4324324324324325e-06,
      "loss": 0.5182,
      "step": 1340
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.8532818532818534e-06,
      "loss": 0.6974,
      "step": 1350
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.2741312741312742e-06,
      "loss": 0.5345,
      "step": 1360
    },
    {
      "epoch": 1.98,
      "learning_rate": 6.949806949806949e-07,
      "loss": 0.6196,
      "step": 1370
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.1583011583011584e-07,
      "loss": 0.6318,
      "step": 1380
    },
    {
      "epoch": 2.0,
      "step": 1382,
      "total_flos": 5.175834887457866e+18,
      "train_loss": 1.088201597354519,
      "train_runtime": 823.8622,
      "train_samples_per_second": 214.963,
      "train_steps_per_second": 1.677
    }
  ],
  "max_steps": 1382,
  "num_train_epochs": 2,
  "total_flos": 5.175834887457866e+18,
  "trial_name": null,
  "trial_params": null
}