{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8934554389099844,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.941176470588235e-07,
      "loss": 2.6188,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.88235294117647e-07,
      "loss": 2.4129,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 8.823529411764705e-07,
      "loss": 2.1366,
      "step": 30
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.999245478090202e-07,
      "loss": 1.8696,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.994635335507661e-07,
      "loss": 1.6629,
      "step": 50
    },
    {
      "epoch": 0.05,
      "learning_rate": 9.985838089271184e-07,
      "loss": 1.5366,
      "step": 60
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.972861114289922e-07,
      "loss": 1.4687,
      "step": 70
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.955715289424037e-07,
      "loss": 1.398,
      "step": 80
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.934414988364722e-07,
      "loss": 1.3378,
      "step": 90
    },
    {
      "epoch": 0.09,
      "learning_rate": 9.908978067584452e-07,
      "loss": 1.2982,
      "step": 100
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.879425851367544e-07,
      "loss": 1.2802,
      "step": 110
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.845783113933575e-07,
      "loss": 1.2718,
      "step": 120
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.808078058668652e-07,
      "loss": 1.231,
      "step": 130
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.766342294481949e-07,
      "loss": 1.2435,
      "step": 140
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.720610809307313e-07,
      "loss": 1.2031,
      "step": 150
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.670921940772186e-07,
      "loss": 1.2031,
      "step": 160
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.617317344058378e-07,
      "loss": 1.1938,
      "step": 170
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.559841956981707e-07,
      "loss": 1.2037,
      "step": 180
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.498543962319708e-07,
      "loss": 1.182,
      "step": 190
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.433474747419042e-07,
      "loss": 1.1655,
      "step": 200
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.364688861116443e-07,
      "loss": 1.147,
      "step": 210
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.29224396800933e-07,
      "loss": 1.147,
      "step": 220
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.216200800114411e-07,
      "loss": 1.1768,
      "step": 230
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.136623105954802e-07,
      "loss": 1.1543,
      "step": 240
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.053577597118358e-07,
      "loss": 1.1404,
      "step": 250
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.967133892331999e-07,
      "loss": 1.1323,
      "step": 260
    },
    {
      "epoch": 0.24,
      "learning_rate": 8.877364459098931e-07,
      "loss": 1.136,
      "step": 270
    },
    {
      "epoch": 0.25,
      "learning_rate": 8.784344552947665e-07,
      "loss": 1.1469,
      "step": 280
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.688152154343793e-07,
      "loss": 1.1319,
      "step": 290
    },
    {
      "epoch": 0.27,
      "learning_rate": 8.588867903317394e-07,
      "loss": 1.1288,
      "step": 300
    },
    {
      "epoch": 0.28,
      "learning_rate": 8.486575031860859e-07,
      "loss": 1.1225,
      "step": 310
    },
    {
      "epoch": 0.29,
      "learning_rate": 8.381359294153838e-07,
      "loss": 1.1105,
      "step": 320
    },
    {
      "epoch": 0.29,
      "learning_rate": 8.27330889467378e-07,
      "loss": 1.1283,
      "step": 330
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.16251441425233e-07,
      "loss": 1.107,
      "step": 340
    },
    {
      "epoch": 0.31,
      "learning_rate": 8.049068734139603e-07,
      "loss": 1.1044,
      "step": 350
    },
    {
      "epoch": 0.32,
      "learning_rate": 7.933066958139942e-07,
      "loss": 1.1316,
      "step": 360
    },
    {
      "epoch": 0.33,
      "learning_rate": 7.814606332884488e-07,
      "loss": 1.0889,
      "step": 370
    },
    {
      "epoch": 0.34,
      "learning_rate": 7.69378616630736e-07,
      "loss": 1.0929,
      "step": 380
    },
    {
      "epoch": 0.35,
      "learning_rate": 7.570707744393813e-07,
      "loss": 1.1371,
      "step": 390
    },
    {
      "epoch": 0.36,
      "learning_rate": 7.445474246270149e-07,
      "loss": 1.1332,
      "step": 400
    },
    {
      "epoch": 0.37,
      "learning_rate": 7.318190657706574e-07,
      "loss": 1.0957,
      "step": 410
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.188963683105504e-07,
      "loss": 1.0889,
      "step": 420
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.057901656049109e-07,
      "loss": 1.1067,
      "step": 430
    },
    {
      "epoch": 0.39,
      "learning_rate": 6.925114448481088e-07,
      "loss": 1.1219,
      "step": 440
    },
    {
      "epoch": 0.4,
      "learning_rate": 6.790713378598802e-07,
      "loss": 1.1267,
      "step": 450
    },
    {
      "epoch": 0.41,
      "learning_rate": 6.65481111753297e-07,
      "loss": 1.087,
      "step": 460
    },
    {
      "epoch": 0.42,
      "learning_rate": 6.517521594893197e-07,
      "loss": 1.0872,
      "step": 470
    },
    {
      "epoch": 0.43,
      "learning_rate": 6.378959903258474e-07,
      "loss": 1.0818,
      "step": 480
    },
    {
      "epoch": 0.44,
      "learning_rate": 6.239242201692752e-07,
      "loss": 1.0856,
      "step": 490
    },
    {
      "epoch": 0.45,
      "learning_rate": 6.098485618366447e-07,
      "loss": 1.0804,
      "step": 500
    },
    {
      "epoch": 0.46,
      "learning_rate": 5.956808152365532e-07,
      "loss": 1.0859,
      "step": 510
    },
    {
      "epoch": 0.46,
      "learning_rate": 5.814328574770522e-07,
      "loss": 1.088,
      "step": 520
    },
    {
      "epoch": 0.47,
      "learning_rate": 5.671166329088277e-07,
      "loss": 1.0791,
      "step": 530
    },
    {
      "epoch": 0.48,
      "learning_rate": 5.527441431120099e-07,
      "loss": 1.0885,
      "step": 540
    },
    {
      "epoch": 0.49,
      "learning_rate": 5.383274368350061e-07,
      "loss": 1.0808,
      "step": 550
    },
    {
      "epoch": 0.5,
      "learning_rate": 5.238785998937924e-07,
      "loss": 1.0586,
      "step": 560
    },
    {
      "epoch": 0.51,
      "learning_rate": 5.094097450401282e-07,
      "loss": 1.0984,
      "step": 570
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.949330018071946e-07,
      "loss": 1.0646,
      "step": 580
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.804605063411592e-07,
      "loss": 1.0581,
      "step": 590
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.660043912272015e-07,
      "loss": 1.0679,
      "step": 600
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.515767753185208e-07,
      "loss": 1.1003,
      "step": 610
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.3718975357685725e-07,
      "loss": 1.0751,
      "step": 620
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.228553869330412e-07,
      "loss": 1.0733,
      "step": 630
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0858569217607086e-07,
      "loss": 1.0505,
      "step": 640
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.943926318791963e-07,
      "loss": 1.0813,
      "step": 650
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.8028810437145214e-07,
      "loss": 1.0671,
      "step": 660
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.66283933763048e-07,
      "loss": 1.0648,
      "step": 670
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.523918600329788e-07,
      "loss": 1.073,
      "step": 680
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.386235291871625e-07,
      "loss": 1.0672,
      "step": 690
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.249904834953584e-07,
      "loss": 1.0666,
      "step": 700
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.115041518150502e-07,
      "loss": 1.0644,
      "step": 710
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.981758400104028e-07,
      "loss": 1.0637,
      "step": 720
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.850167214743282e-07,
      "loss": 1.064,
      "step": 730
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.7203782776160623e-07,
      "loss": 1.0665,
      "step": 740
    },
    {
      "epoch": 0.67,
      "learning_rate": 2.592500393409066e-07,
      "loss": 1.0707,
      "step": 750
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.466640764734754e-07,
      "loss": 1.0721,
      "step": 760
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.3429049022612086e-07,
      "loss": 1.0674,
      "step": 770
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.2213965362604315e-07,
      "loss": 1.0978,
      "step": 780
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.1022175296491512e-07,
      "loss": 1.0692,
      "step": 790
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.985467792595083e-07,
      "loss": 1.0741,
      "step": 800
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.8712451987602202e-07,
      "loss": 1.0537,
      "step": 810
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7596455032513707e-07,
      "loss": 1.071,
      "step": 820
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.650762262346713e-07,
      "loss": 1.0652,
      "step": 830
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.5446867550656767e-07,
      "loss": 1.0636,
      "step": 840
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.441507906647905e-07,
      "loss": 1.0744,
      "step": 850
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.3413122140054217e-07,
      "loss": 1.0672,
      "step": 860
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.244183673210532e-07,
      "loss": 1.0683,
      "step": 870
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1502037090802153e-07,
      "loss": 1.074,
      "step": 880
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0594511069160512e-07,
      "loss": 1.0427,
      "step": 890
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.720019464569218e-08,
      "loss": 1.058,
      "step": 900
    },
    {
      "epoch": 0.81,
      "learning_rate": 8.879295380998242e-08,
      "loss": 1.0496,
      "step": 910
    },
    {
      "epoch": 0.82,
      "learning_rate": 8.073043614422786e-08,
      "loss": 1.0498,
      "step": 920
    },
    {
      "epoch": 0.83,
      "learning_rate": 7.301940061978722e-08,
      "loss": 1.0746,
      "step": 930
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.566631155344244e-08,
      "loss": 1.0475,
      "step": 940
    },
    {
      "epoch": 0.85,
      "learning_rate": 5.867733318823226e-08,
      "loss": 1.0835,
      "step": 950
    },
    {
      "epoch": 0.86,
      "learning_rate": 5.2058324525842414e-08,
      "loss": 1.0422,
      "step": 960
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.5814834414886264e-08,
      "loss": 1.0385,
      "step": 970
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.995209689919282e-08,
      "loss": 1.0351,
      "step": 980
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.44750268300027e-08,
      "loss": 1.0523,
      "step": 990
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.9388215745748345e-08,
      "loss": 1.062,
      "step": 1000
    }
  ],
  "max_steps": 1119,
  "num_train_epochs": 1,
  "total_flos": 2.935716160030638e+17,
  "trial_name": null,
  "trial_params": null
}