{ |
|
"best_metric": 0.5704381465911865, |
|
"best_model_checkpoint": "ckpt/llama2_13b_fuze30_no_sys/news_commentary_es/checkpoint-3000", |
|
"epoch": 5.0, |
|
"eval_steps": 200, |
|
"global_step": 3985, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.5029562711715698, |
|
"learning_rate": 5e-06, |
|
"loss": 1.2458, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.6982043981552124, |
|
"learning_rate": 1e-05, |
|
"loss": 1.266, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.7867264151573181, |
|
"learning_rate": 9.99984305369385e-06, |
|
"loss": 1.2072, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.7258906364440918, |
|
"learning_rate": 9.999372224628255e-06, |
|
"loss": 1.0755, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.6007847189903259, |
|
"learning_rate": 9.998587542361168e-06, |
|
"loss": 0.9002, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3854537606239319, |
|
"learning_rate": 9.997489056153784e-06, |
|
"loss": 0.7568, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3243318796157837, |
|
"learning_rate": 9.996076834967442e-06, |
|
"loss": 0.7157, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.24546250700950623, |
|
"learning_rate": 9.994350967459303e-06, |
|
"loss": 0.6984, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.34287428855895996, |
|
"learning_rate": 9.99231156197678e-06, |
|
"loss": 0.6996, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.3073829412460327, |
|
"learning_rate": 9.989958746550732e-06, |
|
"loss": 0.7129, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.2948177456855774, |
|
"learning_rate": 9.987292668887439e-06, |
|
"loss": 0.6977, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.3140040934085846, |
|
"learning_rate": 9.984313496359317e-06, |
|
"loss": 0.6272, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.2903355658054352, |
|
"learning_rate": 9.981021415994413e-06, |
|
"loss": 0.6285, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.3696918189525604, |
|
"learning_rate": 9.97741663446467e-06, |
|
"loss": 0.651, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.24330583214759827, |
|
"learning_rate": 9.973499378072947e-06, |
|
"loss": 0.6376, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.26600131392478943, |
|
"learning_rate": 9.96926989273881e-06, |
|
"loss": 0.6402, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.34049201011657715, |
|
"learning_rate": 9.9647284439831e-06, |
|
"loss": 0.6662, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.3909376859664917, |
|
"learning_rate": 9.959875316911261e-06, |
|
"loss": 0.6134, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.3953985869884491, |
|
"learning_rate": 9.95471081619544e-06, |
|
"loss": 0.6054, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.42383915185928345, |
|
"learning_rate": 9.949235266055356e-06, |
|
"loss": 0.6296, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"eval_loss": 0.6343660950660706, |
|
"eval_runtime": 71.7569, |
|
"eval_samples_per_second": 62.712, |
|
"eval_steps_per_second": 1.965, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.37584221363067627, |
|
"learning_rate": 9.943449010237965e-06, |
|
"loss": 0.6724, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.368352472782135, |
|
"learning_rate": 9.937352411995851e-06, |
|
"loss": 0.6065, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.30686938762664795, |
|
"learning_rate": 9.930945854064447e-06, |
|
"loss": 0.5998, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2913750410079956, |
|
"learning_rate": 9.924229738637992e-06, |
|
"loss": 0.6274, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.30744606256484985, |
|
"learning_rate": 9.91720448734429e-06, |
|
"loss": 0.6442, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.315236359834671, |
|
"learning_rate": 9.909870541218238e-06, |
|
"loss": 0.6026, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.28183722496032715, |
|
"learning_rate": 9.902228360674137e-06, |
|
"loss": 0.6511, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.33093082904815674, |
|
"learning_rate": 9.89427842547679e-06, |
|
"loss": 0.6575, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.3711361587047577, |
|
"learning_rate": 9.886021234711382e-06, |
|
"loss": 0.6209, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.35551759600639343, |
|
"learning_rate": 9.877457306752149e-06, |
|
"loss": 0.6265, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.3064817786216736, |
|
"learning_rate": 9.868587179229834e-06, |
|
"loss": 0.6214, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4653981924057007, |
|
"learning_rate": 9.859411408997939e-06, |
|
"loss": 0.6234, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.3303908407688141, |
|
"learning_rate": 9.84993057209776e-06, |
|
"loss": 0.6502, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3864089548587799, |
|
"learning_rate": 9.84014526372223e-06, |
|
"loss": 0.6275, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.3393557071685791, |
|
"learning_rate": 9.83005609817855e-06, |
|
"loss": 0.5913, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.3689006567001343, |
|
"learning_rate": 9.819663708849624e-06, |
|
"loss": 0.5962, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.32422763109207153, |
|
"learning_rate": 9.808968748154302e-06, |
|
"loss": 0.5896, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.3583005964756012, |
|
"learning_rate": 9.797971887506413e-06, |
|
"loss": 0.617, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.331403911113739, |
|
"learning_rate": 9.786673817272621e-06, |
|
"loss": 0.5994, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.3698234558105469, |
|
"learning_rate": 9.77507524672908e-06, |
|
"loss": 0.6272, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 0.612372875213623, |
|
"eval_runtime": 68.9986, |
|
"eval_samples_per_second": 65.219, |
|
"eval_steps_per_second": 2.044, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.3716101348400116, |
|
"learning_rate": 9.763176904016914e-06, |
|
"loss": 0.6246, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.4471350610256195, |
|
"learning_rate": 9.750979536096495e-06, |
|
"loss": 0.645, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.31366682052612305, |
|
"learning_rate": 9.738483908700563e-06, |
|
"loss": 0.6397, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.3895745575428009, |
|
"learning_rate": 9.72569080628614e-06, |
|
"loss": 0.6118, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.42779144644737244, |
|
"learning_rate": 9.71260103198529e-06, |
|
"loss": 0.6359, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.37752124667167664, |
|
"learning_rate": 9.699215407554712e-06, |
|
"loss": 0.5863, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.43578141927719116, |
|
"learning_rate": 9.685534773324124e-06, |
|
"loss": 0.6244, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.5181850790977478, |
|
"learning_rate": 9.671559988143529e-06, |
|
"loss": 0.5962, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.43225836753845215, |
|
"learning_rate": 9.657291929329293e-06, |
|
"loss": 0.631, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.36941251158714294, |
|
"learning_rate": 9.64273149260907e-06, |
|
"loss": 0.6398, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.3803509473800659, |
|
"learning_rate": 9.627879592065558e-06, |
|
"loss": 0.6393, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.3586438298225403, |
|
"learning_rate": 9.612737160079134e-06, |
|
"loss": 0.5944, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.41518551111221313, |
|
"learning_rate": 9.597305147269303e-06, |
|
"loss": 0.6224, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.3281481862068176, |
|
"learning_rate": 9.581584522435025e-06, |
|
"loss": 0.5853, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.3723869323730469, |
|
"learning_rate": 9.565576272493902e-06, |
|
"loss": 0.591, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.37543514370918274, |
|
"learning_rate": 9.549281402420212e-06, |
|
"loss": 0.5883, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.39954090118408203, |
|
"learning_rate": 9.532700935181822e-06, |
|
"loss": 0.5942, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.4060945212841034, |
|
"learning_rate": 9.515835911675968e-06, |
|
"loss": 0.6082, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.32310950756073, |
|
"learning_rate": 9.498687390663905e-06, |
|
"loss": 0.5836, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.37066149711608887, |
|
"learning_rate": 9.481256448704445e-06, |
|
"loss": 0.6167, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"eval_loss": 0.6020291447639465, |
|
"eval_runtime": 71.3713, |
|
"eval_samples_per_second": 63.051, |
|
"eval_steps_per_second": 1.976, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.38338422775268555, |
|
"learning_rate": 9.463544180086368e-06, |
|
"loss": 0.6211, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.3963073790073395, |
|
"learning_rate": 9.445551696759732e-06, |
|
"loss": 0.5827, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.4199773967266083, |
|
"learning_rate": 9.427280128266049e-06, |
|
"loss": 0.5782, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.39564579725265503, |
|
"learning_rate": 9.4087306216674e-06, |
|
"loss": 0.6028, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.43293875455856323, |
|
"learning_rate": 9.389904341474394e-06, |
|
"loss": 0.5791, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.3330543041229248, |
|
"learning_rate": 9.370802469573088e-06, |
|
"loss": 0.5875, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.4487074613571167, |
|
"learning_rate": 9.351426205150778e-06, |
|
"loss": 0.6148, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.4278540015220642, |
|
"learning_rate": 9.331776764620711e-06, |
|
"loss": 0.6271, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.4620065987110138, |
|
"learning_rate": 9.311855381545735e-06, |
|
"loss": 0.6439, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5026012063026428, |
|
"learning_rate": 9.291663306560843e-06, |
|
"loss": 0.614, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.3847663700580597, |
|
"learning_rate": 9.271201807294667e-06, |
|
"loss": 0.6058, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.38854289054870605, |
|
"learning_rate": 9.250472168289901e-06, |
|
"loss": 0.5859, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.4080798625946045, |
|
"learning_rate": 9.22947569092265e-06, |
|
"loss": 0.5874, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.587581992149353, |
|
"learning_rate": 9.20821369332074e-06, |
|
"loss": 0.6077, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5180934071540833, |
|
"learning_rate": 9.186687510280968e-06, |
|
"loss": 0.6005, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.4063900411128998, |
|
"learning_rate": 9.164898493185295e-06, |
|
"loss": 0.6007, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.3806501626968384, |
|
"learning_rate": 9.142848009916025e-06, |
|
"loss": 0.6, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.4454844892024994, |
|
"learning_rate": 9.120537444769912e-06, |
|
"loss": 0.5704, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.4750034511089325, |
|
"learning_rate": 9.097968198371277e-06, |
|
"loss": 0.626, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.44586676359176636, |
|
"learning_rate": 9.075141687584056e-06, |
|
"loss": 0.5882, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.5939783453941345, |
|
"eval_runtime": 72.333, |
|
"eval_samples_per_second": 62.212, |
|
"eval_steps_per_second": 1.949, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.4351860284805298, |
|
"learning_rate": 9.052059345422874e-06, |
|
"loss": 0.5908, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5268151760101318, |
|
"learning_rate": 9.028722620963066e-06, |
|
"loss": 0.6155, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.5248316526412964, |
|
"learning_rate": 9.00513297924971e-06, |
|
"loss": 0.5736, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.43092867732048035, |
|
"learning_rate": 8.98129190120566e-06, |
|
"loss": 0.5602, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.4282706379890442, |
|
"learning_rate": 8.957200883538568e-06, |
|
"loss": 0.5737, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.6257330775260925, |
|
"learning_rate": 8.932861438646929e-06, |
|
"loss": 0.5982, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.44131189584732056, |
|
"learning_rate": 8.908275094525132e-06, |
|
"loss": 0.6144, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.49996450543403625, |
|
"learning_rate": 8.88344339466753e-06, |
|
"loss": 0.6138, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.41754284501075745, |
|
"learning_rate": 8.858367897971555e-06, |
|
"loss": 0.5641, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.5091477036476135, |
|
"learning_rate": 8.833050178639835e-06, |
|
"loss": 0.5455, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.5106736421585083, |
|
"learning_rate": 8.807491826081384e-06, |
|
"loss": 0.5913, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.48033618927001953, |
|
"learning_rate": 8.781694444811813e-06, |
|
"loss": 0.5773, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.5638559460639954, |
|
"learning_rate": 8.755659654352599e-06, |
|
"loss": 0.5732, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.45605936646461487, |
|
"learning_rate": 8.729389089129423e-06, |
|
"loss": 0.5911, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.4803718328475952, |
|
"learning_rate": 8.702884398369552e-06, |
|
"loss": 0.5909, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5215190649032593, |
|
"learning_rate": 8.676147245998307e-06, |
|
"loss": 0.614, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.490953654050827, |
|
"learning_rate": 8.649179310534612e-06, |
|
"loss": 0.5687, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.5034527778625488, |
|
"learning_rate": 8.621982284985611e-06, |
|
"loss": 0.5948, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.5091165900230408, |
|
"learning_rate": 8.59455787674038e-06, |
|
"loss": 0.5695, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.49061551690101624, |
|
"learning_rate": 8.56690780746275e-06, |
|
"loss": 0.5522, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"eval_loss": 0.5888673067092896, |
|
"eval_runtime": 73.1303, |
|
"eval_samples_per_second": 61.534, |
|
"eval_steps_per_second": 1.928, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.4834423065185547, |
|
"learning_rate": 8.539033812983215e-06, |
|
"loss": 0.5413, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.6021168828010559, |
|
"learning_rate": 8.510937643189962e-06, |
|
"loss": 0.5882, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.5302062630653381, |
|
"learning_rate": 8.482621061919021e-06, |
|
"loss": 0.6059, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.4995095729827881, |
|
"learning_rate": 8.454085846843523e-06, |
|
"loss": 0.5419, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5463216304779053, |
|
"learning_rate": 8.425333789362108e-06, |
|
"loss": 0.5941, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.536431610584259, |
|
"learning_rate": 8.396366694486466e-06, |
|
"loss": 0.6284, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.5217477083206177, |
|
"learning_rate": 8.36718638072801e-06, |
|
"loss": 0.6178, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.4792711138725281, |
|
"learning_rate": 8.337794679983724e-06, |
|
"loss": 0.5858, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.5548479557037354, |
|
"learning_rate": 8.30819343742115e-06, |
|
"loss": 0.5861, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.4717773199081421, |
|
"learning_rate": 8.278384511362562e-06, |
|
"loss": 0.6047, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.5312036275863647, |
|
"learning_rate": 8.248369773168297e-06, |
|
"loss": 0.5994, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.4545268714427948, |
|
"learning_rate": 8.218151107119266e-06, |
|
"loss": 0.5618, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.6215177774429321, |
|
"learning_rate": 8.187730410298675e-06, |
|
"loss": 0.6228, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.5464479923248291, |
|
"learning_rate": 8.157109592472924e-06, |
|
"loss": 0.5774, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.5225650668144226, |
|
"learning_rate": 8.126290575971714e-06, |
|
"loss": 0.5804, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.6188873648643494, |
|
"learning_rate": 8.095275295567361e-06, |
|
"loss": 0.5498, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5218258500099182, |
|
"learning_rate": 8.064065698353344e-06, |
|
"loss": 0.5829, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.5987175703048706, |
|
"learning_rate": 8.032663743622065e-06, |
|
"loss": 0.5914, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.527808666229248, |
|
"learning_rate": 8.001071402741843e-06, |
|
"loss": 0.5682, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.5273358821868896, |
|
"learning_rate": 7.969290659033156e-06, |
|
"loss": 0.5919, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"eval_loss": 0.5836370587348938, |
|
"eval_runtime": 71.4972, |
|
"eval_samples_per_second": 62.939, |
|
"eval_steps_per_second": 1.972, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.6059889197349548, |
|
"learning_rate": 7.93732350764414e-06, |
|
"loss": 0.5763, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.5306861996650696, |
|
"learning_rate": 7.905171955425328e-06, |
|
"loss": 0.5567, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.5967408418655396, |
|
"learning_rate": 7.872838020803662e-06, |
|
"loss": 0.5609, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.6726911067962646, |
|
"learning_rate": 7.84032373365578e-06, |
|
"loss": 0.6471, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.5502099990844727, |
|
"learning_rate": 7.807631135180591e-06, |
|
"loss": 0.5886, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.615433394908905, |
|
"learning_rate": 7.774762277771123e-06, |
|
"loss": 0.5559, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.5481686592102051, |
|
"learning_rate": 7.741719224885675e-06, |
|
"loss": 0.5928, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.620995044708252, |
|
"learning_rate": 7.708504050918288e-06, |
|
"loss": 0.5683, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.5698446035385132, |
|
"learning_rate": 7.675118841068507e-06, |
|
"loss": 0.5909, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.5968176126480103, |
|
"learning_rate": 7.64156569121048e-06, |
|
"loss": 0.573, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.7233667373657227, |
|
"learning_rate": 7.6078467077613746e-06, |
|
"loss": 0.5888, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.5293125510215759, |
|
"learning_rate": 7.5739640075491546e-06, |
|
"loss": 0.6316, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.5775229334831238, |
|
"learning_rate": 7.5399197176796735e-06, |
|
"loss": 0.591, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.7166782021522522, |
|
"learning_rate": 7.50571597540315e-06, |
|
"loss": 0.5505, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.5287649631500244, |
|
"learning_rate": 7.471354927979984e-06, |
|
"loss": 0.6036, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5856715440750122, |
|
"learning_rate": 7.436838732545964e-06, |
|
"loss": 0.5682, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5226353406906128, |
|
"learning_rate": 7.402169555976841e-06, |
|
"loss": 0.5593, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.6625097393989563, |
|
"learning_rate": 7.367349574752296e-06, |
|
"loss": 0.5853, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.5979090929031372, |
|
"learning_rate": 7.332380974819301e-06, |
|
"loss": 0.5493, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.6176837682723999, |
|
"learning_rate": 7.297265951454891e-06, |
|
"loss": 0.6038, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"eval_loss": 0.5800889730453491, |
|
"eval_runtime": 69.6671, |
|
"eval_samples_per_second": 64.593, |
|
"eval_steps_per_second": 2.024, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.6878402233123779, |
|
"learning_rate": 7.262006709128351e-06, |
|
"loss": 0.5453, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.6541485786437988, |
|
"learning_rate": 7.226605461362817e-06, |
|
"loss": 0.586, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.6143850088119507, |
|
"learning_rate": 7.191064430596315e-06, |
|
"loss": 0.5422, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.6676700115203857, |
|
"learning_rate": 7.155385848042245e-06, |
|
"loss": 0.5806, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.6332250833511353, |
|
"learning_rate": 7.119571953549305e-06, |
|
"loss": 0.5585, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.7023981809616089, |
|
"learning_rate": 7.08362499546087e-06, |
|
"loss": 0.5833, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.665985107421875, |
|
"learning_rate": 7.047547230473861e-06, |
|
"loss": 0.569, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.5501468777656555, |
|
"learning_rate": 7.011340923497055e-06, |
|
"loss": 0.5824, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5913044810295105, |
|
"learning_rate": 6.975008347508906e-06, |
|
"loss": 0.5581, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.6394723653793335, |
|
"learning_rate": 6.938551783414856e-06, |
|
"loss": 0.5813, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.6261583566665649, |
|
"learning_rate": 6.901973519904128e-06, |
|
"loss": 0.5804, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.5661801695823669, |
|
"learning_rate": 6.865275853306062e-06, |
|
"loss": 0.5587, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.5941905975341797, |
|
"learning_rate": 6.828461087445946e-06, |
|
"loss": 0.5735, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.5802645087242126, |
|
"learning_rate": 6.791531533500384e-06, |
|
"loss": 0.5854, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.5715972781181335, |
|
"learning_rate": 6.754489509852208e-06, |
|
"loss": 0.5627, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.6669459342956543, |
|
"learning_rate": 6.7173373419449295e-06, |
|
"loss": 0.5647, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.6907923221588135, |
|
"learning_rate": 6.68007736213676e-06, |
|
"loss": 0.5364, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5516065359115601, |
|
"learning_rate": 6.6427119095541745e-06, |
|
"loss": 0.5643, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.6715050935745239, |
|
"learning_rate": 6.60524332994508e-06, |
|
"loss": 0.6026, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.5935114026069641, |
|
"learning_rate": 6.567673975531542e-06, |
|
"loss": 0.5882, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"eval_loss": 0.5773123502731323, |
|
"eval_runtime": 71.3304, |
|
"eval_samples_per_second": 63.087, |
|
"eval_steps_per_second": 1.977, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.7686386108398438, |
|
"learning_rate": 6.530006204862121e-06, |
|
"loss": 0.5391, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.6634293794631958, |
|
"learning_rate": 6.492242382663804e-06, |
|
"loss": 0.5495, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.8547865748405457, |
|
"learning_rate": 6.454384879693549e-06, |
|
"loss": 0.5456, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.8082049489021301, |
|
"learning_rate": 6.41643607258946e-06, |
|
"loss": 0.5903, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.6522872447967529, |
|
"learning_rate": 6.378398343721575e-06, |
|
"loss": 0.5476, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.663383960723877, |
|
"learning_rate": 6.340274081042307e-06, |
|
"loss": 0.5271, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.8022051453590393, |
|
"learning_rate": 6.3020656779365416e-06, |
|
"loss": 0.5656, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.7163932919502258, |
|
"learning_rate": 6.263775533071368e-06, |
|
"loss": 0.5581, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.6052596569061279, |
|
"learning_rate": 6.225406050245507e-06, |
|
"loss": 0.554, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.6838263273239136, |
|
"learning_rate": 6.186959638238397e-06, |
|
"loss": 0.5812, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.6921390891075134, |
|
"learning_rate": 6.148438710658979e-06, |
|
"loss": 0.5237, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.883646547794342, |
|
"learning_rate": 6.109845685794169e-06, |
|
"loss": 0.5267, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.6446242928504944, |
|
"learning_rate": 6.071182986457046e-06, |
|
"loss": 0.5466, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.7913056015968323, |
|
"learning_rate": 6.032453039834748e-06, |
|
"loss": 0.573, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.7966864705085754, |
|
"learning_rate": 5.993658277336101e-06, |
|
"loss": 0.5498, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.7100770473480225, |
|
"learning_rate": 5.954801134438971e-06, |
|
"loss": 0.5624, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.5544886589050293, |
|
"learning_rate": 5.915884050537378e-06, |
|
"loss": 0.5349, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.7371806502342224, |
|
"learning_rate": 5.8769094687883484e-06, |
|
"loss": 0.5789, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.7266082763671875, |
|
"learning_rate": 5.837879835958535e-06, |
|
"loss": 0.5573, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.6665763854980469, |
|
"learning_rate": 5.798797602270623e-06, |
|
"loss": 0.5302, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"eval_loss": 0.5754047632217407, |
|
"eval_runtime": 72.9642, |
|
"eval_samples_per_second": 61.674, |
|
"eval_steps_per_second": 1.932, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.7089454531669617, |
|
"learning_rate": 5.7596652212494944e-06, |
|
"loss": 0.5652, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.790558397769928, |
|
"learning_rate": 5.720485149568211e-06, |
|
"loss": 0.5547, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.7344884872436523, |
|
"learning_rate": 5.681259846893783e-06, |
|
"loss": 0.5703, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.7799204587936401, |
|
"learning_rate": 5.641991775732756e-06, |
|
"loss": 0.5369, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.7437064051628113, |
|
"learning_rate": 5.6026834012766155e-06, |
|
"loss": 0.5814, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.7207692861557007, |
|
"learning_rate": 5.5633371912470334e-06, |
|
"loss": 0.5535, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.7650673985481262, |
|
"learning_rate": 5.5239556157409366e-06, |
|
"loss": 0.5507, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.8299598097801208, |
|
"learning_rate": 5.484541147075449e-06, |
|
"loss": 0.5678, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.6018086671829224, |
|
"learning_rate": 5.4450962596326776e-06, |
|
"loss": 0.5749, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.7759122848510742, |
|
"learning_rate": 5.405623429704373e-06, |
|
"loss": 0.5561, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.7874650359153748, |
|
"learning_rate": 5.366125135336477e-06, |
|
"loss": 0.568, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.642218828201294, |
|
"learning_rate": 5.326603856173548e-06, |
|
"loss": 0.5468, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.7558509707450867, |
|
"learning_rate": 5.2870620733030994e-06, |
|
"loss": 0.5315, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.846817672252655, |
|
"learning_rate": 5.247502269099834e-06, |
|
"loss": 0.5511, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.725319504737854, |
|
"learning_rate": 5.207926927069809e-06, |
|
"loss": 0.5098, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.7795179486274719, |
|
"learning_rate": 5.168338531694523e-06, |
|
"loss": 0.5417, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.764240562915802, |
|
"learning_rate": 5.1287395682749444e-06, |
|
"loss": 0.5502, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.7871197462081909, |
|
"learning_rate": 5.089132522775486e-06, |
|
"loss": 0.575, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.8167646527290344, |
|
"learning_rate": 5.049519881667945e-06, |
|
"loss": 0.5637, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.7696388959884644, |
|
"learning_rate": 5.009904131775402e-06, |
|
"loss": 0.5331, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"eval_loss": 0.5732148289680481, |
|
"eval_runtime": 73.9926, |
|
"eval_samples_per_second": 60.817, |
|
"eval_steps_per_second": 1.906, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.8611487746238708, |
|
"learning_rate": 4.970287760116098e-06, |
|
"loss": 0.544, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.7960777878761292, |
|
"learning_rate": 4.930673253747314e-06, |
|
"loss": 0.5702, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.8783027529716492, |
|
"learning_rate": 4.8910630996092265e-06, |
|
"loss": 0.5309, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.7091485261917114, |
|
"learning_rate": 4.851459784368787e-06, |
|
"loss": 0.5565, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.7412302494049072, |
|
"learning_rate": 4.811865794263611e-06, |
|
"loss": 0.5546, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.7188848257064819, |
|
"learning_rate": 4.772283614945895e-06, |
|
"loss": 0.5927, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.9045892953872681, |
|
"learning_rate": 4.732715731326372e-06, |
|
"loss": 0.5515, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.7781686186790466, |
|
"learning_rate": 4.693164627418315e-06, |
|
"loss": 0.5289, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.8868610858917236, |
|
"learning_rate": 4.653632786181587e-06, |
|
"loss": 0.5934, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.7511059641838074, |
|
"learning_rate": 4.614122689366769e-06, |
|
"loss": 0.5464, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.7453913688659668, |
|
"learning_rate": 4.574636817359365e-06, |
|
"loss": 0.5318, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.6865512132644653, |
|
"learning_rate": 4.535177649024077e-06, |
|
"loss": 0.5527, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.8288848400115967, |
|
"learning_rate": 4.495747661549187e-06, |
|
"loss": 0.5368, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.8635351657867432, |
|
"learning_rate": 4.456349330291055e-06, |
|
"loss": 0.5549, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.7543601393699646, |
|
"learning_rate": 4.4169851286187005e-06, |
|
"loss": 0.5435, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.8399900794029236, |
|
"learning_rate": 4.3776575277585445e-06, |
|
"loss": 0.5853, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.9056334495544434, |
|
"learning_rate": 4.3383689966392604e-06, |
|
"loss": 0.5737, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.7865429520606995, |
|
"learning_rate": 4.2991220017367795e-06, |
|
"loss": 0.5908, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.9085346460342407, |
|
"learning_rate": 4.259919006919456e-06, |
|
"loss": 0.5468, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.844716489315033, |
|
"learning_rate": 4.220762473293379e-06, |
|
"loss": 0.5669, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"eval_loss": 0.5714393854141235, |
|
"eval_runtime": 74.9812, |
|
"eval_samples_per_second": 60.015, |
|
"eval_steps_per_second": 1.88, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.8216796517372131, |
|
"learning_rate": 4.181654859047873e-06, |
|
"loss": 0.5516, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.8482044339179993, |
|
"learning_rate": 4.1425986193011805e-06, |
|
"loss": 0.5463, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.8037722706794739, |
|
"learning_rate": 4.103596205946323e-06, |
|
"loss": 0.5387, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.8523209691047668, |
|
"learning_rate": 4.0646500674971826e-06, |
|
"loss": 0.5282, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.8726175427436829, |
|
"learning_rate": 4.0257626489347895e-06, |
|
"loss": 0.5476, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.9041292071342468, |
|
"learning_rate": 3.986936391553822e-06, |
|
"loss": 0.5677, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.8939766883850098, |
|
"learning_rate": 3.948173732809351e-06, |
|
"loss": 0.5711, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.7777726650238037, |
|
"learning_rate": 3.909477106163818e-06, |
|
"loss": 0.5289, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.7699301242828369, |
|
"learning_rate": 3.87084894093427e-06, |
|
"loss": 0.55, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.9905028939247131, |
|
"learning_rate": 3.832291662139845e-06, |
|
"loss": 0.541, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.8651571869850159, |
|
"learning_rate": 3.7938076903495363e-06, |
|
"loss": 0.5533, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.7639217376708984, |
|
"learning_rate": 3.7553994415302274e-06, |
|
"loss": 0.5511, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.8610684871673584, |
|
"learning_rate": 3.7170693268950354e-06, |
|
"loss": 0.5119, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.7909624576568604, |
|
"learning_rate": 3.67881975275192e-06, |
|
"loss": 0.5881, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.9253331422805786, |
|
"learning_rate": 3.6406531203526274e-06, |
|
"loss": 0.5612, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.8581608533859253, |
|
"learning_rate": 3.6025718257419532e-06, |
|
"loss": 0.5322, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.9016821384429932, |
|
"learning_rate": 3.564578259607303e-06, |
|
"loss": 0.5487, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.8990395665168762, |
|
"learning_rate": 3.5266748071286205e-06, |
|
"loss": 0.5471, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.9548435211181641, |
|
"learning_rate": 3.488863847828652e-06, |
|
"loss": 0.547, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 0.771557092666626, |
|
"learning_rate": 3.4511477554235516e-06, |
|
"loss": 0.5551, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"eval_loss": 0.5705960392951965, |
|
"eval_runtime": 76.5829, |
|
"eval_samples_per_second": 58.76, |
|
"eval_steps_per_second": 1.841, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"grad_norm": 0.8474746942520142, |
|
"learning_rate": 3.4135288976738762e-06, |
|
"loss": 0.5552, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 1.0370606184005737, |
|
"learning_rate": 3.3760096362359306e-06, |
|
"loss": 0.5456, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"grad_norm": 0.7640136480331421, |
|
"learning_rate": 3.33859232651351e-06, |
|
"loss": 0.5228, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 1.0315712690353394, |
|
"learning_rate": 3.301279317510038e-06, |
|
"loss": 0.5389, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 1.0031884908676147, |
|
"learning_rate": 3.2640729516810844e-06, |
|
"loss": 0.5153, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"grad_norm": 0.8636874556541443, |
|
"learning_rate": 3.226975564787322e-06, |
|
"loss": 0.527, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"grad_norm": 0.9102957248687744, |
|
"learning_rate": 3.1899894857478897e-06, |
|
"loss": 0.5397, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 0.8529887795448303, |
|
"learning_rate": 3.15311703649418e-06, |
|
"loss": 0.5184, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 0.9462995529174805, |
|
"learning_rate": 3.116360531824074e-06, |
|
"loss": 0.5559, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"grad_norm": 0.8420120477676392, |
|
"learning_rate": 3.0797222792566306e-06, |
|
"loss": 0.5245, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"grad_norm": 0.7965397238731384, |
|
"learning_rate": 3.043204578887209e-06, |
|
"loss": 0.5585, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"grad_norm": 0.9633888602256775, |
|
"learning_rate": 3.00680972324308e-06, |
|
"loss": 0.4956, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"grad_norm": 1.1136802434921265, |
|
"learning_rate": 2.970539997139509e-06, |
|
"loss": 0.5444, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 0.8797339797019958, |
|
"learning_rate": 2.9343976775363097e-06, |
|
"loss": 0.5253, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 1.0063321590423584, |
|
"learning_rate": 2.8983850333949065e-06, |
|
"loss": 0.5427, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"grad_norm": 0.8839291930198669, |
|
"learning_rate": 2.862504325535886e-06, |
|
"loss": 0.5346, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 0.9732418060302734, |
|
"learning_rate": 2.8267578064970735e-06, |
|
"loss": 0.5889, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 1.0936757326126099, |
|
"learning_rate": 2.791147720392118e-06, |
|
"loss": 0.5136, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"grad_norm": 0.9694124460220337, |
|
"learning_rate": 2.755676302769611e-06, |
|
"loss": 0.5276, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"grad_norm": 0.9088903069496155, |
|
"learning_rate": 2.7203457804727364e-06, |
|
"loss": 0.5499, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"eval_loss": 0.5715650320053101, |
|
"eval_runtime": 75.4356, |
|
"eval_samples_per_second": 59.653, |
|
"eval_steps_per_second": 1.869, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"grad_norm": 0.8832049369812012, |
|
"learning_rate": 2.6851583714994843e-06, |
|
"loss": 0.5392, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"grad_norm": 1.0563007593154907, |
|
"learning_rate": 2.6501162848634023e-06, |
|
"loss": 0.5952, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 0.9844443798065186, |
|
"learning_rate": 2.6152217204549045e-06, |
|
"loss": 0.5304, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"grad_norm": 1.0739080905914307, |
|
"learning_rate": 2.580476868903197e-06, |
|
"loss": 0.4941, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"grad_norm": 0.8901020884513855, |
|
"learning_rate": 2.5458839114387146e-06, |
|
"loss": 0.5418, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"grad_norm": 0.9322318434715271, |
|
"learning_rate": 2.5114450197562214e-06, |
|
"loss": 0.4952, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"grad_norm": 0.9946108460426331, |
|
"learning_rate": 2.4771623558784474e-06, |
|
"loss": 0.5155, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 0.9886655211448669, |
|
"learning_rate": 2.4430380720203785e-06, |
|
"loss": 0.5374, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"grad_norm": 0.9973739385604858, |
|
"learning_rate": 2.409074310454136e-06, |
|
"loss": 0.5055, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"grad_norm": 0.9865736961364746, |
|
"learning_rate": 2.375273203374489e-06, |
|
"loss": 0.5287, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"grad_norm": 0.9992399215698242, |
|
"learning_rate": 2.341636872764994e-06, |
|
"loss": 0.5753, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 1.0160070657730103, |
|
"learning_rate": 2.3081674302647905e-06, |
|
"loss": 0.5102, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"grad_norm": 0.7992890477180481, |
|
"learning_rate": 2.2748669770360273e-06, |
|
"loss": 0.5551, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 1.0403869152069092, |
|
"learning_rate": 2.2417376036319495e-06, |
|
"loss": 0.5231, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"grad_norm": 0.9026238918304443, |
|
"learning_rate": 2.2087813898656775e-06, |
|
"loss": 0.5148, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"grad_norm": 1.078641414642334, |
|
"learning_rate": 2.176000404679612e-06, |
|
"loss": 0.5481, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 0.8496494889259338, |
|
"learning_rate": 2.1433967060155687e-06, |
|
"loss": 0.4961, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"grad_norm": 0.9248107671737671, |
|
"learning_rate": 2.110972340685577e-06, |
|
"loss": 0.5402, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"grad_norm": 1.0704095363616943, |
|
"learning_rate": 2.078729344243384e-06, |
|
"loss": 0.5228, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"grad_norm": 0.8805019855499268, |
|
"learning_rate": 2.0466697408566664e-06, |
|
"loss": 0.545, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"eval_loss": 0.5704147815704346, |
|
"eval_runtime": 77.346, |
|
"eval_samples_per_second": 58.18, |
|
"eval_steps_per_second": 1.823, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"grad_norm": 1.0108119249343872, |
|
"learning_rate": 2.014795543179957e-06, |
|
"loss": 0.5361, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"grad_norm": 1.01576828956604, |
|
"learning_rate": 1.9831087522282865e-06, |
|
"loss": 0.5552, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"grad_norm": 0.9220107197761536, |
|
"learning_rate": 1.951611357251575e-06, |
|
"loss": 0.5753, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"grad_norm": 0.9482864141464233, |
|
"learning_rate": 1.92030533560974e-06, |
|
"loss": 0.5339, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"grad_norm": 0.9235343933105469, |
|
"learning_rate": 1.8891926526485643e-06, |
|
"loss": 0.538, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"grad_norm": 0.6921684145927429, |
|
"learning_rate": 1.8582752615763161e-06, |
|
"loss": 0.5331, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 0.930234968662262, |
|
"learning_rate": 1.8275551033411209e-06, |
|
"loss": 0.5226, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"grad_norm": 0.9894780516624451, |
|
"learning_rate": 1.7970341065091246e-06, |
|
"loss": 0.5294, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"grad_norm": 1.0227577686309814, |
|
"learning_rate": 1.766714187143413e-06, |
|
"loss": 0.5186, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"grad_norm": 0.9115012884140015, |
|
"learning_rate": 1.736597248683724e-06, |
|
"loss": 0.5156, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"grad_norm": 1.050728440284729, |
|
"learning_rate": 1.7066851818269558e-06, |
|
"loss": 0.521, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"grad_norm": 1.0954750776290894, |
|
"learning_rate": 1.676979864408471e-06, |
|
"loss": 0.5451, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 0.7569468021392822, |
|
"learning_rate": 1.647483161284203e-06, |
|
"loss": 0.5765, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"grad_norm": 0.8553792238235474, |
|
"learning_rate": 1.6181969242135926e-06, |
|
"loss": 0.5335, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 0.918138861656189, |
|
"learning_rate": 1.5891229917433316e-06, |
|
"loss": 0.5343, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"grad_norm": 0.988727867603302, |
|
"learning_rate": 1.5602631890919428e-06, |
|
"loss": 0.5288, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"grad_norm": 1.130867600440979, |
|
"learning_rate": 1.531619328035197e-06, |
|
"loss": 0.4952, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"grad_norm": 1.1033731698989868, |
|
"learning_rate": 1.503193206792366e-06, |
|
"loss": 0.5053, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"grad_norm": 1.0361392498016357, |
|
"learning_rate": 1.4749866099133409e-06, |
|
"loss": 0.555, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 1.1811940670013428, |
|
"learning_rate": 1.447001308166599e-06, |
|
"loss": 0.5253, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"eval_loss": 0.5704381465911865, |
|
"eval_runtime": 72.5701, |
|
"eval_samples_per_second": 62.009, |
|
"eval_steps_per_second": 1.943, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"grad_norm": 1.0649921894073486, |
|
"learning_rate": 1.4192390584280347e-06, |
|
"loss": 0.5397, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"grad_norm": 1.006032943725586, |
|
"learning_rate": 1.3917016035706632e-06, |
|
"loss": 0.5321, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"grad_norm": 1.1129940748214722, |
|
"learning_rate": 1.3643906723552186e-06, |
|
"loss": 0.5246, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 1.1166002750396729, |
|
"learning_rate": 1.3373079793216037e-06, |
|
"loss": 0.5405, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"grad_norm": 0.9260038137435913, |
|
"learning_rate": 1.3104552246812768e-06, |
|
"loss": 0.543, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 1.0943797826766968, |
|
"learning_rate": 1.2838340942104944e-06, |
|
"loss": 0.5147, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"grad_norm": 1.0058125257492065, |
|
"learning_rate": 1.257446259144494e-06, |
|
"loss": 0.4995, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"grad_norm": 1.022620677947998, |
|
"learning_rate": 1.2312933760725753e-06, |
|
"loss": 0.5485, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 1.077143907546997, |
|
"learning_rate": 1.2053770868340914e-06, |
|
"loss": 0.5483, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"grad_norm": 1.3038913011550903, |
|
"learning_rate": 1.17969901841539e-06, |
|
"loss": 0.5388, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"grad_norm": 0.8922839760780334, |
|
"learning_rate": 1.1542607828476665e-06, |
|
"loss": 0.5404, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"grad_norm": 0.9709892272949219, |
|
"learning_rate": 1.1290639771057648e-06, |
|
"loss": 0.5204, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"grad_norm": 1.1010524034500122, |
|
"learning_rate": 1.1041101830079159e-06, |
|
"loss": 0.5429, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"grad_norm": 1.1041542291641235, |
|
"learning_rate": 1.0794009671164484e-06, |
|
"loss": 0.5256, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"grad_norm": 1.1114507913589478, |
|
"learning_rate": 1.054937880639424e-06, |
|
"loss": 0.5185, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 0.9926356077194214, |
|
"learning_rate": 1.030722459333267e-06, |
|
"loss": 0.5161, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"grad_norm": 1.00425386428833, |
|
"learning_rate": 1.0067562234063477e-06, |
|
"loss": 0.5222, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"grad_norm": 1.0224225521087646, |
|
"learning_rate": 9.830406774235474e-07, |
|
"loss": 0.521, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.9822995662689209, |
|
"learning_rate": 9.595773102118022e-07, |
|
"loss": 0.528, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"grad_norm": 1.1153786182403564, |
|
"learning_rate": 9.363675947666362e-07, |
|
"loss": 0.5418, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"eval_loss": 0.5702469944953918, |
|
"eval_runtime": 71.9125, |
|
"eval_samples_per_second": 62.576, |
|
"eval_steps_per_second": 1.961, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"grad_norm": 1.056578278541565, |
|
"learning_rate": 9.134129881596926e-07, |
|
"loss": 0.5326, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"grad_norm": 0.9958868026733398, |
|
"learning_rate": 8.907149314472579e-07, |
|
"loss": 0.558, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"grad_norm": 1.1032071113586426, |
|
"learning_rate": 8.682748495797965e-07, |
|
"loss": 0.5423, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"grad_norm": 1.0517561435699463, |
|
"learning_rate": 8.460941513124876e-07, |
|
"loss": 0.5189, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 1.1289842128753662, |
|
"learning_rate": 8.241742291168015e-07, |
|
"loss": 0.5002, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"grad_norm": 0.9873390793800354, |
|
"learning_rate": 8.025164590930623e-07, |
|
"loss": 0.5589, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"grad_norm": 0.885833740234375, |
|
"learning_rate": 7.811222008840719e-07, |
|
"loss": 0.5031, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"grad_norm": 0.7554776072502136, |
|
"learning_rate": 7.599927975897514e-07, |
|
"loss": 0.5128, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"grad_norm": 1.0165417194366455, |
|
"learning_rate": 7.391295756828187e-07, |
|
"loss": 0.5369, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"grad_norm": 0.9958201050758362, |
|
"learning_rate": 7.185338449255197e-07, |
|
"loss": 0.4948, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"grad_norm": 1.1157667636871338, |
|
"learning_rate": 6.982068982874019e-07, |
|
"loss": 0.5181, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"grad_norm": 1.0274876356124878, |
|
"learning_rate": 6.781500118641377e-07, |
|
"loss": 0.5172, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"grad_norm": 0.9985108375549316, |
|
"learning_rate": 6.583644447974241e-07, |
|
"loss": 0.5304, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"grad_norm": 1.0142953395843506, |
|
"learning_rate": 6.388514391959283e-07, |
|
"loss": 0.5278, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"grad_norm": 1.0171020030975342, |
|
"learning_rate": 6.196122200573073e-07, |
|
"loss": 0.5269, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"grad_norm": 1.2027987241744995, |
|
"learning_rate": 6.006479951913168e-07, |
|
"loss": 0.5379, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"grad_norm": 1.064353108406067, |
|
"learning_rate": 5.819599551439703e-07, |
|
"loss": 0.4879, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 1.0336158275604248, |
|
"learning_rate": 5.635492731228098e-07, |
|
"loss": 0.5363, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"grad_norm": 1.143526554107666, |
|
"learning_rate": 5.45417104923251e-07, |
|
"loss": 0.5371, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"grad_norm": 0.9199160933494568, |
|
"learning_rate": 5.275645888560233e-07, |
|
"loss": 0.5289, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"eval_loss": 0.570408046245575, |
|
"eval_runtime": 72.3442, |
|
"eval_samples_per_second": 62.203, |
|
"eval_steps_per_second": 1.949, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"grad_norm": 1.0219011306762695, |
|
"learning_rate": 5.099928456757036e-07, |
|
"loss": 0.5115, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"grad_norm": 1.0187703371047974, |
|
"learning_rate": 4.927029785103715e-07, |
|
"loss": 0.5171, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"grad_norm": 1.006913423538208, |
|
"learning_rate": 4.756960727923371e-07, |
|
"loss": 0.5113, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 1.0249481201171875, |
|
"learning_rate": 4.5897319619001404e-07, |
|
"loss": 0.552, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"grad_norm": 1.016176462173462, |
|
"learning_rate": 4.42535398540887e-07, |
|
"loss": 0.5246, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"grad_norm": 0.9975274205207825, |
|
"learning_rate": 4.263837117856046e-07, |
|
"loss": 0.5328, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"grad_norm": 0.8603295683860779, |
|
"learning_rate": 4.10519149903198e-07, |
|
"loss": 0.5409, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"grad_norm": 1.079770565032959, |
|
"learning_rate": 3.949427088474189e-07, |
|
"loss": 0.5506, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"grad_norm": 0.9214140176773071, |
|
"learning_rate": 3.79655366484224e-07, |
|
"loss": 0.5175, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"grad_norm": 0.9992761015892029, |
|
"learning_rate": 3.646580825303786e-07, |
|
"loss": 0.5197, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 0.9221498370170593, |
|
"learning_rate": 3.49951798493211e-07, |
|
"loss": 0.5083, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"grad_norm": 1.0544548034667969, |
|
"learning_rate": 3.355374376115017e-07, |
|
"loss": 0.5187, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"grad_norm": 0.8989390134811401, |
|
"learning_rate": 3.214159047975324e-07, |
|
"loss": 0.5137, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"grad_norm": 0.9407683610916138, |
|
"learning_rate": 3.0758808658026496e-07, |
|
"loss": 0.5045, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"grad_norm": 1.034885287284851, |
|
"learning_rate": 2.940548510496977e-07, |
|
"loss": 0.5369, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"grad_norm": 0.9151519536972046, |
|
"learning_rate": 2.8081704780236083e-07, |
|
"loss": 0.5619, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 1.22360098361969, |
|
"learning_rate": 2.6787550788798243e-07, |
|
"loss": 0.5078, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"grad_norm": 1.11081063747406, |
|
"learning_rate": 2.55231043757318e-07, |
|
"loss": 0.5391, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 0.9431936144828796, |
|
"learning_rate": 2.428844492111415e-07, |
|
"loss": 0.581, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"grad_norm": 0.993818461894989, |
|
"learning_rate": 2.3083649935041642e-07, |
|
"loss": 0.4983, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"eval_loss": 0.570551335811615, |
|
"eval_runtime": 73.4513, |
|
"eval_samples_per_second": 61.265, |
|
"eval_steps_per_second": 1.92, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"grad_norm": 0.9568440318107605, |
|
"learning_rate": 2.1908795052763388e-07, |
|
"loss": 0.5094, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"grad_norm": 1.0978604555130005, |
|
"learning_rate": 2.0763954029933085e-07, |
|
"loss": 0.5262, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"grad_norm": 0.9831346273422241, |
|
"learning_rate": 1.9649198737978325e-07, |
|
"loss": 0.5381, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"grad_norm": 1.1549886465072632, |
|
"learning_rate": 1.856459915958958e-07, |
|
"loss": 0.5069, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"grad_norm": 1.122920274734497, |
|
"learning_rate": 1.7510223384325674e-07, |
|
"loss": 0.5323, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"grad_norm": 1.0226640701293945, |
|
"learning_rate": 1.6486137604339813e-07, |
|
"loss": 0.5206, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"grad_norm": 0.9887501001358032, |
|
"learning_rate": 1.5492406110224289e-07, |
|
"loss": 0.549, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"grad_norm": 1.069175124168396, |
|
"learning_rate": 1.4529091286973994e-07, |
|
"loss": 0.535, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"grad_norm": 1.0605347156524658, |
|
"learning_rate": 1.3596253610070165e-07, |
|
"loss": 0.4874, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 1.1193597316741943, |
|
"learning_rate": 1.2693951641683856e-07, |
|
"loss": 0.5137, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"grad_norm": 1.0010859966278076, |
|
"learning_rate": 1.1822242026999443e-07, |
|
"loss": 0.4834, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"grad_norm": 1.1622557640075684, |
|
"learning_rate": 1.0981179490658522e-07, |
|
"loss": 0.5739, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"grad_norm": 1.0148837566375732, |
|
"learning_rate": 1.0170816833324549e-07, |
|
"loss": 0.5182, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"grad_norm": 1.1132534742355347, |
|
"learning_rate": 9.391204928367648e-08, |
|
"loss": 0.517, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"grad_norm": 1.0367366075515747, |
|
"learning_rate": 8.642392718671455e-08, |
|
"loss": 0.5191, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 1.0188932418823242, |
|
"learning_rate": 7.92442721355996e-08, |
|
"loss": 0.5277, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"grad_norm": 1.0666061639785767, |
|
"learning_rate": 7.237353485846865e-08, |
|
"loss": 0.5272, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"grad_norm": 0.9185099601745605, |
|
"learning_rate": 6.58121466900552e-08, |
|
"loss": 0.5114, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"grad_norm": 1.1004579067230225, |
|
"learning_rate": 5.9560519544614725e-08, |
|
"loss": 0.5088, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"grad_norm": 1.0939706563949585, |
|
"learning_rate": 5.36190458900615e-08, |
|
"loss": 0.5371, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"eval_loss": 0.5705362558364868, |
|
"eval_runtime": 72.0164, |
|
"eval_samples_per_second": 62.486, |
|
"eval_steps_per_second": 1.958, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"grad_norm": 1.0084924697875977, |
|
"learning_rate": 4.798809872333276e-08, |
|
"loss": 0.5561, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"grad_norm": 1.118712067604065, |
|
"learning_rate": 4.266803154697241e-08, |
|
"loss": 0.5511, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"grad_norm": 1.0696793794631958, |
|
"learning_rate": 3.765917834693711e-08, |
|
"loss": 0.4753, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"grad_norm": 0.9693195223808289, |
|
"learning_rate": 3.296185357162973e-08, |
|
"loss": 0.5308, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"grad_norm": 1.145647406578064, |
|
"learning_rate": 2.8576352112158478e-08, |
|
"loss": 0.5174, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"grad_norm": 0.9660029411315918, |
|
"learning_rate": 2.4502949283826683e-08, |
|
"loss": 0.4866, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"grad_norm": 0.9987422823905945, |
|
"learning_rate": 2.074190080884386e-08, |
|
"loss": 0.5169, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"grad_norm": 0.9800373911857605, |
|
"learning_rate": 1.729344280027745e-08, |
|
"loss": 0.5065, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 1.1359976530075073, |
|
"learning_rate": 1.4157791747225202e-08, |
|
"loss": 0.5496, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"grad_norm": 0.9586722254753113, |
|
"learning_rate": 1.133514450122719e-08, |
|
"loss": 0.5138, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"grad_norm": 1.1620326042175293, |
|
"learning_rate": 8.825678263906789e-09, |
|
"loss": 0.5573, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"grad_norm": 0.8677152395248413, |
|
"learning_rate": 6.629550575847355e-09, |
|
"loss": 0.4891, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"grad_norm": 1.0054067373275757, |
|
"learning_rate": 4.746899306699582e-09, |
|
"loss": 0.5325, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"grad_norm": 1.2144650220870972, |
|
"learning_rate": 3.1778426465289838e-09, |
|
"loss": 0.4983, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 1.1614487171173096, |
|
"learning_rate": 1.9224790983940477e-09, |
|
"loss": 0.5108, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"grad_norm": 0.9379814267158508, |
|
"learning_rate": 9.808874721628503e-10, |
|
"loss": 0.5122, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"grad_norm": 1.0148200988769531, |
|
"learning_rate": 3.5312687956756684e-10, |
|
"loss": 0.5337, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"grad_norm": 1.089341640472412, |
|
"learning_rate": 3.923673048966681e-11, |
|
"loss": 0.529, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 3985, |
|
"total_flos": 2.2824671149817856e+18, |
|
"train_loss": 0.569244503227052, |
|
"train_runtime": 8741.9073, |
|
"train_samples_per_second": 14.585, |
|
"train_steps_per_second": 0.456 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 3985, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 1000, |
|
"total_flos": 2.2824671149817856e+18, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|