{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1563,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006397952655150352,
      "grad_norm": 8.557194262017578,
      "learning_rate": 1.2738853503184715e-07,
      "loss": 2.0432,
      "step": 1
    },
    {
      "epoch": 0.003198976327575176,
      "grad_norm": 7.168845650993062,
      "learning_rate": 6.369426751592357e-07,
      "loss": 1.9538,
      "step": 5
    },
    {
      "epoch": 0.006397952655150352,
      "grad_norm": 8.065982197340965,
      "learning_rate": 1.2738853503184715e-06,
      "loss": 2.0349,
      "step": 10
    },
    {
      "epoch": 0.009596928982725527,
      "grad_norm": 6.290735834401221,
      "learning_rate": 1.9108280254777074e-06,
      "loss": 1.9124,
      "step": 15
    },
    {
      "epoch": 0.012795905310300703,
      "grad_norm": 4.973274763596367,
      "learning_rate": 2.547770700636943e-06,
      "loss": 1.9049,
      "step": 20
    },
    {
      "epoch": 0.01599488163787588,
      "grad_norm": 5.176905055023329,
      "learning_rate": 3.1847133757961785e-06,
      "loss": 1.8208,
      "step": 25
    },
    {
      "epoch": 0.019193857965451054,
      "grad_norm": 6.309236772398139,
      "learning_rate": 3.821656050955415e-06,
      "loss": 1.8131,
      "step": 30
    },
    {
      "epoch": 0.022392834293026232,
      "grad_norm": 4.199864900523197,
      "learning_rate": 4.45859872611465e-06,
      "loss": 1.7264,
      "step": 35
    },
    {
      "epoch": 0.025591810620601407,
      "grad_norm": 5.022105406412313,
      "learning_rate": 5.095541401273886e-06,
      "loss": 1.7085,
      "step": 40
    },
    {
      "epoch": 0.028790786948176585,
      "grad_norm": 4.5174043424103685,
      "learning_rate": 5.732484076433121e-06,
      "loss": 1.6624,
      "step": 45
    },
    {
      "epoch": 0.03198976327575176,
      "grad_norm": 4.3300145496731215,
      "learning_rate": 6.369426751592357e-06,
      "loss": 1.6825,
      "step": 50
    },
    {
      "epoch": 0.035188739603326934,
      "grad_norm": 3.794347041073854,
      "learning_rate": 7.006369426751593e-06,
      "loss": 1.6846,
      "step": 55
    },
    {
      "epoch": 0.03838771593090211,
      "grad_norm": 3.802484854288831,
      "learning_rate": 7.64331210191083e-06,
      "loss": 1.6738,
      "step": 60
    },
    {
      "epoch": 0.04158669225847729,
      "grad_norm": 3.7790548942544664,
      "learning_rate": 8.280254777070064e-06,
      "loss": 1.6897,
      "step": 65
    },
    {
      "epoch": 0.044785668586052464,
      "grad_norm": 3.8318913612512127,
      "learning_rate": 8.9171974522293e-06,
      "loss": 1.6498,
      "step": 70
    },
    {
      "epoch": 0.04798464491362764,
      "grad_norm": 4.486141166866438,
      "learning_rate": 9.554140127388536e-06,
      "loss": 1.6251,
      "step": 75
    },
    {
      "epoch": 0.05118362124120281,
      "grad_norm": 3.7966646356196634,
      "learning_rate": 1.0191082802547772e-05,
      "loss": 1.6787,
      "step": 80
    },
    {
      "epoch": 0.05438259756877799,
      "grad_norm": 4.015240064460886,
      "learning_rate": 1.0828025477707008e-05,
      "loss": 1.6373,
      "step": 85
    },
    {
      "epoch": 0.05758157389635317,
      "grad_norm": 3.9271843790429717,
      "learning_rate": 1.1464968152866242e-05,
      "loss": 1.6241,
      "step": 90
    },
    {
      "epoch": 0.060780550223928344,
      "grad_norm": 3.996656519840015,
      "learning_rate": 1.2101910828025478e-05,
      "loss": 1.6128,
      "step": 95
    },
    {
      "epoch": 0.06397952655150352,
      "grad_norm": 4.254476942836661,
      "learning_rate": 1.2738853503184714e-05,
      "loss": 1.632,
      "step": 100
    },
    {
      "epoch": 0.0671785028790787,
      "grad_norm": 3.960145653465224,
      "learning_rate": 1.337579617834395e-05,
      "loss": 1.6325,
      "step": 105
    },
    {
      "epoch": 0.07037747920665387,
      "grad_norm": 4.2530304479749965,
      "learning_rate": 1.4012738853503186e-05,
      "loss": 1.6208,
      "step": 110
    },
    {
      "epoch": 0.07357645553422905,
      "grad_norm": 3.7537798327795477,
      "learning_rate": 1.464968152866242e-05,
      "loss": 1.6089,
      "step": 115
    },
    {
      "epoch": 0.07677543186180422,
      "grad_norm": 3.7687659782987866,
      "learning_rate": 1.528662420382166e-05,
      "loss": 1.6164,
      "step": 120
    },
    {
      "epoch": 0.0799744081893794,
      "grad_norm": 3.685834335118057,
      "learning_rate": 1.5923566878980894e-05,
      "loss": 1.6283,
      "step": 125
    },
    {
      "epoch": 0.08317338451695458,
      "grad_norm": 3.697525223440911,
      "learning_rate": 1.6560509554140128e-05,
      "loss": 1.6206,
      "step": 130
    },
    {
      "epoch": 0.08637236084452975,
      "grad_norm": 3.8268308426440565,
      "learning_rate": 1.7197452229299365e-05,
      "loss": 1.6333,
      "step": 135
    },
    {
      "epoch": 0.08957133717210493,
      "grad_norm": 3.6563345299850463,
      "learning_rate": 1.78343949044586e-05,
      "loss": 1.675,
      "step": 140
    },
    {
      "epoch": 0.0927703134996801,
      "grad_norm": 3.6849267294362464,
      "learning_rate": 1.8471337579617837e-05,
      "loss": 1.6583,
      "step": 145
    },
    {
      "epoch": 0.09596928982725528,
      "grad_norm": 3.5138963267115324,
      "learning_rate": 1.910828025477707e-05,
      "loss": 1.6161,
      "step": 150
    },
    {
      "epoch": 0.09916826615483046,
      "grad_norm": 3.5909566388072407,
      "learning_rate": 1.9745222929936306e-05,
      "loss": 1.6804,
      "step": 155
    },
    {
      "epoch": 0.10236724248240563,
      "grad_norm": 3.599403002421182,
      "learning_rate": 1.9999775332635076e-05,
      "loss": 1.6134,
      "step": 160
    },
    {
      "epoch": 0.10556621880998081,
      "grad_norm": 3.8176081053178996,
      "learning_rate": 1.999840240196313e-05,
      "loss": 1.5822,
      "step": 165
    },
    {
      "epoch": 0.10876519513755598,
      "grad_norm": 3.465176818490385,
      "learning_rate": 1.9995781526975738e-05,
      "loss": 1.6274,
      "step": 170
    },
    {
      "epoch": 0.11196417146513116,
      "grad_norm": 3.5656092209776453,
      "learning_rate": 1.9991913034795768e-05,
      "loss": 1.6646,
      "step": 175
    },
    {
      "epoch": 0.11516314779270634,
      "grad_norm": 3.3302032083057087,
      "learning_rate": 1.9986797408266636e-05,
      "loss": 1.6421,
      "step": 180
    },
    {
      "epoch": 0.1183621241202815,
      "grad_norm": 3.5207207882328273,
      "learning_rate": 1.9980435285892056e-05,
      "loss": 1.6973,
      "step": 185
    },
    {
      "epoch": 0.12156110044785669,
      "grad_norm": 3.5533442251824634,
      "learning_rate": 1.9972827461756335e-05,
      "loss": 1.6365,
      "step": 190
    },
    {
      "epoch": 0.12476007677543186,
      "grad_norm": 3.3243186303121948,
      "learning_rate": 1.9963974885425267e-05,
      "loss": 1.5795,
      "step": 195
    },
    {
      "epoch": 0.12795905310300704,
      "grad_norm": 3.3487919633682726,
      "learning_rate": 1.9953878661827603e-05,
      "loss": 1.6474,
      "step": 200
    },
    {
      "epoch": 0.13115802943058222,
      "grad_norm": 3.242336250993276,
      "learning_rate": 1.994254005111715e-05,
      "loss": 1.636,
      "step": 205
    },
    {
      "epoch": 0.1343570057581574,
      "grad_norm": 3.173792458371255,
      "learning_rate": 1.992996046851548e-05,
      "loss": 1.616,
      "step": 210
    },
    {
      "epoch": 0.13755598208573255,
      "grad_norm": 3.3299670491780646,
      "learning_rate": 1.9916141484135297e-05,
      "loss": 1.6634,
      "step": 215
    },
    {
      "epoch": 0.14075495841330773,
      "grad_norm": 3.36368536908733,
      "learning_rate": 1.990108482278446e-05,
      "loss": 1.6432,
      "step": 220
    },
    {
      "epoch": 0.14395393474088292,
      "grad_norm": 3.3251859465787046,
      "learning_rate": 1.9884792363750684e-05,
      "loss": 1.6709,
      "step": 225
    },
    {
      "epoch": 0.1471529110684581,
      "grad_norm": 3.102386276571743,
      "learning_rate": 1.9867266140567024e-05,
      "loss": 1.6415,
      "step": 230
    },
    {
      "epoch": 0.15035188739603328,
      "grad_norm": 3.3174253860977365,
      "learning_rate": 1.9848508340758014e-05,
      "loss": 1.6635,
      "step": 235
    },
    {
      "epoch": 0.15355086372360843,
      "grad_norm": 3.312550584124557,
      "learning_rate": 1.9828521305566647e-05,
      "loss": 1.6573,
      "step": 240
    },
    {
      "epoch": 0.15674984005118361,
      "grad_norm": 3.2267882077539722,
      "learning_rate": 1.9807307529662175e-05,
      "loss": 1.7259,
      "step": 245
    },
    {
      "epoch": 0.1599488163787588,
      "grad_norm": 3.3340916386819472,
      "learning_rate": 1.9784869660828708e-05,
      "loss": 1.6317,
      "step": 250
    },
    {
      "epoch": 0.16314779270633398,
      "grad_norm": 3.52528353307843,
      "learning_rate": 1.9761210499634754e-05,
      "loss": 1.6258,
      "step": 255
    },
    {
      "epoch": 0.16634676903390916,
      "grad_norm": 3.4158604367890404,
      "learning_rate": 1.973633299908365e-05,
      "loss": 1.5955,
      "step": 260
    },
    {
      "epoch": 0.1695457453614843,
      "grad_norm": 3.2974321567758773,
      "learning_rate": 1.9710240264245005e-05,
      "loss": 1.6867,
      "step": 265
    },
    {
      "epoch": 0.1727447216890595,
      "grad_norm": 3.082593081376396,
      "learning_rate": 1.9682935551867132e-05,
      "loss": 1.611,
      "step": 270
    },
    {
      "epoch": 0.17594369801663468,
      "grad_norm": 2.9246732858906754,
      "learning_rate": 1.9654422269970545e-05,
      "loss": 1.6379,
      "step": 275
    },
    {
      "epoch": 0.17914267434420986,
      "grad_norm": 3.1477425965953034,
      "learning_rate": 1.9624703977422624e-05,
      "loss": 1.6206,
      "step": 280
    },
    {
      "epoch": 0.18234165067178504,
      "grad_norm": 3.4756906983116487,
      "learning_rate": 1.959378438349338e-05,
      "loss": 1.6179,
      "step": 285
    },
    {
      "epoch": 0.1855406269993602,
      "grad_norm": 3.1254114065928276,
      "learning_rate": 1.956166734739251e-05,
      "loss": 1.6312,
      "step": 290
    },
    {
      "epoch": 0.18873960332693537,
      "grad_norm": 3.2909797328261816,
      "learning_rate": 1.95283568777877e-05,
      "loss": 1.636,
      "step": 295
    },
    {
      "epoch": 0.19193857965451055,
      "grad_norm": 3.1693694159644332,
      "learning_rate": 1.9493857132304295e-05,
      "loss": 1.6035,
      "step": 300
    },
    {
      "epoch": 0.19513755598208574,
      "grad_norm": 2.974139247790458,
      "learning_rate": 1.9458172417006347e-05,
      "loss": 1.7238,
      "step": 305
    },
    {
      "epoch": 0.19833653230966092,
      "grad_norm": 3.0040579933657265,
      "learning_rate": 1.942130718585919e-05,
      "loss": 1.5947,
      "step": 310
    },
    {
      "epoch": 0.20153550863723607,
      "grad_norm": 3.241345908595884,
      "learning_rate": 1.938326604017349e-05,
      "loss": 1.625,
      "step": 315
    },
    {
      "epoch": 0.20473448496481125,
      "grad_norm": 3.1605704875054514,
      "learning_rate": 1.9344053728030952e-05,
      "loss": 1.6329,
      "step": 320
    },
    {
      "epoch": 0.20793346129238643,
      "grad_norm": 3.0155271164409214,
      "learning_rate": 1.9303675143691683e-05,
      "loss": 1.653,
      "step": 325
    },
    {
      "epoch": 0.21113243761996162,
      "grad_norm": 3.133069541053234,
      "learning_rate": 1.9262135326983326e-05,
      "loss": 1.6805,
      "step": 330
    },
    {
      "epoch": 0.2143314139475368,
      "grad_norm": 3.19313647418372,
      "learning_rate": 1.921943946267201e-05,
      "loss": 1.6003,
      "step": 335
    },
    {
      "epoch": 0.21753039027511195,
      "grad_norm": 3.237127118135618,
      "learning_rate": 1.9175592879815217e-05,
      "loss": 1.6702,
      "step": 340
    },
    {
      "epoch": 0.22072936660268713,
      "grad_norm": 3.1481875415297886,
      "learning_rate": 1.9130601051096655e-05,
      "loss": 1.6015,
      "step": 345
    },
    {
      "epoch": 0.22392834293026231,
      "grad_norm": 3.12526224068583,
      "learning_rate": 1.9084469592143154e-05,
      "loss": 1.6248,
      "step": 350
    },
    {
      "epoch": 0.2271273192578375,
      "grad_norm": 3.106767075086588,
      "learning_rate": 1.9037204260823788e-05,
      "loss": 1.5752,
      "step": 355
    },
    {
      "epoch": 0.23032629558541268,
      "grad_norm": 3.0437376827653413,
      "learning_rate": 1.89888109565312e-05,
      "loss": 1.7072,
      "step": 360
    },
    {
      "epoch": 0.23352527191298783,
      "grad_norm": 2.966074158910365,
      "learning_rate": 1.893929571944527e-05,
      "loss": 1.6499,
      "step": 365
    },
    {
      "epoch": 0.236724248240563,
      "grad_norm": 3.0622642071979627,
      "learning_rate": 1.8888664729779205e-05,
      "loss": 1.6566,
      "step": 370
    },
    {
      "epoch": 0.2399232245681382,
      "grad_norm": 3.1519051199400936,
      "learning_rate": 1.883692430700818e-05,
      "loss": 1.619,
      "step": 375
    },
    {
      "epoch": 0.24312220089571338,
      "grad_norm": 3.431719331199434,
      "learning_rate": 1.8784080909080568e-05,
      "loss": 1.6062,
      "step": 380
    },
    {
      "epoch": 0.24632117722328856,
      "grad_norm": 3.1037976334582322,
      "learning_rate": 1.8730141131611882e-05,
      "loss": 1.6333,
      "step": 385
    },
    {
      "epoch": 0.2495201535508637,
      "grad_norm": 3.268175679790816,
      "learning_rate": 1.867511170706157e-05,
      "loss": 1.5713,
      "step": 390
    },
    {
      "epoch": 0.2527191298784389,
      "grad_norm": 2.9167898411021365,
      "learning_rate": 1.861899950389269e-05,
      "loss": 1.6532,
      "step": 395
    },
    {
      "epoch": 0.2559181062060141,
      "grad_norm": 3.0277449312431726,
      "learning_rate": 1.856181152571463e-05,
      "loss": 1.5813,
      "step": 400
    },
    {
      "epoch": 0.2591170825335892,
      "grad_norm": 3.2795041159269065,
      "learning_rate": 1.850355491040897e-05,
      "loss": 1.5863,
      "step": 405
    },
    {
      "epoch": 0.26231605886116444,
      "grad_norm": 2.8691246301448414,
      "learning_rate": 1.8444236929238556e-05,
      "loss": 1.6227,
      "step": 410
    },
    {
      "epoch": 0.2655150351887396,
      "grad_norm": 3.069643680215728,
      "learning_rate": 1.8383864985939944e-05,
      "loss": 1.5843,
      "step": 415
    },
    {
      "epoch": 0.2687140115163148,
      "grad_norm": 3.0049575855018222,
      "learning_rate": 1.8322446615799317e-05,
      "loss": 1.5902,
      "step": 420
    },
    {
      "epoch": 0.27191298784388995,
      "grad_norm": 2.9760192717671323,
      "learning_rate": 1.825998948471197e-05,
      "loss": 1.5627,
      "step": 425
    },
    {
      "epoch": 0.2751119641714651,
      "grad_norm": 2.8099807180506007,
      "learning_rate": 1.819650138822548e-05,
      "loss": 1.618,
      "step": 430
    },
    {
      "epoch": 0.2783109404990403,
      "grad_norm": 2.746446519949406,
      "learning_rate": 1.8131990250566733e-05,
      "loss": 1.4955,
      "step": 435
    },
    {
      "epoch": 0.28150991682661547,
      "grad_norm": 2.9244825789414404,
      "learning_rate": 1.8066464123652857e-05,
      "loss": 1.6359,
      "step": 440
    },
    {
      "epoch": 0.2847088931541907,
      "grad_norm": 2.7164843334529603,
      "learning_rate": 1.7999931186086225e-05,
      "loss": 1.5848,
      "step": 445
    },
    {
      "epoch": 0.28790786948176583,
      "grad_norm": 2.9072519471574974,
      "learning_rate": 1.793239974213364e-05,
      "loss": 1.5542,
      "step": 450
    },
    {
      "epoch": 0.291106845809341,
      "grad_norm": 3.168251246658964,
      "learning_rate": 1.786387822068987e-05,
      "loss": 1.6191,
      "step": 455
    },
    {
      "epoch": 0.2943058221369162,
      "grad_norm": 3.2412066931732517,
      "learning_rate": 1.7794375174225565e-05,
      "loss": 1.634,
      "step": 460
    },
    {
      "epoch": 0.29750479846449135,
      "grad_norm": 2.9282998892161918,
      "learning_rate": 1.7723899277719815e-05,
      "loss": 1.5951,
      "step": 465
    },
    {
      "epoch": 0.30070377479206656,
      "grad_norm": 3.120120728893521,
      "learning_rate": 1.7652459327577377e-05,
      "loss": 1.5976,
      "step": 470
    },
    {
      "epoch": 0.3039027511196417,
      "grad_norm": 2.8473524405665596,
      "learning_rate": 1.7580064240530746e-05,
      "loss": 1.5706,
      "step": 475
    },
    {
      "epoch": 0.30710172744721687,
      "grad_norm": 2.9319991753167294,
      "learning_rate": 1.7506723052527243e-05,
      "loss": 1.5875,
      "step": 480
    },
    {
      "epoch": 0.3103007037747921,
      "grad_norm": 3.077519173326514,
      "learning_rate": 1.7432444917601183e-05,
      "loss": 1.5981,
      "step": 485
    },
    {
      "epoch": 0.31349968010236723,
      "grad_norm": 2.912507199999816,
      "learning_rate": 1.735723910673132e-05,
      "loss": 1.5707,
      "step": 490
    },
    {
      "epoch": 0.31669865642994244,
      "grad_norm": 3.2113961395964146,
      "learning_rate": 1.7281115006683687e-05,
      "loss": 1.5593,
      "step": 495
    },
    {
      "epoch": 0.3198976327575176,
      "grad_norm": 2.7491466611385853,
      "learning_rate": 1.7204082118840035e-05,
      "loss": 1.6032,
      "step": 500
    },
    {
      "epoch": 0.32309660908509275,
      "grad_norm": 2.7684042016016615,
      "learning_rate": 1.712615005801185e-05,
      "loss": 1.6093,
      "step": 505
    },
    {
      "epoch": 0.32629558541266795,
      "grad_norm": 2.915724474118878,
      "learning_rate": 1.704732855124037e-05,
      "loss": 1.6427,
      "step": 510
    },
    {
      "epoch": 0.3294945617402431,
      "grad_norm": 2.9613347073093057,
      "learning_rate": 1.6967627436582445e-05,
      "loss": 1.6235,
      "step": 515
    },
    {
      "epoch": 0.3326935380678183,
      "grad_norm": 3.0910817914002164,
      "learning_rate": 1.6887056661882644e-05,
      "loss": 1.6294,
      "step": 520
    },
    {
      "epoch": 0.33589251439539347,
      "grad_norm": 3.000090940198684,
      "learning_rate": 1.6805626283531592e-05,
      "loss": 1.5619,
      "step": 525
    },
    {
      "epoch": 0.3390914907229686,
      "grad_norm": 2.8786609439831548,
      "learning_rate": 1.6723346465210815e-05,
      "loss": 1.6197,
      "step": 530
    },
    {
      "epoch": 0.34229046705054383,
      "grad_norm": 3.027454999439804,
      "learning_rate": 1.6640227476624163e-05,
      "loss": 1.6273,
      "step": 535
    },
    {
      "epoch": 0.345489443378119,
      "grad_norm": 3.18224230209766,
      "learning_rate": 1.655627969221598e-05,
      "loss": 1.5518,
      "step": 540
    },
    {
      "epoch": 0.3486884197056942,
      "grad_norm": 3.441475883060485,
      "learning_rate": 1.6471513589876247e-05,
      "loss": 1.5914,
      "step": 545
    },
    {
      "epoch": 0.35188739603326935,
      "grad_norm": 2.779764601488237,
      "learning_rate": 1.638593974963278e-05,
      "loss": 1.5941,
      "step": 550
    },
    {
      "epoch": 0.3550863723608445,
      "grad_norm": 2.9474461432085937,
      "learning_rate": 1.6299568852330703e-05,
      "loss": 1.6308,
      "step": 555
    },
    {
      "epoch": 0.3582853486884197,
      "grad_norm": 2.9978355190495516,
      "learning_rate": 1.6212411678299306e-05,
      "loss": 1.5671,
      "step": 560
    },
    {
      "epoch": 0.36148432501599487,
      "grad_norm": 2.784909890256646,
      "learning_rate": 1.612447910600652e-05,
      "loss": 1.5486,
      "step": 565
    },
    {
      "epoch": 0.3646833013435701,
      "grad_norm": 2.8203457503842126,
      "learning_rate": 1.6035782110701125e-05,
      "loss": 1.5871,
      "step": 570
    },
    {
      "epoch": 0.36788227767114523,
      "grad_norm": 2.648364503580562,
      "learning_rate": 1.594633176304287e-05,
      "loss": 1.5984,
      "step": 575
    },
    {
      "epoch": 0.3710812539987204,
      "grad_norm": 3.0411727177491734,
      "learning_rate": 1.5856139227720714e-05,
      "loss": 1.6011,
      "step": 580
    },
    {
      "epoch": 0.3742802303262956,
      "grad_norm": 2.9271779235986126,
      "learning_rate": 1.5765215762059304e-05,
      "loss": 1.5788,
      "step": 585
    },
    {
      "epoch": 0.37747920665387075,
      "grad_norm": 3.0611368707830975,
      "learning_rate": 1.5673572714613886e-05,
      "loss": 1.6257,
      "step": 590
    },
    {
      "epoch": 0.38067818298144596,
      "grad_norm": 2.8794709774310996,
      "learning_rate": 1.558122152375387e-05,
      "loss": 1.5765,
      "step": 595
    },
    {
      "epoch": 0.3838771593090211,
      "grad_norm": 2.7366528786349407,
      "learning_rate": 1.548817371623513e-05,
      "loss": 1.538,
      "step": 600
    },
    {
      "epoch": 0.38707613563659626,
      "grad_norm": 2.6860631877916914,
      "learning_rate": 1.539444090576132e-05,
      "loss": 1.5859,
      "step": 605
    },
    {
      "epoch": 0.3902751119641715,
      "grad_norm": 3.012236105518444,
      "learning_rate": 1.5300034791534297e-05,
      "loss": 1.5628,
      "step": 610
    },
    {
      "epoch": 0.3934740882917466,
      "grad_norm": 2.7725119156725473,
      "learning_rate": 1.520496715679391e-05,
      "loss": 1.5557,
      "step": 615
    },
    {
      "epoch": 0.39667306461932184,
      "grad_norm": 2.9325766561366984,
      "learning_rate": 1.5109249867347276e-05,
      "loss": 1.5736,
      "step": 620
    },
    {
      "epoch": 0.399872040946897,
      "grad_norm": 2.571176112058893,
      "learning_rate": 1.5012894870087751e-05,
      "loss": 1.5886,
      "step": 625
    },
    {
      "epoch": 0.40307101727447214,
      "grad_norm": 2.872319790499578,
      "learning_rate": 1.4915914191503792e-05,
      "loss": 1.6033,
      "step": 630
    },
    {
      "epoch": 0.40626999360204735,
      "grad_norm": 2.985639218703229,
      "learning_rate": 1.4818319936177885e-05,
      "loss": 1.574,
      "step": 635
    },
    {
      "epoch": 0.4094689699296225,
      "grad_norm": 2.860677975957467,
      "learning_rate": 1.4720124285275703e-05,
      "loss": 1.5633,
      "step": 640
    },
    {
      "epoch": 0.4126679462571977,
      "grad_norm": 2.825311941618321,
      "learning_rate": 1.4621339495025731e-05,
      "loss": 1.5471,
      "step": 645
    },
    {
      "epoch": 0.41586692258477287,
      "grad_norm": 2.9150202309024844,
      "learning_rate": 1.4521977895189518e-05,
      "loss": 1.5382,
      "step": 650
    },
    {
      "epoch": 0.419065898912348,
      "grad_norm": 3.131529181763074,
      "learning_rate": 1.4422051887522735e-05,
      "loss": 1.5984,
      "step": 655
    },
    {
      "epoch": 0.42226487523992323,
      "grad_norm": 2.6605800472086103,
      "learning_rate": 1.4321573944227254e-05,
      "loss": 1.5737,
      "step": 660
    },
    {
      "epoch": 0.4254638515674984,
      "grad_norm": 2.944662238934973,
      "learning_rate": 1.4220556606394465e-05,
      "loss": 1.5855,
      "step": 665
    },
    {
      "epoch": 0.4286628278950736,
      "grad_norm": 3.0504609906344173,
      "learning_rate": 1.4119012482439929e-05,
      "loss": 1.564,
      "step": 670
    },
    {
      "epoch": 0.43186180422264875,
      "grad_norm": 2.805988252794016,
      "learning_rate": 1.4016954246529697e-05,
      "loss": 1.5118,
      "step": 675
    },
    {
      "epoch": 0.4350607805502239,
      "grad_norm": 2.786269865387681,
      "learning_rate": 1.3914394636998374e-05,
      "loss": 1.5447,
      "step": 680
    },
    {
      "epoch": 0.4382597568777991,
      "grad_norm": 2.8458724254371064,
      "learning_rate": 1.3811346454759211e-05,
      "loss": 1.596,
      "step": 685
    },
    {
      "epoch": 0.44145873320537427,
      "grad_norm": 2.9255780377453067,
      "learning_rate": 1.3707822561706336e-05,
      "loss": 1.5943,
      "step": 690
    },
    {
      "epoch": 0.4446577095329495,
      "grad_norm": 3.1537106252949907,
      "learning_rate": 1.3603835879109442e-05,
      "loss": 1.4963,
      "step": 695
    },
    {
      "epoch": 0.44785668586052463,
      "grad_norm": 2.635217049328388,
      "learning_rate": 1.349939938600099e-05,
      "loss": 1.5705,
      "step": 700
    },
    {
      "epoch": 0.4510556621880998,
      "grad_norm": 2.922587063417034,
      "learning_rate": 1.3394526117556277e-05,
      "loss": 1.5518,
      "step": 705
    },
    {
      "epoch": 0.454254638515675,
      "grad_norm": 2.807359635537847,
      "learning_rate": 1.3289229163466421e-05,
      "loss": 1.5568,
      "step": 710
    },
    {
      "epoch": 0.45745361484325014,
      "grad_norm": 2.931234500751365,
      "learning_rate": 1.3183521666304611e-05,
      "loss": 1.5254,
      "step": 715
    },
    {
      "epoch": 0.46065259117082535,
      "grad_norm": 2.888485871147149,
      "learning_rate": 1.3077416819885707e-05,
      "loss": 1.5664,
      "step": 720
    },
    {
      "epoch": 0.4638515674984005,
      "grad_norm": 2.9721361024671564,
      "learning_rate": 1.297092786761946e-05,
      "loss": 1.594,
      "step": 725
    },
    {
      "epoch": 0.46705054382597566,
      "grad_norm": 2.8023380182627315,
      "learning_rate": 1.2864068100857565e-05,
      "loss": 1.5485,
      "step": 730
    },
    {
      "epoch": 0.47024952015355087,
      "grad_norm": 2.6053459784808846,
      "learning_rate": 1.2756850857234686e-05,
      "loss": 1.4846,
      "step": 735
    },
    {
      "epoch": 0.473448496481126,
      "grad_norm": 2.946713804424543,
      "learning_rate": 1.2649289519003739e-05,
      "loss": 1.5685,
      "step": 740
    },
    {
      "epoch": 0.47664747280870123,
      "grad_norm": 3.1265447806548656,
      "learning_rate": 1.2541397511365584e-05,
      "loss": 1.5732,
      "step": 745
    },
    {
      "epoch": 0.4798464491362764,
      "grad_norm": 2.773194480869555,
      "learning_rate": 1.2433188300793399e-05,
      "loss": 1.5775,
      "step": 750
    },
    {
      "epoch": 0.48304542546385154,
      "grad_norm": 2.9306712944849007,
      "learning_rate": 1.2324675393351818e-05,
      "loss": 1.5981,
      "step": 755
    },
    {
      "epoch": 0.48624440179142675,
      "grad_norm": 2.828506147564432,
      "learning_rate": 1.221587233301123e-05,
      "loss": 1.6012,
      "step": 760
    },
    {
      "epoch": 0.4894433781190019,
      "grad_norm": 3.122644969081292,
      "learning_rate": 1.2106792699957264e-05,
      "loss": 1.5529,
      "step": 765
    },
    {
      "epoch": 0.4926423544465771,
      "grad_norm": 2.8516675851476463,
      "learning_rate": 1.1997450108895807e-05,
      "loss": 1.5651,
      "step": 770
    },
    {
      "epoch": 0.49584133077415227,
      "grad_norm": 2.79771041315179,
      "learning_rate": 1.1887858207353678e-05,
      "loss": 1.5471,
      "step": 775
    },
    {
      "epoch": 0.4990403071017274,
      "grad_norm": 3.0118454884149637,
      "learning_rate": 1.1778030673975227e-05,
      "loss": 1.589,
      "step": 780
    },
    {
      "epoch": 0.5022392834293026,
      "grad_norm": 2.769695350025131,
      "learning_rate": 1.166798121681505e-05,
      "loss": 1.5049,
      "step": 785
    },
    {
      "epoch": 0.5054382597568778,
      "grad_norm": 2.8340769775532664,
      "learning_rate": 1.1557723571627016e-05,
      "loss": 1.5411,
      "step": 790
    },
    {
      "epoch": 0.508637236084453,
      "grad_norm": 2.750139717769645,
      "learning_rate": 1.1447271500149849e-05,
      "loss": 1.5642,
      "step": 795
    },
    {
      "epoch": 0.5118362124120281,
      "grad_norm": 2.875040151356922,
      "learning_rate": 1.1336638788389473e-05,
      "loss": 1.5641,
      "step": 800
    },
    {
      "epoch": 0.5150351887396033,
      "grad_norm": 2.751585918983617,
      "learning_rate": 1.122583924489832e-05,
      "loss": 1.522,
      "step": 805
    },
    {
      "epoch": 0.5182341650671785,
      "grad_norm": 2.7957607756494354,
      "learning_rate": 1.1114886699051803e-05,
      "loss": 1.5684,
      "step": 810
    },
    {
      "epoch": 0.5214331413947537,
      "grad_norm": 3.018448300935822,
      "learning_rate": 1.1003794999322246e-05,
      "loss": 1.5644,
      "step": 815
    },
    {
      "epoch": 0.5246321177223289,
      "grad_norm": 2.833305209680887,
      "learning_rate": 1.089257801155037e-05,
      "loss": 1.5551,
      "step": 820
    },
    {
      "epoch": 0.527831094049904,
      "grad_norm": 2.8051124688710187,
      "learning_rate": 1.0781249617214642e-05,
      "loss": 1.5526,
      "step": 825
    },
    {
      "epoch": 0.5310300703774792,
      "grad_norm": 2.970008708784057,
      "learning_rate": 1.0669823711698668e-05,
      "loss": 1.5683,
      "step": 830
    },
    {
      "epoch": 0.5342290467050543,
      "grad_norm": 2.9429042611125578,
      "learning_rate": 1.0558314202556866e-05,
      "loss": 1.547,
      "step": 835
    },
    {
      "epoch": 0.5374280230326296,
      "grad_norm": 3.0145128665406093,
      "learning_rate": 1.0446735007778563e-05,
      "loss": 1.5721,
      "step": 840
    },
    {
      "epoch": 0.5406269993602048,
      "grad_norm": 2.8199366350819557,
      "learning_rate": 1.0335100054050877e-05,
      "loss": 1.5137,
      "step": 845
    },
    {
      "epoch": 0.5438259756877799,
      "grad_norm": 2.883058571653099,
      "learning_rate": 1.0223423275020431e-05,
      "loss": 1.5268,
      "step": 850
    },
    {
      "epoch": 0.5470249520153551,
      "grad_norm": 2.8695090948484356,
      "learning_rate": 1.0111718609554254e-05,
      "loss": 1.5612,
      "step": 855
    },
    {
      "epoch": 0.5502239283429302,
      "grad_norm": 2.840292039654959,
      "learning_rate": 1e-05,
      "loss": 1.5206,
      "step": 860
    },
    {
      "epoch": 0.5534229046705055,
      "grad_norm": 2.9448801594067935,
      "learning_rate": 9.888281390445747e-06,
      "loss": 1.4688,
      "step": 865
    },
    {
      "epoch": 0.5566218809980806,
      "grad_norm": 2.7726084893952017,
      "learning_rate": 9.776576724979572e-06,
      "loss": 1.4634,
      "step": 870
    },
    {
      "epoch": 0.5598208573256558,
      "grad_norm": 2.926043434000589,
      "learning_rate": 9.664899945949128e-06,
      "loss": 1.6195,
      "step": 875
    },
    {
      "epoch": 0.5630198336532309,
      "grad_norm": 2.6771555462563352,
      "learning_rate": 9.553264992221442e-06,
      "loss": 1.5128,
      "step": 880
    },
    {
      "epoch": 0.5662188099808061,
      "grad_norm": 2.947093372690194,
      "learning_rate": 9.441685797443138e-06,
      "loss": 1.5762,
      "step": 885
    },
    {
      "epoch": 0.5694177863083814,
      "grad_norm": 2.862147214119077,
      "learning_rate": 9.330176288301332e-06,
      "loss": 1.5518,
      "step": 890
    },
    {
      "epoch": 0.5726167626359565,
      "grad_norm": 2.9082851625533728,
      "learning_rate": 9.21875038278536e-06,
      "loss": 1.6275,
      "step": 895
    },
    {
      "epoch": 0.5758157389635317,
      "grad_norm": 2.896161142681693,
      "learning_rate": 9.107421988449632e-06,
      "loss": 1.4906,
      "step": 900
    },
    {
      "epoch": 0.5790147152911068,
      "grad_norm": 2.958572021767601,
      "learning_rate": 8.996205000677758e-06,
      "loss": 1.5325,
      "step": 905
    },
    {
      "epoch": 0.582213691618682,
      "grad_norm": 2.9475486505888004,
      "learning_rate": 8.885113300948199e-06,
      "loss": 1.5133,
      "step": 910
    },
    {
      "epoch": 0.5854126679462572,
      "grad_norm": 2.9704884794760584,
      "learning_rate": 8.774160755101685e-06,
      "loss": 1.4918,
      "step": 915
    },
    {
      "epoch": 0.5886116442738324,
      "grad_norm": 2.9833940683910187,
      "learning_rate": 8.663361211610529e-06,
      "loss": 1.492,
      "step": 920
    },
    {
      "epoch": 0.5918106206014075,
      "grad_norm": 3.072957307509492,
      "learning_rate": 8.552728499850153e-06,
      "loss": 1.5105,
      "step": 925
    },
    {
      "epoch": 0.5950095969289827,
      "grad_norm": 2.720493027907383,
      "learning_rate": 8.442276428372986e-06,
      "loss": 1.5727,
      "step": 930
    },
    {
      "epoch": 0.5982085732565579,
      "grad_norm": 2.8343530510078696,
      "learning_rate": 8.332018783184952e-06,
      "loss": 1.5737,
      "step": 935
    },
    {
      "epoch": 0.6014075495841331,
      "grad_norm": 2.736869895029898,
      "learning_rate": 8.221969326024776e-06,
      "loss": 1.4817,
      "step": 940
    },
    {
      "epoch": 0.6046065259117083,
      "grad_norm": 2.9515686637484455,
      "learning_rate": 8.112141792646326e-06,
      "loss": 1.5827,
      "step": 945
    },
    {
      "epoch": 0.6078055022392834,
      "grad_norm": 2.998496824719505,
      "learning_rate": 8.002549891104196e-06,
      "loss": 1.5098,
      "step": 950
    },
    {
      "epoch": 0.6110044785668586,
      "grad_norm": 2.9290208992184468,
      "learning_rate": 7.89320730004274e-06,
      "loss": 1.5151,
      "step": 955
    },
    {
      "epoch": 0.6142034548944337,
      "grad_norm": 2.8425267712019044,
      "learning_rate": 7.784127666988774e-06,
      "loss": 1.5691,
      "step": 960
    },
    {
      "epoch": 0.617402431222009,
      "grad_norm": 2.774978059880357,
      "learning_rate": 7.675324606648187e-06,
      "loss": 1.4685,
      "step": 965
    },
    {
      "epoch": 0.6206014075495841,
      "grad_norm": 3.2182013564031298,
      "learning_rate": 7.566811699206604e-06,
      "loss": 1.5308,
      "step": 970
    },
    {
      "epoch": 0.6238003838771593,
      "grad_norm": 2.9391385630232914,
      "learning_rate": 7.458602488634416e-06,
      "loss": 1.5132,
      "step": 975
    },
    {
      "epoch": 0.6269993602047345,
      "grad_norm": 2.8772833599792986,
      "learning_rate": 7.350710480996266e-06,
      "loss": 1.5446,
      "step": 980
    },
    {
      "epoch": 0.6301983365323096,
      "grad_norm": 2.9609923922572694,
      "learning_rate": 7.243149142765317e-06,
      "loss": 1.5186,
      "step": 985
    },
    {
      "epoch": 0.6333973128598849,
      "grad_norm": 2.9050339268535734,
      "learning_rate": 7.135931899142438e-06,
      "loss": 1.5265,
      "step": 990
    },
    {
      "epoch": 0.63659628918746,
      "grad_norm": 2.8553553938975362,
      "learning_rate": 7.029072132380543e-06,
      "loss": 1.5446,
      "step": 995
    },
    {
      "epoch": 0.6397952655150352,
      "grad_norm": 2.8905623377823004,
      "learning_rate": 6.922583180114299e-06,
      "loss": 1.5403,
      "step": 1000
    },
    {
      "epoch": 0.6429942418426103,
      "grad_norm": 3.064334779626001,
      "learning_rate": 6.816478333695392e-06,
      "loss": 1.4827,
      "step": 1005
    },
    {
      "epoch": 0.6461932181701855,
      "grad_norm": 2.8732323238647424,
      "learning_rate": 6.710770836533584e-06,
      "loss": 1.5491,
      "step": 1010
    },
    {
      "epoch": 0.6493921944977608,
      "grad_norm": 2.925690047754127,
      "learning_rate": 6.605473882443725e-06,
      "loss": 1.4563,
      "step": 1015
    },
    {
      "epoch": 0.6525911708253359,
      "grad_norm": 2.7659749123153388,
      "learning_rate": 6.5006006139990115e-06,
      "loss": 1.5596,
      "step": 1020
    },
    {
      "epoch": 0.6557901471529111,
      "grad_norm": 2.8517880123774377,
      "learning_rate": 6.396164120890562e-06,
      "loss": 1.5336,
      "step": 1025
    },
    {
      "epoch": 0.6589891234804862,
      "grad_norm": 2.7182143838244053,
      "learning_rate": 6.292177438293665e-06,
      "loss": 1.5715,
      "step": 1030
    },
    {
      "epoch": 0.6621880998080614,
      "grad_norm": 2.8942007565586456,
      "learning_rate": 6.188653545240793e-06,
      "loss": 1.4804,
      "step": 1035
    },
    {
      "epoch": 0.6653870761356366,
      "grad_norm": 2.8706015223254515,
      "learning_rate": 6.085605363001628e-06,
      "loss": 1.4872,
      "step": 1040
    },
    {
      "epoch": 0.6685860524632118,
      "grad_norm": 3.073092993550976,
      "learning_rate": 5.983045753470308e-06,
      "loss": 1.5443,
      "step": 1045
    },
    {
      "epoch": 0.6717850287907869,
      "grad_norm": 2.969370140043883,
      "learning_rate": 5.880987517560075e-06,
      "loss": 1.4585,
      "step": 1050
    },
    {
      "epoch": 0.6749840051183621,
      "grad_norm": 2.7750123138453997,
      "learning_rate": 5.779443393605536e-06,
      "loss": 1.4895,
      "step": 1055
    },
    {
      "epoch": 0.6781829814459372,
      "grad_norm": 3.219255943862367,
      "learning_rate": 5.678426055772746e-06,
      "loss": 1.5289,
      "step": 1060
    },
    {
      "epoch": 0.6813819577735125,
      "grad_norm": 2.8400549429245228,
      "learning_rate": 5.577948112477271e-06,
      "loss": 1.5421,
      "step": 1065
    },
    {
      "epoch": 0.6845809341010877,
      "grad_norm": 3.1481123457663935,
      "learning_rate": 5.478022104810483e-06,
      "loss": 1.5034,
      "step": 1070
    },
    {
      "epoch": 0.6877799104286628,
      "grad_norm": 2.8544263823046427,
      "learning_rate": 5.378660504974271e-06,
      "loss": 1.463,
      "step": 1075
    },
    {
      "epoch": 0.690978886756238,
      "grad_norm": 2.9298014223513893,
      "learning_rate": 5.2798757147242986e-06,
      "loss": 1.5628,
      "step": 1080
    },
    {
      "epoch": 0.6941778630838131,
      "grad_norm": 3.1149389988395373,
      "learning_rate": 5.1816800638221176e-06,
      "loss": 1.5174,
      "step": 1085
    },
    {
      "epoch": 0.6973768394113884,
      "grad_norm": 2.917187672993627,
      "learning_rate": 5.084085808496211e-06,
      "loss": 1.5215,
      "step": 1090
    },
    {
      "epoch": 0.7005758157389635,
      "grad_norm": 2.963114425511993,
      "learning_rate": 4.987105129912252e-06,
      "loss": 1.48,
      "step": 1095
    },
    {
      "epoch": 0.7037747920665387,
      "grad_norm": 2.9831391059980303,
      "learning_rate": 4.890750132652724e-06,
      "loss": 1.514,
      "step": 1100
    },
    {
      "epoch": 0.7069737683941139,
      "grad_norm": 3.0096475725136163,
      "learning_rate": 4.795032843206091e-06,
      "loss": 1.4868,
      "step": 1105
    },
    {
      "epoch": 0.710172744721689,
      "grad_norm": 3.0132931997185923,
      "learning_rate": 4.699965208465702e-06,
      "loss": 1.5582,
      "step": 1110
    },
    {
      "epoch": 0.7133717210492643,
      "grad_norm": 3.0669134476552973,
      "learning_rate": 4.605559094238681e-06,
      "loss": 1.5778,
      "step": 1115
    },
    {
      "epoch": 0.7165706973768394,
      "grad_norm": 2.7026304976719735,
      "learning_rate": 4.511826283764872e-06,
      "loss": 1.5447,
      "step": 1120
    },
    {
      "epoch": 0.7197696737044146,
      "grad_norm": 2.4264157327432567,
      "learning_rate": 4.418778476246132e-06,
      "loss": 1.4316,
      "step": 1125
    },
    {
      "epoch": 0.7229686500319897,
      "grad_norm": 2.767093184272473,
      "learning_rate": 4.326427285386118e-06,
      "loss": 1.5045,
      "step": 1130
    },
    {
      "epoch": 0.7261676263595649,
      "grad_norm": 2.7525423984037745,
      "learning_rate": 4.234784237940705e-06,
      "loss": 1.4692,
      "step": 1135
    },
    {
      "epoch": 0.7293666026871402,
      "grad_norm": 2.8302392998586265,
      "learning_rate": 4.143860772279287e-06,
      "loss": 1.4915,
      "step": 1140
    },
    {
      "epoch": 0.7325655790147153,
      "grad_norm": 3.086044746252559,
      "learning_rate": 4.053668236957135e-06,
      "loss": 1.5279,
      "step": 1145
    },
    {
      "epoch": 0.7357645553422905,
      "grad_norm": 2.992189198294163,
      "learning_rate": 3.964217889298882e-06,
      "loss": 1.4963,
      "step": 1150
    },
    {
      "epoch": 0.7389635316698656,
      "grad_norm": 2.8913624378602782,
      "learning_rate": 3.875520893993482e-06,
      "loss": 1.5224,
      "step": 1155
    },
    {
      "epoch": 0.7421625079974408,
      "grad_norm": 2.966579181678815,
      "learning_rate": 3.787588321700697e-06,
      "loss": 1.5446,
      "step": 1160
    },
    {
      "epoch": 0.745361484325016,
      "grad_norm": 2.7826554976617395,
      "learning_rate": 3.7004311476692977e-06,
      "loss": 1.5291,
      "step": 1165
    },
    {
      "epoch": 0.7485604606525912,
      "grad_norm": 2.8741241950919587,
      "learning_rate": 3.6140602503672217e-06,
      "loss": 1.5155,
      "step": 1170
    },
    {
      "epoch": 0.7517594369801663,
      "grad_norm": 3.2498424072079732,
      "learning_rate": 3.528486410123758e-06,
      "loss": 1.4732,
      "step": 1175
    },
    {
      "epoch": 0.7549584133077415,
      "grad_norm": 2.889481003290222,
      "learning_rate": 3.443720307784022e-06,
      "loss": 1.4917,
      "step": 1180
    },
    {
      "epoch": 0.7581573896353166,
      "grad_norm": 2.91590203305323,
      "learning_rate": 3.359772523375837e-06,
      "loss": 1.5481,
      "step": 1185
    },
    {
      "epoch": 0.7613563659628919,
      "grad_norm": 2.9904843978709406,
      "learning_rate": 3.276653534789185e-06,
      "loss": 1.5098,
      "step": 1190
    },
    {
      "epoch": 0.7645553422904671,
      "grad_norm": 3.0417192905803416,
      "learning_rate": 3.1943737164684094e-06,
      "loss": 1.5058,
      "step": 1195
    },
    {
      "epoch": 0.7677543186180422,
      "grad_norm": 2.928336132975389,
      "learning_rate": 3.11294333811736e-06,
      "loss": 1.4466,
      "step": 1200
    },
    {
      "epoch": 0.7709532949456174,
      "grad_norm": 2.9614436889799296,
      "learning_rate": 3.032372563417556e-06,
      "loss": 1.5117,
      "step": 1205
    },
    {
      "epoch": 0.7741522712731925,
      "grad_norm": 3.0765832757123373,
      "learning_rate": 2.952671448759631e-06,
      "loss": 1.5068,
      "step": 1210
    },
    {
      "epoch": 0.7773512476007678,
      "grad_norm": 2.8889529624782027,
      "learning_rate": 2.8738499419881517e-06,
      "loss": 1.5149,
      "step": 1215
    },
    {
      "epoch": 0.780550223928343,
      "grad_norm": 3.033338590523889,
      "learning_rate": 2.795917881159973e-06,
      "loss": 1.4582,
      "step": 1220
    },
    {
      "epoch": 0.7837492002559181,
      "grad_norm": 3.122248429780225,
      "learning_rate": 2.718884993316311e-06,
      "loss": 1.5254,
      "step": 1225
    },
    {
      "epoch": 0.7869481765834933,
      "grad_norm": 2.8620612033770048,
      "learning_rate": 2.642760893268684e-06,
      "loss": 1.4926,
      "step": 1230
    },
    {
      "epoch": 0.7901471529110684,
      "grad_norm": 2.957494409747179,
      "learning_rate": 2.5675550823988184e-06,
      "loss": 1.5024,
      "step": 1235
    },
    {
      "epoch": 0.7933461292386437,
      "grad_norm": 2.9507185427703164,
      "learning_rate": 2.493276947472756e-06,
      "loss": 1.5038,
      "step": 1240
    },
    {
      "epoch": 0.7965451055662188,
      "grad_norm": 3.1468644000814665,
      "learning_rate": 2.4199357594692564e-06,
      "loss": 1.4553,
      "step": 1245
    },
    {
      "epoch": 0.799744081893794,
      "grad_norm": 3.020046365559696,
      "learning_rate": 2.347540672422627e-06,
      "loss": 1.4488,
      "step": 1250
    },
    {
      "epoch": 0.8029430582213691,
      "grad_norm": 3.0758818025981,
      "learning_rate": 2.2761007222801866e-06,
      "loss": 1.4925,
      "step": 1255
    },
    {
      "epoch": 0.8061420345489443,
      "grad_norm": 3.0147595677345542,
      "learning_rate": 2.2056248257744383e-06,
      "loss": 1.4815,
      "step": 1260
    },
    {
      "epoch": 0.8093410108765196,
      "grad_norm": 2.734206633065449,
      "learning_rate": 2.136121779310132e-06,
      "loss": 1.4939,
      "step": 1265
    },
    {
      "epoch": 0.8125399872040947,
      "grad_norm": 3.02643548047932,
      "learning_rate": 2.067600257866358e-06,
      "loss": 1.508,
      "step": 1270
    },
    {
      "epoch": 0.8157389635316699,
      "grad_norm": 3.125756359469005,
      "learning_rate": 2.000068813913777e-06,
      "loss": 1.5049,
      "step": 1275
    },
    {
      "epoch": 0.818937939859245,
      "grad_norm": 2.837342662182425,
      "learning_rate": 1.933535876347141e-06,
      "loss": 1.5284,
      "step": 1280
    },
    {
      "epoch": 0.8221369161868202,
      "grad_norm": 3.0307641378967536,
      "learning_rate": 1.8680097494332682e-06,
      "loss": 1.4999,
      "step": 1285
    },
    {
      "epoch": 0.8253358925143954,
      "grad_norm": 2.801930434751216,
      "learning_rate": 1.8034986117745245e-06,
      "loss": 1.4563,
      "step": 1290
    },
    {
      "epoch": 0.8285348688419706,
      "grad_norm": 3.0342605155241213,
      "learning_rate": 1.7400105152880331e-06,
      "loss": 1.4706,
      "step": 1295
    },
    {
      "epoch": 0.8317338451695457,
      "grad_norm": 2.981972081491564,
      "learning_rate": 1.6775533842006853e-06,
      "loss": 1.5812,
      "step": 1300
    },
    {
      "epoch": 0.8349328214971209,
      "grad_norm": 2.95896665427553,
      "learning_rate": 1.6161350140600606e-06,
      "loss": 1.4774,
      "step": 1305
    },
    {
      "epoch": 0.838131797824696,
      "grad_norm": 2.8462517601587503,
      "learning_rate": 1.555763070761448e-06,
      "loss": 1.5473,
      "step": 1310
    },
    {
      "epoch": 0.8413307741522713,
      "grad_norm": 2.8561706155053552,
      "learning_rate": 1.496445089591031e-06,
      "loss": 1.5234,
      "step": 1315
    },
    {
      "epoch": 0.8445297504798465,
      "grad_norm": 3.3872912897412983,
      "learning_rate": 1.4381884742853723e-06,
      "loss": 1.5102,
      "step": 1320
    },
    {
      "epoch": 0.8477287268074216,
      "grad_norm": 3.0883959119755464,
      "learning_rate": 1.381000496107313e-06,
      "loss": 1.4682,
      "step": 1325
    },
    {
      "epoch": 0.8509277031349968,
      "grad_norm": 3.086585424865861,
      "learning_rate": 1.3248882929384321e-06,
      "loss": 1.4491,
      "step": 1330
    },
    {
      "epoch": 0.8541266794625719,
      "grad_norm": 2.8090437396140326,
      "learning_rate": 1.2698588683881185e-06,
      "loss": 1.521,
      "step": 1335
    },
    {
      "epoch": 0.8573256557901472,
      "grad_norm": 2.9900037626653257,
      "learning_rate": 1.215919090919434e-06,
      "loss": 1.556,
      "step": 1340
    },
    {
      "epoch": 0.8605246321177223,
      "grad_norm": 3.167648071542424,
      "learning_rate": 1.1630756929918218e-06,
      "loss": 1.3722,
      "step": 1345
    },
    {
      "epoch": 0.8637236084452975,
      "grad_norm": 3.1006715482553213,
      "learning_rate": 1.111335270220798e-06,
      "loss": 1.4499,
      "step": 1350
    },
    {
      "epoch": 0.8669225847728727,
      "grad_norm": 3.1182518143820515,
      "learning_rate": 1.060704280554733e-06,
      "loss": 1.4923,
      "step": 1355
    },
    {
      "epoch": 0.8701215611004478,
      "grad_norm": 2.8077044413187777,
      "learning_rate": 1.0111890434688009e-06,
      "loss": 1.5196,
      "step": 1360
    },
    {
      "epoch": 0.8733205374280231,
      "grad_norm": 3.021623866473881,
      "learning_rate": 9.62795739176212e-07,
      "loss": 1.5857,
      "step": 1365
    },
    {
      "epoch": 0.8765195137555982,
      "grad_norm": 2.917831434671116,
      "learning_rate": 9.155304078568495e-07,
      "loss": 1.4386,
      "step": 1370
    },
    {
      "epoch": 0.8797184900831734,
      "grad_norm": 3.1639039913208444,
      "learning_rate": 8.693989489033494e-07,
      "loss": 1.4134,
      "step": 1375
    },
    {
      "epoch": 0.8829174664107485,
      "grad_norm": 3.004158580298322,
      "learning_rate": 8.244071201847826e-07,
      "loss": 1.4527,
      "step": 1380
    },
    {
      "epoch": 0.8861164427383237,
      "grad_norm": 3.0065051769894358,
      "learning_rate": 7.805605373279934e-07,
      "loss": 1.4655,
      "step": 1385
    },
    {
      "epoch": 0.889315419065899,
      "grad_norm": 2.771127515593038,
      "learning_rate": 7.378646730166783e-07,
      "loss": 1.4788,
      "step": 1390
    },
    {
      "epoch": 0.8925143953934741,
      "grad_norm": 3.035838391028782,
      "learning_rate": 6.963248563083203e-07,
      "loss": 1.4977,
      "step": 1395
    },
    {
      "epoch": 0.8957133717210493,
      "grad_norm": 2.8916082991419776,
      "learning_rate": 6.559462719690501e-07,
      "loss": 1.4785,
      "step": 1400
    },
    {
      "epoch": 0.8989123480486244,
      "grad_norm": 2.883458011693006,
      "learning_rate": 6.167339598265109e-07,
      "loss": 1.4753,
      "step": 1405
    },
    {
      "epoch": 0.9021113243761996,
      "grad_norm": 3.0406171251728242,
      "learning_rate": 5.78692814140811e-07,
      "loss": 1.4516,
      "step": 1410
    },
    {
      "epoch": 0.9053103007037748,
      "grad_norm": 2.896215057217351,
      "learning_rate": 5.418275829936537e-07,
      "loss": 1.4805,
      "step": 1415
    },
    {
      "epoch": 0.90850927703135,
      "grad_norm": 3.0263277055612123,
      "learning_rate": 5.06142867695708e-07,
      "loss": 1.4574,
      "step": 1420
    },
    {
      "epoch": 0.9117082533589251,
      "grad_norm": 2.9240236260600843,
      "learning_rate": 4.716431222122997e-07,
      "loss": 1.4998,
      "step": 1425
    },
    {
      "epoch": 0.9149072296865003,
      "grad_norm": 3.0684358127407254,
      "learning_rate": 4.3833265260749157e-07,
      "loss": 1.4883,
      "step": 1430
    },
    {
      "epoch": 0.9181062060140754,
      "grad_norm": 3.0229386052146068,
      "learning_rate": 4.062156165066211e-07,
      "loss": 1.5184,
      "step": 1435
    },
    {
      "epoch": 0.9213051823416507,
      "grad_norm": 2.993215240222199,
      "learning_rate": 3.752960225773772e-07,
      "loss": 1.4656,
      "step": 1440
    },
    {
      "epoch": 0.9245041586692259,
      "grad_norm": 2.8375909059845075,
      "learning_rate": 3.4557773002945607e-07,
      "loss": 1.4481,
      "step": 1445
    },
    {
      "epoch": 0.927703134996801,
      "grad_norm": 2.8676821088917284,
      "learning_rate": 3.170644481328711e-07,
      "loss": 1.4774,
      "step": 1450
    },
    {
      "epoch": 0.9309021113243762,
      "grad_norm": 3.093581781769162,
      "learning_rate": 2.8975973575499526e-07,
      "loss": 1.4958,
      "step": 1455
    },
    {
      "epoch": 0.9341010876519513,
      "grad_norm": 2.921596866453042,
      "learning_rate": 2.636670009163522e-07,
      "loss": 1.5072,
      "step": 1460
    },
    {
      "epoch": 0.9373000639795266,
      "grad_norm": 2.915634124882459,
      "learning_rate": 2.3878950036524963e-07,
      "loss": 1.4001,
      "step": 1465
    },
    {
      "epoch": 0.9404990403071017,
      "grad_norm": 2.9949792867089267,
      "learning_rate": 2.1513033917129334e-07,
      "loss": 1.4909,
      "step": 1470
    },
    {
      "epoch": 0.9436980166346769,
      "grad_norm": 3.237875827515494,
      "learning_rate": 1.9269247033782744e-07,
      "loss": 1.4821,
      "step": 1475
    },
    {
      "epoch": 0.946896992962252,
      "grad_norm": 2.8350786807512005,
      "learning_rate": 1.7147869443335463e-07,
      "loss": 1.4834,
      "step": 1480
    },
    {
      "epoch": 0.9500959692898272,
      "grad_norm": 2.836916452386956,
      "learning_rate": 1.5149165924199016e-07,
      "loss": 1.4725,
      "step": 1485
    },
    {
      "epoch": 0.9532949456174025,
      "grad_norm": 2.9144755189548257,
      "learning_rate": 1.3273385943297746e-07,
      "loss": 1.45,
      "step": 1490
    },
    {
      "epoch": 0.9564939219449776,
      "grad_norm": 3.073942638939728,
      "learning_rate": 1.1520763624931597e-07,
      "loss": 1.6223,
      "step": 1495
    },
    {
      "epoch": 0.9596928982725528,
      "grad_norm": 3.032206669875784,
      "learning_rate": 9.891517721554499e-08,
      "loss": 1.4349,
      "step": 1500
    },
    {
      "epoch": 0.9628918746001279,
      "grad_norm": 2.955714005801713,
      "learning_rate": 8.385851586470318e-08,
      "loss": 1.4398,
      "step": 1505
    },
    {
      "epoch": 0.9660908509277031,
      "grad_norm": 3.0917574540101835,
      "learning_rate": 7.003953148452036e-08,
      "loss": 1.5322,
      "step": 1510
    },
    {
      "epoch": 0.9692898272552783,
      "grad_norm": 2.8583565158367126,
      "learning_rate": 5.745994888285311e-08,
      "loss": 1.4905,
      "step": 1515
    },
    {
      "epoch": 0.9724888035828535,
      "grad_norm": 3.0580164488934667,
      "learning_rate": 4.612133817239905e-08,
      "loss": 1.4884,
      "step": 1520
    },
    {
      "epoch": 0.9756877799104287,
      "grad_norm": 3.217782652408035,
      "learning_rate": 3.602511457473479e-08,
      "loss": 1.4992,
      "step": 1525
    },
    {
      "epoch": 0.9788867562380038,
      "grad_norm": 2.7243818878695465,
      "learning_rate": 2.7172538243666057e-08,
      "loss": 1.4353,
      "step": 1530
    },
    {
      "epoch": 0.982085732565579,
      "grad_norm": 2.895493061043642,
      "learning_rate": 1.9564714107945804e-08,
      "loss": 1.5436,
      "step": 1535
    },
    {
      "epoch": 0.9852847088931542,
      "grad_norm": 2.9802671522029742,
      "learning_rate": 1.3202591733365577e-08,
      "loss": 1.4379,
      "step": 1540
    },
    {
      "epoch": 0.9884836852207294,
      "grad_norm": 2.9884671963598284,
      "learning_rate": 8.086965204233688e-09,
      "loss": 1.5177,
      "step": 1545
    },
    {
      "epoch": 0.9916826615483045,
      "grad_norm": 3.4092231126752415,
      "learning_rate": 4.218473024261149e-09,
      "loss": 1.5238,
      "step": 1550
    },
    {
      "epoch": 0.9948816378758797,
      "grad_norm": 2.865378574655855,
      "learning_rate": 1.5975980368709843e-09,
      "loss": 1.4348,
      "step": 1555
    },
    {
      "epoch": 0.9980806142034548,
      "grad_norm": 2.9847533922224017,
      "learning_rate": 2.2466736492643416e-10,
      "loss": 1.4252,
      "step": 1560
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.5372248888015747,
      "eval_runtime": 19.0671,
      "eval_samples_per_second": 26.223,
      "eval_steps_per_second": 0.839,
      "step": 1563
    },
    {
      "epoch": 1.0,
      "step": 1563,
      "total_flos": 22001167835136.0,
      "train_loss": 1.5636964343102102,
      "train_runtime": 6788.8098,
      "train_samples_per_second": 7.365,
      "train_steps_per_second": 0.23
    }
  ],
  "logging_steps": 5,
  "max_steps": 1563,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 22001167835136.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}