|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8536463369902297,
  "eval_steps": 500,
  "global_step": 6400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
{ |
|
"epoch": 0.001333822401547234, |
|
"grad_norm": 5.80256772259428, |
|
"learning_rate": 4e-06, |
|
"loss": 1.0498, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.002667644803094468, |
|
"grad_norm": 33.895696082107904, |
|
"learning_rate": 8e-06, |
|
"loss": 1.0653, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.004001467204641702, |
|
"grad_norm": 5.523348234283539, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.0341, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.005335289606188936, |
|
"grad_norm": 11.1556403156453, |
|
"learning_rate": 1.6e-05, |
|
"loss": 0.9692, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.00666911200773617, |
|
"grad_norm": 3.7375231126561825, |
|
"learning_rate": 1.9999999999999998e-05, |
|
"loss": 0.9554, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.008002934409283404, |
|
"grad_norm": 8.43538339698909, |
|
"learning_rate": 2.4e-05, |
|
"loss": 0.8965, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.009336756810830639, |
|
"grad_norm": 13.403454896011478, |
|
"learning_rate": 2.8e-05, |
|
"loss": 0.8273, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.010670579212377872, |
|
"grad_norm": 3.95522050766088, |
|
"learning_rate": 2.9999966406213696e-05, |
|
"loss": 0.7837, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.012004401613925107, |
|
"grad_norm": 36.799552052300854, |
|
"learning_rate": 2.9999697656826056e-05, |
|
"loss": 0.8288, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.01333822401547234, |
|
"grad_norm": 1.6305479563258536, |
|
"learning_rate": 2.9999160162865885e-05, |
|
"loss": 0.7778, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.014672046417019574, |
|
"grad_norm": 2.159536648784889, |
|
"learning_rate": 2.9998353933963273e-05, |
|
"loss": 0.7616, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.016005868818566808, |
|
"grad_norm": 3.397321425707004, |
|
"learning_rate": 2.999727898456315e-05, |
|
"loss": 0.7594, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.017339691220114042, |
|
"grad_norm": 4.772220837365037, |
|
"learning_rate": 2.999593533392503e-05, |
|
"loss": 0.756, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.018673513621661277, |
|
"grad_norm": 2.4845945633126885, |
|
"learning_rate": 2.9994323006122654e-05, |
|
"loss": 0.7601, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.02000733602320851, |
|
"grad_norm": 3.591682569169127, |
|
"learning_rate": 2.9992442030043557e-05, |
|
"loss": 0.7894, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.021341158424755743, |
|
"grad_norm": 2.5679458807474416, |
|
"learning_rate": 2.9990292439388565e-05, |
|
"loss": 0.7093, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.022674980826302978, |
|
"grad_norm": 1.9412569107551652, |
|
"learning_rate": 2.9987874272671168e-05, |
|
"loss": 0.706, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.024008803227850213, |
|
"grad_norm": 3.2667097270489, |
|
"learning_rate": 2.9985187573216855e-05, |
|
"loss": 0.7586, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.025342625629397444, |
|
"grad_norm": 4.4208737375400675, |
|
"learning_rate": 2.998223238916232e-05, |
|
"loss": 0.6985, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.02667644803094468, |
|
"grad_norm": 5.515966302183704, |
|
"learning_rate": 2.9979008773454618e-05, |
|
"loss": 0.7323, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.028010270432491914, |
|
"grad_norm": 2.964165450396077, |
|
"learning_rate": 2.997551678385019e-05, |
|
"loss": 0.7603, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.02934409283403915, |
|
"grad_norm": 3.0952916783456197, |
|
"learning_rate": 2.997175648291384e-05, |
|
"loss": 0.7421, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.03067791523558638, |
|
"grad_norm": 4.213588693904103, |
|
"learning_rate": 2.996772793801763e-05, |
|
"loss": 0.7322, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.032011737637133615, |
|
"grad_norm": 1.8568586103139084, |
|
"learning_rate": 2.996343122133965e-05, |
|
"loss": 0.6922, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.033345560038680847, |
|
"grad_norm": 4.494146778909846, |
|
"learning_rate": 2.9958866409862745e-05, |
|
"loss": 0.7244, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.034679382440228085, |
|
"grad_norm": 7.438170074282725, |
|
"learning_rate": 2.9954033585373108e-05, |
|
"loss": 0.7093, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.036013204841775316, |
|
"grad_norm": 2.3744787346857015, |
|
"learning_rate": 2.994893283445885e-05, |
|
"loss": 0.6983, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.037347027243322554, |
|
"grad_norm": 1.4722011682616383, |
|
"learning_rate": 2.9943564248508415e-05, |
|
"loss": 0.6781, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.038680849644869786, |
|
"grad_norm": 3.3397620832486075, |
|
"learning_rate": 2.9937927923708966e-05, |
|
"loss": 0.7399, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.04001467204641702, |
|
"grad_norm": 5.05063397044549, |
|
"learning_rate": 2.993202396104465e-05, |
|
"loss": 0.7671, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.041348494447964255, |
|
"grad_norm": 3.0128431385936767, |
|
"learning_rate": 2.9925852466294795e-05, |
|
"loss": 0.7015, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.04268231684951149, |
|
"grad_norm": 2.0161342716764237, |
|
"learning_rate": 2.9919413550032014e-05, |
|
"loss": 0.7009, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.04401613925105872, |
|
"grad_norm": 1.3114004070324985, |
|
"learning_rate": 2.991270732762022e-05, |
|
"loss": 0.7153, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.045349961652605957, |
|
"grad_norm": 18.493625676806268, |
|
"learning_rate": 2.990573391921255e-05, |
|
"loss": 0.7518, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.04668378405415319, |
|
"grad_norm": 2.9526764059703567, |
|
"learning_rate": 2.989849344974924e-05, |
|
"loss": 0.7133, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.048017606455700426, |
|
"grad_norm": 5.26274958582726, |
|
"learning_rate": 2.9890986048955368e-05, |
|
"loss": 0.7139, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.04935142885724766, |
|
"grad_norm": 3.5319788357887933, |
|
"learning_rate": 2.9883211851338516e-05, |
|
"loss": 0.7084, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.05068525125879489, |
|
"grad_norm": 7.607269935902469, |
|
"learning_rate": 2.9875170996186392e-05, |
|
"loss": 0.7309, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.05201907366034213, |
|
"grad_norm": 2.3456663308287253, |
|
"learning_rate": 2.986686362756431e-05, |
|
"loss": 0.6827, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.05335289606188936, |
|
"grad_norm": 2.176182050789012, |
|
"learning_rate": 2.9858289894312617e-05, |
|
"loss": 0.6995, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.0546867184634366, |
|
"grad_norm": 11.171630173781537, |
|
"learning_rate": 2.9849449950044036e-05, |
|
"loss": 0.7335, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.05602054086498383, |
|
"grad_norm": 6.63441431767892, |
|
"learning_rate": 2.984034395314088e-05, |
|
"loss": 0.7031, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.05735436326653106, |
|
"grad_norm": 2.861620412225736, |
|
"learning_rate": 2.983097206675227e-05, |
|
"loss": 0.6559, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.0586881856680783, |
|
"grad_norm": 5.523165036486206, |
|
"learning_rate": 2.9821334458791156e-05, |
|
"loss": 0.726, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.06002200806962553, |
|
"grad_norm": 3.5602243751368197, |
|
"learning_rate": 2.9811431301931344e-05, |
|
"loss": 0.7202, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.06135583047117276, |
|
"grad_norm": 11.333380381168622, |
|
"learning_rate": 2.9801262773604377e-05, |
|
"loss": 0.7189, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.06268965287271999, |
|
"grad_norm": 14.159758615106613, |
|
"learning_rate": 2.9790829055996398e-05, |
|
"loss": 0.7267, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.06402347527426723, |
|
"grad_norm": 9.009079485918289, |
|
"learning_rate": 2.978013033604483e-05, |
|
"loss": 0.748, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.06535729767581447, |
|
"grad_norm": 1.9682648681675994, |
|
"learning_rate": 2.976916680543506e-05, |
|
"loss": 0.7369, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.06669112007736169, |
|
"grad_norm": 2.9278164598232777, |
|
"learning_rate": 2.975793866059701e-05, |
|
"loss": 0.7037, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.06802494247890893, |
|
"grad_norm": 5.5563562303649885, |
|
"learning_rate": 2.9746446102701606e-05, |
|
"loss": 0.6986, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.06935876488045617, |
|
"grad_norm": 4.036767303783137, |
|
"learning_rate": 2.9734689337657157e-05, |
|
"loss": 0.7119, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.07069258728200341, |
|
"grad_norm": 1.9856990692088847, |
|
"learning_rate": 2.9722668576105703e-05, |
|
"loss": 0.7205, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.07202640968355063, |
|
"grad_norm": 5.200308739226583, |
|
"learning_rate": 2.971038403341921e-05, |
|
"loss": 0.6918, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.07336023208509787, |
|
"grad_norm": 2.237349124701919, |
|
"learning_rate": 2.9697835929695727e-05, |
|
"loss": 0.7339, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.07469405448664511, |
|
"grad_norm": 1.6388680632753365, |
|
"learning_rate": 2.968502448975544e-05, |
|
"loss": 0.7086, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.07602787688819233, |
|
"grad_norm": 2.8545575025135244, |
|
"learning_rate": 2.967194994313663e-05, |
|
"loss": 0.678, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.07736169928973957, |
|
"grad_norm": 2.674647983669599, |
|
"learning_rate": 2.9658612524091594e-05, |
|
"loss": 0.7119, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.07869552169128681, |
|
"grad_norm": 2.489047760330112, |
|
"learning_rate": 2.9645012471582406e-05, |
|
"loss": 0.7382, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.08002934409283403, |
|
"grad_norm": 5.509352102248308, |
|
"learning_rate": 2.9631150029276662e-05, |
|
"loss": 0.738, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.08136316649438127, |
|
"grad_norm": 3.6489235270404015, |
|
"learning_rate": 2.9617025445543114e-05, |
|
"loss": 0.7018, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.08269698889592851, |
|
"grad_norm": 2.7813651243235697, |
|
"learning_rate": 2.9602638973447218e-05, |
|
"loss": 0.7381, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.08403081129747574, |
|
"grad_norm": 8.271390523006518, |
|
"learning_rate": 2.9587990870746574e-05, |
|
"loss": 0.7168, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.08536463369902297, |
|
"grad_norm": 1.2460611751687307, |
|
"learning_rate": 2.9573081399886356e-05, |
|
"loss": 0.7004, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.08669845610057021, |
|
"grad_norm": 1.704626418994062, |
|
"learning_rate": 2.9557910827994568e-05, |
|
"loss": 0.738, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.08803227850211744, |
|
"grad_norm": 3.275051693107957, |
|
"learning_rate": 2.9542479426877283e-05, |
|
"loss": 0.7017, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.08936610090366467, |
|
"grad_norm": 11.389990685570503, |
|
"learning_rate": 2.9526787473013753e-05, |
|
"loss": 0.7107, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.09069992330521191, |
|
"grad_norm": 5.591277359184055, |
|
"learning_rate": 2.9510835247551485e-05, |
|
"loss": 0.7141, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.09203374570675915, |
|
"grad_norm": 3.180111568581053, |
|
"learning_rate": 2.949462303630116e-05, |
|
"loss": 0.6987, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.09336756810830638, |
|
"grad_norm": 3.8428068166831753, |
|
"learning_rate": 2.9478151129731567e-05, |
|
"loss": 0.7373, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.09470139050985361, |
|
"grad_norm": 2.231397231771392, |
|
"learning_rate": 2.9461419822964348e-05, |
|
"loss": 0.6962, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.09603521291140085, |
|
"grad_norm": 18.287201889017563, |
|
"learning_rate": 2.9444429415768726e-05, |
|
"loss": 0.6723, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.09736903531294808, |
|
"grad_norm": 4.340932687135137, |
|
"learning_rate": 2.942718021255617e-05, |
|
"loss": 0.7151, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.09870285771449532, |
|
"grad_norm": 2.7813821825484446, |
|
"learning_rate": 2.940967252237488e-05, |
|
"loss": 0.7332, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.10003668011604255, |
|
"grad_norm": 2.3251782912937475, |
|
"learning_rate": 2.9391906658904296e-05, |
|
"loss": 0.6751, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.10137050251758978, |
|
"grad_norm": 8.123799866292751, |
|
"learning_rate": 2.937388294044946e-05, |
|
"loss": 0.6886, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.10270432491913702, |
|
"grad_norm": 1.528579329214318, |
|
"learning_rate": 2.9355601689935315e-05, |
|
"loss": 0.7146, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.10403814732068425, |
|
"grad_norm": 2.0278953433974825, |
|
"learning_rate": 2.933706323490092e-05, |
|
"loss": 0.7453, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.10537196972223148, |
|
"grad_norm": 1.4306270659678864, |
|
"learning_rate": 2.9318267907493583e-05, |
|
"loss": 0.6702, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.10670579212377872, |
|
"grad_norm": 1.5178081087799355, |
|
"learning_rate": 2.9299216044462903e-05, |
|
"loss": 0.7346, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.10803961452532596, |
|
"grad_norm": 9.506616797760028, |
|
"learning_rate": 2.927990798715475e-05, |
|
"loss": 0.6558, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.1093734369268732, |
|
"grad_norm": 2.4597311302505767, |
|
"learning_rate": 2.926034408150513e-05, |
|
"loss": 0.726, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.11070725932842042, |
|
"grad_norm": 12.372180964422007, |
|
"learning_rate": 2.9240524678034016e-05, |
|
"loss": 0.7308, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.11204108172996766, |
|
"grad_norm": 1.4488469801164658, |
|
"learning_rate": 2.9220450131839037e-05, |
|
"loss": 0.7072, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.1133749041315149, |
|
"grad_norm": 8.602946960846197, |
|
"learning_rate": 2.920012080258912e-05, |
|
"loss": 0.7234, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.11470872653306212, |
|
"grad_norm": 1.441195423452674, |
|
"learning_rate": 2.9179537054518085e-05, |
|
"loss": 0.6934, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.11604254893460936, |
|
"grad_norm": 4.318952956999577, |
|
"learning_rate": 2.9158699256418056e-05, |
|
"loss": 0.6534, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.1173763713361566, |
|
"grad_norm": 9.733179695623866, |
|
"learning_rate": 2.9137607781632913e-05, |
|
"loss": 0.71, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.11871019373770382, |
|
"grad_norm": 7.397049093836735, |
|
"learning_rate": 2.911626300805155e-05, |
|
"loss": 0.7386, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.12004401613925106, |
|
"grad_norm": 2.920812240139869, |
|
"learning_rate": 2.9094665318101155e-05, |
|
"loss": 0.6789, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.1213778385407983, |
|
"grad_norm": 1.7031296196271206, |
|
"learning_rate": 2.9072815098740326e-05, |
|
"loss": 0.715, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.12271166094234552, |
|
"grad_norm": 1.5630656172291801, |
|
"learning_rate": 2.9050712741452136e-05, |
|
"loss": 0.7136, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.12404548334389276, |
|
"grad_norm": 7.870543414771234, |
|
"learning_rate": 2.902835864223715e-05, |
|
"loss": 0.6669, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.12537930574543998, |
|
"grad_norm": 4.843671834991794, |
|
"learning_rate": 2.9005753201606287e-05, |
|
"loss": 0.7281, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.12671312814698724, |
|
"grad_norm": 3.010503818258016, |
|
"learning_rate": 2.8982896824573678e-05, |
|
"loss": 0.7018, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.12804695054853446, |
|
"grad_norm": 2.5552186559589654, |
|
"learning_rate": 2.8959789920649394e-05, |
|
"loss": 0.7338, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.12938077295008168, |
|
"grad_norm": 12.306055851495117, |
|
"learning_rate": 2.893643290383212e-05, |
|
"loss": 0.6732, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.13071459535162894, |
|
"grad_norm": 2.16185926525944, |
|
"learning_rate": 2.891282619260172e-05, |
|
"loss": 0.7108, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.13204841775317616, |
|
"grad_norm": 5.992378798792086, |
|
"learning_rate": 2.8888970209911754e-05, |
|
"loss": 0.6525, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.13338224015472339, |
|
"grad_norm": 2.986272238787896, |
|
"learning_rate": 2.8864865383181893e-05, |
|
"loss": 0.6655, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.13471606255627064, |
|
"grad_norm": 12.855377354582437, |
|
"learning_rate": 2.8840512144290273e-05, |
|
"loss": 0.6826, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.13604988495781786, |
|
"grad_norm": 2.045979893776702, |
|
"learning_rate": 2.8815910929565734e-05, |
|
"loss": 0.6616, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.1373837073593651, |
|
"grad_norm": 6.623264301300591, |
|
"learning_rate": 2.879106217978002e-05, |
|
"loss": 0.6935, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.13871752976091234, |
|
"grad_norm": 2.67990218211766, |
|
"learning_rate": 2.8765966340139892e-05, |
|
"loss": 0.6671, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.14005135216245956, |
|
"grad_norm": 2.699521523924172, |
|
"learning_rate": 2.8740623860279116e-05, |
|
"loss": 0.6763, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.14138517456400682, |
|
"grad_norm": 4.1129898011507535, |
|
"learning_rate": 2.871503519425044e-05, |
|
"loss": 0.7159, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.14271899696555404, |
|
"grad_norm": 2.4592021333659146, |
|
"learning_rate": 2.8689200800517448e-05, |
|
"loss": 0.6551, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.14405281936710126, |
|
"grad_norm": 5.138500389099849, |
|
"learning_rate": 2.866312114194634e-05, |
|
"loss": 0.7214, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.14538664176864852, |
|
"grad_norm": 2.822433730666048, |
|
"learning_rate": 2.8636796685797657e-05, |
|
"loss": 0.6862, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.14672046417019574, |
|
"grad_norm": 3.086468537427806, |
|
"learning_rate": 2.8610227903717876e-05, |
|
"loss": 0.6784, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.14805428657174297, |
|
"grad_norm": 2.079766793749202, |
|
"learning_rate": 2.8583415271730994e-05, |
|
"loss": 0.7065, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.14938810897329022, |
|
"grad_norm": 1.659870509072264, |
|
"learning_rate": 2.855635927022998e-05, |
|
"loss": 0.7197, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.15072193137483744, |
|
"grad_norm": 7.870626779339635, |
|
"learning_rate": 2.8529060383968175e-05, |
|
"loss": 0.7305, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.15205575377638467, |
|
"grad_norm": 3.0600340899893537, |
|
"learning_rate": 2.850151910205061e-05, |
|
"loss": 0.6922, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.15338957617793192, |
|
"grad_norm": 3.6147451373702806, |
|
"learning_rate": 2.847373591792523e-05, |
|
"loss": 0.7044, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.15472339857947914, |
|
"grad_norm": 4.740777951553679, |
|
"learning_rate": 2.844571132937407e-05, |
|
"loss": 0.6794, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.15605722098102637, |
|
"grad_norm": 3.377522973717319, |
|
"learning_rate": 2.841744583850431e-05, |
|
"loss": 0.673, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.15739104338257362, |
|
"grad_norm": 4.250656077289992, |
|
"learning_rate": 2.838893995173932e-05, |
|
"loss": 0.6975, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.15872486578412084, |
|
"grad_norm": 11.73693900915769, |
|
"learning_rate": 2.836019417980955e-05, |
|
"loss": 0.6572, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.16005868818566807, |
|
"grad_norm": 2.729291714043308, |
|
"learning_rate": 2.8331209037743387e-05, |
|
"loss": 0.7247, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.16139251058721532, |
|
"grad_norm": 2.347985877636318, |
|
"learning_rate": 2.8301985044857947e-05, |
|
"loss": 0.7199, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.16272633298876255, |
|
"grad_norm": 2.2534314586033113, |
|
"learning_rate": 2.8272522724749743e-05, |
|
"loss": 0.6835, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.16406015539030977, |
|
"grad_norm": 3.159583116387406, |
|
"learning_rate": 2.8242822605285323e-05, |
|
"loss": 0.7122, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.16539397779185702, |
|
"grad_norm": 2.086588782887239, |
|
"learning_rate": 2.8212885218591812e-05, |
|
"loss": 0.6949, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.16672780019340425, |
|
"grad_norm": 7.284236966547317, |
|
"learning_rate": 2.8182711101047362e-05, |
|
"loss": 0.6641, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.16806162259495147, |
|
"grad_norm": 3.0369619450249594, |
|
"learning_rate": 2.815230079327156e-05, |
|
"loss": 0.6731, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.16939544499649872, |
|
"grad_norm": 1.4144726574636068, |
|
"learning_rate": 2.8121654840115734e-05, |
|
"loss": 0.6898, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.17072926739804595, |
|
"grad_norm": 3.66202356670303, |
|
"learning_rate": 2.809077379065319e-05, |
|
"loss": 0.7174, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.17206308979959317, |
|
"grad_norm": 4.778073521019285, |
|
"learning_rate": 2.805965819816937e-05, |
|
"loss": 0.6186, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.17339691220114042, |
|
"grad_norm": 3.9620427201734576, |
|
"learning_rate": 2.802830862015196e-05, |
|
"loss": 0.684, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.17473073460268765, |
|
"grad_norm": 4.170199740083487, |
|
"learning_rate": 2.799672561828087e-05, |
|
"loss": 0.7102, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.17606455700423487, |
|
"grad_norm": 2.2612205048804714, |
|
"learning_rate": 2.79649097584182e-05, |
|
"loss": 0.7451, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.17739837940578213, |
|
"grad_norm": 1.7156828128822517, |
|
"learning_rate": 2.7932861610598077e-05, |
|
"loss": 0.6641, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.17873220180732935, |
|
"grad_norm": 7.960733847217257, |
|
"learning_rate": 2.7900581749016466e-05, |
|
"loss": 0.7365, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.1800660242088766, |
|
"grad_norm": 2.5364939682563756, |
|
"learning_rate": 2.7868070752020865e-05, |
|
"loss": 0.7078, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.18139984661042383, |
|
"grad_norm": 2.7446281678776137, |
|
"learning_rate": 2.7835329202099944e-05, |
|
"loss": 0.7214, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.18273366901197105, |
|
"grad_norm": 3.2416602016145886, |
|
"learning_rate": 2.7802357685873117e-05, |
|
"loss": 0.6757, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.1840674914135183, |
|
"grad_norm": 5.225459736579946, |
|
"learning_rate": 2.7769156794080033e-05, |
|
"loss": 0.7381, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.18540131381506553, |
|
"grad_norm": 5.176692689501482, |
|
"learning_rate": 2.7735727121569967e-05, |
|
"loss": 0.7354, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.18673513621661275, |
|
"grad_norm": 2.7441883232342574, |
|
"learning_rate": 2.770206926729121e-05, |
|
"loss": 0.6937, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.18806895861816, |
|
"grad_norm": 2.9792116246243525, |
|
"learning_rate": 2.7668183834280284e-05, |
|
"loss": 0.6641, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.18940278101970723, |
|
"grad_norm": 2.4645298487410723, |
|
"learning_rate": 2.763407142965117e-05, |
|
"loss": 0.6274, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.19073660342125445, |
|
"grad_norm": 7.245032878035033, |
|
"learning_rate": 2.759973266458444e-05, |
|
"loss": 0.6962, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.1920704258228017, |
|
"grad_norm": 5.642209662597534, |
|
"learning_rate": 2.756516815431627e-05, |
|
"loss": 0.7016, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.19340424822434893, |
|
"grad_norm": 2.9804981875184526, |
|
"learning_rate": 2.7530378518127445e-05, |
|
"loss": 0.7331, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.19473807062589615, |
|
"grad_norm": 7.496561660992361, |
|
"learning_rate": 2.7495364379332256e-05, |
|
"loss": 0.7234, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.1960718930274434, |
|
"grad_norm": 1.6139389803246291, |
|
"learning_rate": 2.7460126365267335e-05, |
|
"loss": 0.7013, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.19740571542899063, |
|
"grad_norm": 4.618678334755141, |
|
"learning_rate": 2.7424665107280402e-05, |
|
"loss": 0.6892, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.19873953783053785, |
|
"grad_norm": 15.494190234738744, |
|
"learning_rate": 2.738898124071898e-05, |
|
"loss": 0.6785, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.2000733602320851, |
|
"grad_norm": 3.1680363319798954, |
|
"learning_rate": 2.735307540491898e-05, |
|
"loss": 0.669, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.20140718263363233, |
|
"grad_norm": 2.5397562341036224, |
|
"learning_rate": 2.7316948243193273e-05, |
|
"loss": 0.6726, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.20274100503517956, |
|
"grad_norm": 4.139021422606072, |
|
"learning_rate": 2.7280600402820146e-05, |
|
"loss": 0.6706, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.2040748274367268, |
|
"grad_norm": 2.7422468825646065, |
|
"learning_rate": 2.724403253503171e-05, |
|
"loss": 0.7078, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.20540864983827403, |
|
"grad_norm": 2.744225768808104, |
|
"learning_rate": 2.7207245295002242e-05, |
|
"loss": 0.6821, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.20674247223982126, |
|
"grad_norm": 2.234040668790152, |
|
"learning_rate": 2.7170239341836436e-05, |
|
"loss": 0.7451, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.2080762946413685, |
|
"grad_norm": 2.531733996425376, |
|
"learning_rate": 2.7133015338557585e-05, |
|
"loss": 0.7205, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.20941011704291573, |
|
"grad_norm": 2.9772483856455616, |
|
"learning_rate": 2.7095573952095727e-05, |
|
"loss": 0.7274, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.21074393944446296, |
|
"grad_norm": 3.317235333047955, |
|
"learning_rate": 2.705791585327568e-05, |
|
"loss": 0.7309, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.2120777618460102, |
|
"grad_norm": 1.9652386793628944, |
|
"learning_rate": 2.7020041716805014e-05, |
|
"loss": 0.7157, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.21341158424755743, |
|
"grad_norm": 2.93724058913164, |
|
"learning_rate": 2.6981952221261986e-05, |
|
"loss": 0.7123, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.21474540664910466, |
|
"grad_norm": 6.395577225750395, |
|
"learning_rate": 2.6943648049083366e-05, |
|
"loss": 0.6991, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.2160792290506519, |
|
"grad_norm": 2.4292347967714973, |
|
"learning_rate": 2.6905129886552208e-05, |
|
"loss": 0.7004, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.21741305145219914, |
|
"grad_norm": 1.8304810950546353, |
|
"learning_rate": 2.6866398423785568e-05, |
|
"loss": 0.6941, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.2187468738537464, |
|
"grad_norm": 2.762870839632077, |
|
"learning_rate": 2.682745435472212e-05, |
|
"loss": 0.6928, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.2200806962552936, |
|
"grad_norm": 3.4172019229090917, |
|
"learning_rate": 2.6788298377109748e-05, |
|
"loss": 0.7344, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.22141451865684084, |
|
"grad_norm": 2.7483538989548175, |
|
"learning_rate": 2.6748931192493017e-05, |
|
"loss": 0.7367, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.2227483410583881, |
|
"grad_norm": 7.314729269236597, |
|
"learning_rate": 2.670935350620063e-05, |
|
"loss": 0.6849, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.2240821634599353, |
|
"grad_norm": 3.8688065039432527, |
|
"learning_rate": 2.6669566027332767e-05, |
|
"loss": 0.6812, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.22541598586148254, |
|
"grad_norm": 7.10517346658295, |
|
"learning_rate": 2.6629569468748404e-05, |
|
"loss": 0.6089, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.2267498082630298, |
|
"grad_norm": 2.4198822683275147, |
|
"learning_rate": 2.658936454705251e-05, |
|
"loss": 0.6666, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.22808363066457701, |
|
"grad_norm": 2.4915285584652054, |
|
"learning_rate": 2.6548951982583246e-05, |
|
"loss": 0.7088, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.22941745306612424, |
|
"grad_norm": 2.2849831540010537, |
|
"learning_rate": 2.650833249939903e-05, |
|
"loss": 0.7149, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.2307512754676715, |
|
"grad_norm": 1.5098088938051029, |
|
"learning_rate": 2.6467506825265573e-05, |
|
"loss": 0.7254, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.23208509786921871, |
|
"grad_norm": 3.4800248296443814, |
|
"learning_rate": 2.642647569164284e-05, |
|
"loss": 0.6916, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.23341892027076594, |
|
"grad_norm": 7.281500947090542, |
|
"learning_rate": 2.638523983367194e-05, |
|
"loss": 0.6831, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.2347527426723132, |
|
"grad_norm": 3.0161864395495446, |
|
"learning_rate": 2.634379999016198e-05, |
|
"loss": 0.6999, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.23608656507386042, |
|
"grad_norm": 2.0917745352156762, |
|
"learning_rate": 2.6302156903576784e-05, |
|
"loss": 0.7112, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.23742038747540764, |
|
"grad_norm": 1.918811185774526, |
|
"learning_rate": 2.6260311320021628e-05, |
|
"loss": 0.6725, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.2387542098769549, |
|
"grad_norm": 3.0697413876733695, |
|
"learning_rate": 2.6218263989229855e-05, |
|
"loss": 0.7133, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.24008803227850212, |
|
"grad_norm": 6.14274393655379, |
|
"learning_rate": 2.617601566454944e-05, |
|
"loss": 0.6678, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.24142185468004934, |
|
"grad_norm": 4.259979200715344, |
|
"learning_rate": 2.613356710292951e-05, |
|
"loss": 0.7013, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.2427556770815966, |
|
"grad_norm": 3.1011058557692808, |
|
"learning_rate": 2.6090919064906766e-05, |
|
"loss": 0.7027, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.24408949948314382, |
|
"grad_norm": 3.677900978078831, |
|
"learning_rate": 2.6048072314591854e-05, |
|
"loss": 0.711, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.24542332188469104, |
|
"grad_norm": 2.368576699713982, |
|
"learning_rate": 2.600502761965569e-05, |
|
"loss": 0.6917, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.2467571442862383, |
|
"grad_norm": 3.0346306894457, |
|
"learning_rate": 2.59617857513157e-05, |
|
"loss": 0.69, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.24809096668778552, |
|
"grad_norm": 3.1228131080916204, |
|
"learning_rate": 2.591834748432198e-05, |
|
"loss": 0.695, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.24942478908933274, |
|
"grad_norm": 2.6886660685401034, |
|
"learning_rate": 2.5874713596943465e-05, |
|
"loss": 0.6681, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.25075861149087997, |
|
"grad_norm": 1.7244460999561722, |
|
"learning_rate": 2.5830884870953933e-05, |
|
"loss": 0.6737, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.25209243389242725, |
|
"grad_norm": 2.4283725332509842, |
|
"learning_rate": 2.578686209161803e-05, |
|
"loss": 0.6598, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.2534262562939745, |
|
"grad_norm": 5.496556851547161, |
|
"learning_rate": 2.5742646047677186e-05, |
|
"loss": 0.6931, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.2547600786955217, |
|
"grad_norm": 1.2751270156124934, |
|
"learning_rate": 2.5698237531335493e-05, |
|
"loss": 0.7043, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.2560939010970689, |
|
"grad_norm": 8.807017683974516, |
|
"learning_rate": 2.56536373382455e-05, |
|
"loss": 0.6234, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.25742772349861615, |
|
"grad_norm": 3.6331868296726277, |
|
"learning_rate": 2.5608846267493974e-05, |
|
"loss": 0.6763, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.25876154590016337, |
|
"grad_norm": 5.094905230807839, |
|
"learning_rate": 2.5563865121587563e-05, |
|
"loss": 0.6692, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.26009536830171065, |
|
"grad_norm": 2.0520732769663237, |
|
"learning_rate": 2.5518694706438445e-05, |
|
"loss": 0.7008, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.2614291907032579, |
|
"grad_norm": 2.1265138955486336, |
|
"learning_rate": 2.5473335831349842e-05, |
|
"loss": 0.6623, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.2627630131048051, |
|
"grad_norm": 4.532469697105077, |
|
"learning_rate": 2.5427789309001577e-05, |
|
"loss": 0.7099, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.2640968355063523, |
|
"grad_norm": 1.8912900905557881, |
|
"learning_rate": 2.538205595543548e-05, |
|
"loss": 0.712, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.26543065790789955, |
|
"grad_norm": 9.714825687307293, |
|
"learning_rate": 2.5336136590040767e-05, |
|
"loss": 0.6418, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.26676448030944677, |
|
"grad_norm": 4.375615975749738, |
|
"learning_rate": 2.529003203553937e-05, |
|
"loss": 0.6933, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.26809830271099405, |
|
"grad_norm": 5.945657366701919, |
|
"learning_rate": 2.5243743117971186e-05, |
|
"loss": 0.6748, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.2694321251125413, |
|
"grad_norm": 7.453951551881255, |
|
"learning_rate": 2.5197270666679295e-05, |
|
"loss": 0.7004, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.2707659475140885, |
|
"grad_norm": 2.3916662603858665, |
|
"learning_rate": 2.515061551429509e-05, |
|
"loss": 0.6961, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.2720997699156357, |
|
"grad_norm": 3.5972047868369104, |
|
"learning_rate": 2.5103778496723334e-05, |
|
"loss": 0.7058, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.27343359231718295, |
|
"grad_norm": 4.525268184238612, |
|
"learning_rate": 2.5056760453127242e-05, |
|
"loss": 0.6704, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.2747674147187302, |
|
"grad_norm": 5.9581146555788465, |
|
"learning_rate": 2.5009562225913385e-05, |
|
"loss": 0.6722, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.27610123712027745, |
|
"grad_norm": 4.163590223716233, |
|
"learning_rate": 2.4962184660716645e-05, |
|
"loss": 0.6933, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.2774350595218247, |
|
"grad_norm": 2.0180801697563258, |
|
"learning_rate": 2.4914628606385022e-05, |
|
"loss": 0.6982, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.2787688819233719, |
|
"grad_norm": 2.3996169579330373, |
|
"learning_rate": 2.4866894914964462e-05, |
|
"loss": 0.6832, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.2801027043249191, |
|
"grad_norm": 20.07054133895426, |
|
"learning_rate": 2.481898444168357e-05, |
|
"loss": 0.6871, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.28143652672646635, |
|
"grad_norm": 3.563765719247629, |
|
"learning_rate": 2.4770898044938284e-05, |
|
"loss": 0.703, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.28277034912801363, |
|
"grad_norm": 1.9816905810381245, |
|
"learning_rate": 2.4722636586276522e-05, |
|
"loss": 0.7132, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.28410417152956086, |
|
"grad_norm": 4.0053115388283205, |
|
"learning_rate": 2.4674200930382712e-05, |
|
"loss": 0.6991, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.2854379939311081, |
|
"grad_norm": 1.9643538302216321, |
|
"learning_rate": 2.4625591945062326e-05, |
|
"loss": 0.7182, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.2867718163326553, |
|
"grad_norm": 1.7027289253737494, |
|
"learning_rate": 2.4576810501226318e-05, |
|
"loss": 0.6856, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.28810563873420253, |
|
"grad_norm": 3.394597130806682, |
|
"learning_rate": 2.4527857472875515e-05, |
|
"loss": 0.7013, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.28943946113574975, |
|
"grad_norm": 2.766786923916393, |
|
"learning_rate": 2.447873373708498e-05, |
|
"loss": 0.6913, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.29077328353729703, |
|
"grad_norm": 6.781532105937228, |
|
"learning_rate": 2.4429440173988275e-05, |
|
"loss": 0.7401, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.29210710593884426, |
|
"grad_norm": 2.6220209383444946, |
|
"learning_rate": 2.43799776667617e-05, |
|
"loss": 0.7287, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.2934409283403915, |
|
"grad_norm": 4.597566226152422, |
|
"learning_rate": 2.4330347101608492e-05, |
|
"loss": 0.6664, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.2947747507419387, |
|
"grad_norm": 3.15622915128866, |
|
"learning_rate": 2.428054936774289e-05, |
|
"loss": 0.6757, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.29610857314348593, |
|
"grad_norm": 3.5777836932521065, |
|
"learning_rate": 2.423058535737427e-05, |
|
"loss": 0.7396, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.29744239554503316, |
|
"grad_norm": 2.505384749600403, |
|
"learning_rate": 2.418045596569111e-05, |
|
"loss": 0.7156, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.29877621794658044, |
|
"grad_norm": 15.640998645324629, |
|
"learning_rate": 2.4130162090844976e-05, |
|
"loss": 0.7016, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.30011004034812766, |
|
"grad_norm": 6.1147200283733865, |
|
"learning_rate": 2.4079704633934427e-05, |
|
"loss": 0.6835, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.3014438627496749, |
|
"grad_norm": 2.4704828096249907, |
|
"learning_rate": 2.4029084498988864e-05, |
|
"loss": 0.717, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.3027776851512221, |
|
"grad_norm": 3.624817679194012, |
|
"learning_rate": 2.3978302592952332e-05, |
|
"loss": 0.6863, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.30411150755276933, |
|
"grad_norm": 7.1778372122735155, |
|
"learning_rate": 2.392735982566728e-05, |
|
"loss": 0.7057, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.30544532995431656, |
|
"grad_norm": 1.541203747230883, |
|
"learning_rate": 2.387625710985826e-05, |
|
"loss": 0.6755, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.30677915235586384, |
|
"grad_norm": 5.290753363343769, |
|
"learning_rate": 2.3824995361115552e-05, |
|
"loss": 0.7214, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.30811297475741106, |
|
"grad_norm": 11.18524078914846, |
|
"learning_rate": 2.3773575497878784e-05, |
|
"loss": 0.687, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.3094467971589583, |
|
"grad_norm": 2.8473409260968854, |
|
"learning_rate": 2.372199844142048e-05, |
|
"loss": 0.6588, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.3107806195605055, |
|
"grad_norm": 3.6509202763742894, |
|
"learning_rate": 2.3670265115829523e-05, |
|
"loss": 0.7146, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.31211444196205274, |
|
"grad_norm": 2.86323212169014, |
|
"learning_rate": 2.3618376447994633e-05, |
|
"loss": 0.6965, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.31344826436359996, |
|
"grad_norm": 1.6724444694024563, |
|
"learning_rate": 2.3566333367587737e-05, |
|
"loss": 0.6827, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.31478208676514724, |
|
"grad_norm": 3.7438462947121876, |
|
"learning_rate": 2.3514136807047318e-05, |
|
"loss": 0.677, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.31611590916669446, |
|
"grad_norm": 3.150319939971515, |
|
"learning_rate": 2.3461787701561724e-05, |
|
"loss": 0.6926, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.3174497315682417, |
|
"grad_norm": 1.9724696911512674, |
|
"learning_rate": 2.340928698905239e-05, |
|
"loss": 0.7269, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.3187835539697889, |
|
"grad_norm": 2.6615995505256604, |
|
"learning_rate": 2.335663561015704e-05, |
|
"loss": 0.719, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.32011737637133614, |
|
"grad_norm": 3.648818329043563, |
|
"learning_rate": 2.3303834508212845e-05, |
|
"loss": 0.6593, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.3214511987728834, |
|
"grad_norm": 5.032935766388129, |
|
"learning_rate": 2.325088462923951e-05, |
|
"loss": 0.7018, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.32278502117443064, |
|
"grad_norm": 5.116190153583237, |
|
"learning_rate": 2.319778692192233e-05, |
|
"loss": 0.6138, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.32411884357597787, |
|
"grad_norm": 8.77553429349065, |
|
"learning_rate": 2.3144542337595196e-05, |
|
"loss": 0.6995, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.3254526659775251, |
|
"grad_norm": 4.020402137418298, |
|
"learning_rate": 2.3091151830223537e-05, |
|
"loss": 0.6935, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.3267864883790723, |
|
"grad_norm": 2.326990350307363, |
|
"learning_rate": 2.3037616356387237e-05, |
|
"loss": 0.6657, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.32812031078061954, |
|
"grad_norm": 1.9450305290081706, |
|
"learning_rate": 2.2983936875263495e-05, |
|
"loss": 0.6884, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.3294541331821668, |
|
"grad_norm": 2.4083218262957407, |
|
"learning_rate": 2.2930114348609655e-05, |
|
"loss": 0.6324, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.33078795558371404, |
|
"grad_norm": 4.469293094525185, |
|
"learning_rate": 2.2876149740745935e-05, |
|
"loss": 0.7054, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.33212177798526127, |
|
"grad_norm": 3.0408327884382613, |
|
"learning_rate": 2.28220440185382e-05, |
|
"loss": 0.6996, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.3334556003868085, |
|
"grad_norm": 2.5340984000691273, |
|
"learning_rate": 2.2767798151380597e-05, |
|
"loss": 0.6908, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.3347894227883557, |
|
"grad_norm": 2.4867165525033, |
|
"learning_rate": 2.27134131111782e-05, |
|
"loss": 0.6838, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.33612324518990294, |
|
"grad_norm": 14.755496795057269, |
|
"learning_rate": 2.2658889872329628e-05, |
|
"loss": 0.7072, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.3374570675914502, |
|
"grad_norm": 11.498768616138861, |
|
"learning_rate": 2.2604229411709518e-05, |
|
"loss": 0.6837, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.33879088999299745, |
|
"grad_norm": 1.6627733851927542, |
|
"learning_rate": 2.25494327086511e-05, |
|
"loss": 0.6948, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.34012471239454467, |
|
"grad_norm": 4.465322393758394, |
|
"learning_rate": 2.2494500744928583e-05, |
|
"loss": 0.706, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.3414585347960919, |
|
"grad_norm": 2.5329140738676714, |
|
"learning_rate": 2.243943450473963e-05, |
|
"loss": 0.6652, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.3427923571976391, |
|
"grad_norm": 2.6213955428320963, |
|
"learning_rate": 2.2384234974687658e-05, |
|
"loss": 0.7123, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.34412617959918634, |
|
"grad_norm": 2.8450668136715827, |
|
"learning_rate": 2.2328903143764216e-05, |
|
"loss": 0.6748, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.3454600020007336, |
|
"grad_norm": 9.246863580911334, |
|
"learning_rate": 2.2273440003331237e-05, |
|
"loss": 0.6774, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.34679382440228085, |
|
"grad_norm": 2.610989556515575, |
|
"learning_rate": 2.2217846547103275e-05, |
|
"loss": 0.7042, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.3481276468038281, |
|
"grad_norm": 7.325969061692186, |
|
"learning_rate": 2.216212377112972e-05, |
|
"loss": 0.6834, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.3494614692053753, |
|
"grad_norm": 3.001379331751721, |
|
"learning_rate": 2.2106272673776934e-05, |
|
"loss": 0.7033, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.3507952916069225, |
|
"grad_norm": 3.463073346975308, |
|
"learning_rate": 2.2050294255710375e-05, |
|
"loss": 0.6839, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.35212911400846975, |
|
"grad_norm": 3.524564101951424, |
|
"learning_rate": 2.1994189519876663e-05, |
|
"loss": 0.6948, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.353462936410017, |
|
"grad_norm": 3.152341329769827, |
|
"learning_rate": 2.19379594714856e-05, |
|
"loss": 0.6767, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.35479675881156425, |
|
"grad_norm": 4.2343916663936305, |
|
"learning_rate": 2.188160511799219e-05, |
|
"loss": 0.6755, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.3561305812131115, |
|
"grad_norm": 2.7909676165285813, |
|
"learning_rate": 2.1825127469078555e-05, |
|
"loss": 0.6694, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.3574644036146587, |
|
"grad_norm": 1.8765416483232782, |
|
"learning_rate": 2.1768527536635868e-05, |
|
"loss": 0.7031, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.3587982260162059, |
|
"grad_norm": 13.262978009985517, |
|
"learning_rate": 2.171180633474621e-05, |
|
"loss": 0.7371, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.3601320484177532, |
|
"grad_norm": 3.886717400478723, |
|
"learning_rate": 2.1654964879664407e-05, |
|
"loss": 0.7109, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.3614658708193004, |
|
"grad_norm": 2.040560351248799, |
|
"learning_rate": 2.1598004189799826e-05, |
|
"loss": 0.7274, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.36279969322084765, |
|
"grad_norm": 24.610089275348535, |
|
"learning_rate": 2.1540925285698122e-05, |
|
"loss": 0.6886, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.3641335156223949, |
|
"grad_norm": 3.6439264742220216, |
|
"learning_rate": 2.148372919002295e-05, |
|
"loss": 0.681, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.3654673380239421, |
|
"grad_norm": 5.83580774778366, |
|
"learning_rate": 2.142641692753765e-05, |
|
"loss": 0.6502, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.3668011604254893, |
|
"grad_norm": 1.8530940550203352, |
|
"learning_rate": 2.1368989525086893e-05, |
|
"loss": 0.6854, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.3681349828270366, |
|
"grad_norm": 5.003536499561226, |
|
"learning_rate": 2.1311448011578255e-05, |
|
"loss": 0.6699, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.36946880522858383, |
|
"grad_norm": 2.6889933495770912, |
|
"learning_rate": 2.125379341796382e-05, |
|
"loss": 0.741, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.37080262763013105, |
|
"grad_norm": 2.0672372686575575, |
|
"learning_rate": 2.1196026777221684e-05, |
|
"loss": 0.693, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.3721364500316783, |
|
"grad_norm": 3.023122371840424, |
|
"learning_rate": 2.1138149124337448e-05, |
|
"loss": 0.7227, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.3734702724332255, |
|
"grad_norm": 5.98908480573641, |
|
"learning_rate": 2.108016149628569e-05, |
|
"loss": 0.6875, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.3748040948347727, |
|
"grad_norm": 13.324804502845906, |
|
"learning_rate": 2.102206493201137e-05, |
|
"loss": 0.6693, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.37613791723632, |
|
"grad_norm": 2.877158805709884, |
|
"learning_rate": 2.096386047241123e-05, |
|
"loss": 0.6752, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.37747173963786723, |
|
"grad_norm": 3.417018003930411, |
|
"learning_rate": 2.0905549160315116e-05, |
|
"loss": 0.6874, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.37880556203941446, |
|
"grad_norm": 6.197947611584602, |
|
"learning_rate": 2.084713204046734e-05, |
|
"loss": 0.6995, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.3801393844409617, |
|
"grad_norm": 2.4400537269180327, |
|
"learning_rate": 2.078861015950793e-05, |
|
"loss": 0.718, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.3814732068425089, |
|
"grad_norm": 3.4313321352162878, |
|
"learning_rate": 2.072998456595387e-05, |
|
"loss": 0.6928, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.38280702924405613, |
|
"grad_norm": 3.323108743280233, |
|
"learning_rate": 2.0671256310180334e-05, |
|
"loss": 0.7141, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.3841408516456034, |
|
"grad_norm": 2.270407423855968, |
|
"learning_rate": 2.0612426444401874e-05, |
|
"loss": 0.6677, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.38547467404715063, |
|
"grad_norm": 4.473087793045971, |
|
"learning_rate": 2.0553496022653535e-05, |
|
"loss": 0.706, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.38680849644869786, |
|
"grad_norm": 4.498504602131192, |
|
"learning_rate": 2.0494466100772006e-05, |
|
"loss": 0.6783, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.3881423188502451, |
|
"grad_norm": 1.8721168603816298, |
|
"learning_rate": 2.0435337736376677e-05, |
|
"loss": 0.7327, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.3894761412517923, |
|
"grad_norm": 2.1819398242824093, |
|
"learning_rate": 2.03761119888507e-05, |
|
"loss": 0.6798, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.39080996365333953, |
|
"grad_norm": 29.747303047069977, |
|
"learning_rate": 2.031678991932201e-05, |
|
"loss": 0.7045, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.3921437860548868, |
|
"grad_norm": 4.708328967247123, |
|
"learning_rate": 2.0257372590644314e-05, |
|
"loss": 0.6896, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.39347760845643404, |
|
"grad_norm": 2.873510721340991, |
|
"learning_rate": 2.0197861067378044e-05, |
|
"loss": 0.6802, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.39481143085798126, |
|
"grad_norm": 4.540574995423212, |
|
"learning_rate": 2.0138256415771275e-05, |
|
"loss": 0.6219, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.3961452532595285, |
|
"grad_norm": 11.817372765224325, |
|
"learning_rate": 2.0078559703740654e-05, |
|
"loss": 0.65, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.3974790756610757, |
|
"grad_norm": 11.004144754692504, |
|
"learning_rate": 2.0018772000852216e-05, |
|
"loss": 0.7056, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.398812898062623, |
|
"grad_norm": 1.7365475356133573, |
|
"learning_rate": 1.9958894378302265e-05, |
|
"loss": 0.6827, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.4001467204641702, |
|
"grad_norm": 4.31426545646336, |
|
"learning_rate": 1.989892790889817e-05, |
|
"loss": 0.6796, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.40148054286571744, |
|
"grad_norm": 2.534413468413497, |
|
"learning_rate": 1.9838873667039134e-05, |
|
"loss": 0.6825, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.40281436526726466, |
|
"grad_norm": 2.5821079814088, |
|
"learning_rate": 1.9778732728696937e-05, |
|
"loss": 0.6522, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.4041481876688119, |
|
"grad_norm": 10.45675108188373, |
|
"learning_rate": 1.9718506171396694e-05, |
|
"loss": 0.6752, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.4054820100703591, |
|
"grad_norm": 10.969680268488736, |
|
"learning_rate": 1.965819507419751e-05, |
|
"loss": 0.7195, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.4068158324719064, |
|
"grad_norm": 9.540053007670354, |
|
"learning_rate": 1.9597800517673165e-05, |
|
"loss": 0.6762, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.4081496548734536, |
|
"grad_norm": 8.551702443669248, |
|
"learning_rate": 1.9537323583892753e-05, |
|
"loss": 0.7292, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.40948347727500084, |
|
"grad_norm": 3.0994689178852903, |
|
"learning_rate": 1.9476765356401304e-05, |
|
"loss": 0.6764, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.41081729967654806, |
|
"grad_norm": 3.1013298812228163, |
|
"learning_rate": 1.9416126920200344e-05, |
|
"loss": 0.6484, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.4121511220780953, |
|
"grad_norm": 2.00628497131861, |
|
"learning_rate": 1.9355409361728482e-05, |
|
"loss": 0.7094, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.4134849444796425, |
|
"grad_norm": 5.224082004633703, |
|
"learning_rate": 1.9294613768841932e-05, |
|
"loss": 0.7279, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.4148187668811898, |
|
"grad_norm": 18.62631978728915, |
|
"learning_rate": 1.9233741230795022e-05, |
|
"loss": 0.662, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.416152589282737, |
|
"grad_norm": 3.6495526914982968, |
|
"learning_rate": 1.9172792838220686e-05, |
|
"loss": 0.6836, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.41748641168428424, |
|
"grad_norm": 2.304337917905853, |
|
"learning_rate": 1.9111769683110914e-05, |
|
"loss": 0.6901, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.41882023408583147, |
|
"grad_norm": 8.427846401703292, |
|
"learning_rate": 1.905067285879719e-05, |
|
"loss": 0.6606, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.4201540564873787, |
|
"grad_norm": 2.2306668115119104, |
|
"learning_rate": 1.8989503459930908e-05, |
|
"loss": 0.7434, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.4214878788889259, |
|
"grad_norm": 2.231586663842237, |
|
"learning_rate": 1.892826258246376e-05, |
|
"loss": 0.7184, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.4228217012904732, |
|
"grad_norm": 5.804571835994344, |
|
"learning_rate": 1.886695132362808e-05, |
|
"loss": 0.7073, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.4241555236920204, |
|
"grad_norm": 4.7472512172058785, |
|
"learning_rate": 1.8805570781917228e-05, |
|
"loss": 0.7102, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.42548934609356764, |
|
"grad_norm": 1.723627694530291, |
|
"learning_rate": 1.8744122057065856e-05, |
|
"loss": 0.6828, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.42682316849511487, |
|
"grad_norm": 1.9952068710149184, |
|
"learning_rate": 1.868260625003024e-05, |
|
"loss": 0.6545, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.4281569908966621, |
|
"grad_norm": 4.588444559005735, |
|
"learning_rate": 1.8621024462968553e-05, |
|
"loss": 0.67, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.4294908132982093, |
|
"grad_norm": 2.155634253115107, |
|
"learning_rate": 1.85593777992211e-05, |
|
"loss": 0.7173, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.4308246356997566, |
|
"grad_norm": 3.3412948579128194, |
|
"learning_rate": 1.849766736329056e-05, |
|
"loss": 0.6364, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.4321584581013038, |
|
"grad_norm": 2.1344417176214607, |
|
"learning_rate": 1.8435894260822208e-05, |
|
"loss": 0.6919, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.43349228050285105, |
|
"grad_norm": 3.8410669902748764, |
|
"learning_rate": 1.8374059598584084e-05, |
|
"loss": 0.6524, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.43482610290439827, |
|
"grad_norm": 2.609728029777106, |
|
"learning_rate": 1.831216448444717e-05, |
|
"loss": 0.688, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.4361599253059455, |
|
"grad_norm": 2.182084710285402, |
|
"learning_rate": 1.8250210027365562e-05, |
|
"loss": 0.7327, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.4374937477074928, |
|
"grad_norm": 1.0672619638672702, |
|
"learning_rate": 1.818819733735657e-05, |
|
"loss": 0.7137, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.43882757010904, |
|
"grad_norm": 1.7248236414002174, |
|
"learning_rate": 1.812612752548084e-05, |
|
"loss": 0.6848, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.4401613925105872, |
|
"grad_norm": 2.717100059326369, |
|
"learning_rate": 1.806400170382246e-05, |
|
"loss": 0.6582, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.44149521491213445, |
|
"grad_norm": 2.7420980324781348, |
|
"learning_rate": 1.8001820985469026e-05, |
|
"loss": 0.6976, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.4428290373136817, |
|
"grad_norm": 3.9917362204420357, |
|
"learning_rate": 1.7939586484491704e-05, |
|
"loss": 0.7259, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.4441628597152289, |
|
"grad_norm": 3.2371945093430514, |
|
"learning_rate": 1.787729931592525e-05, |
|
"loss": 0.6883, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.4454966821167762, |
|
"grad_norm": 2.439245137250377, |
|
"learning_rate": 1.781496059574807e-05, |
|
"loss": 0.6876, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.4468305045183234, |
|
"grad_norm": 4.525984025887397, |
|
"learning_rate": 1.7752571440862178e-05, |
|
"loss": 0.6724, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.4481643269198706, |
|
"grad_norm": 2.3388903272276518, |
|
"learning_rate": 1.7690132969073223e-05, |
|
"loss": 0.7065, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.44949814932141785, |
|
"grad_norm": 6.946538587379132, |
|
"learning_rate": 1.7627646299070457e-05, |
|
"loss": 0.6444, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.4508319717229651, |
|
"grad_norm": 1.5334789635428385, |
|
"learning_rate": 1.7565112550406663e-05, |
|
"loss": 0.6597, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.4521657941245123, |
|
"grad_norm": 1.7438745925855814, |
|
"learning_rate": 1.7502532843478134e-05, |
|
"loss": 0.736, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.4534996165260596, |
|
"grad_norm": 2.352884928297456, |
|
"learning_rate": 1.743990829950458e-05, |
|
"loss": 0.7209, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.4548334389276068, |
|
"grad_norm": 2.589791551987411, |
|
"learning_rate": 1.737724004050903e-05, |
|
"loss": 0.6873, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.45616726132915403, |
|
"grad_norm": 1.5018800238986845, |
|
"learning_rate": 1.731452918929774e-05, |
|
"loss": 0.6993, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.45750108373070125, |
|
"grad_norm": 1.618737845945941, |
|
"learning_rate": 1.7251776869440097e-05, |
|
"loss": 0.719, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.4588349061322485, |
|
"grad_norm": 4.764891120811521, |
|
"learning_rate": 1.718898420524845e-05, |
|
"loss": 0.7066, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.4601687285337957, |
|
"grad_norm": 30.008073864717016, |
|
"learning_rate": 1.7126152321757985e-05, |
|
"loss": 0.7234, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.461502550935343, |
|
"grad_norm": 4.718402571866902, |
|
"learning_rate": 1.7063282344706577e-05, |
|
"loss": 0.671, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.4628363733368902, |
|
"grad_norm": 3.279168331496427, |
|
"learning_rate": 1.7000375400514602e-05, |
|
"loss": 0.6748, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.46417019573843743, |
|
"grad_norm": 4.202866783860852, |
|
"learning_rate": 1.693743261626476e-05, |
|
"loss": 0.7135, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.46550401813998465, |
|
"grad_norm": 2.959211747400748, |
|
"learning_rate": 1.68744551196819e-05, |
|
"loss": 0.6684, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.4668378405415319, |
|
"grad_norm": 3.7208053935256085, |
|
"learning_rate": 1.6811444039112787e-05, |
|
"loss": 0.6842, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.4681716629430791, |
|
"grad_norm": 1.8411337183473255, |
|
"learning_rate": 1.6748400503505905e-05, |
|
"loss": 0.6796, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.4695054853446264, |
|
"grad_norm": 1.5569024338481647, |
|
"learning_rate": 1.6685325642391223e-05, |
|
"loss": 0.7357, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.4708393077461736, |
|
"grad_norm": 2.30459532472586, |
|
"learning_rate": 1.662222058585996e-05, |
|
"loss": 0.6825, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.47217313014772083, |
|
"grad_norm": 1.6593076444414934, |
|
"learning_rate": 1.6559086464544334e-05, |
|
"loss": 0.7067, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.47350695254926806, |
|
"grad_norm": 2.6738168898709356, |
|
"learning_rate": 1.6495924409597305e-05, |
|
"loss": 0.665, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.4748407749508153, |
|
"grad_norm": 10.974918207024547, |
|
"learning_rate": 1.6432735552672317e-05, |
|
"loss": 0.705, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.4761745973523625, |
|
"grad_norm": 4.279092732465272, |
|
"learning_rate": 1.636952102590301e-05, |
|
"loss": 0.6858, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.4775084197539098, |
|
"grad_norm": 8.958608602390235, |
|
"learning_rate": 1.630628196188295e-05, |
|
"loss": 0.7022, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.478842242155457, |
|
"grad_norm": 1.2316277268276075, |
|
"learning_rate": 1.6243019493645315e-05, |
|
"loss": 0.7091, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.48017606455700423, |
|
"grad_norm": 1.6977852924595596, |
|
"learning_rate": 1.617973475464262e-05, |
|
"loss": 0.6725, |
|
"step": 3600 |
|
}, |
|
    {
      "epoch": 0.48150988695855146,
      "grad_norm": 9.102696583046576,
      "learning_rate": 1.6116428878726396e-05,
      "loss": 0.706,
      "step": 3610
    },
    {
      "epoch": 0.4828437093600987,
      "grad_norm": 2.983654314671525,
      "learning_rate": 1.6053103000126874e-05,
      "loss": 0.6663,
      "step": 3620
    },
    {
      "epoch": 0.48417753176164596,
      "grad_norm": 2.9273555172026304,
      "learning_rate": 1.598975825343267e-05,
      "loss": 0.6986,
      "step": 3630
    },
    {
      "epoch": 0.4855113541631932,
      "grad_norm": 2.4687475856334613,
      "learning_rate": 1.5926395773570447e-05,
      "loss": 0.7192,
      "step": 3640
    },
    {
      "epoch": 0.4868451765647404,
      "grad_norm": 4.171039626246759,
      "learning_rate": 1.5863016695784604e-05,
      "loss": 0.6702,
      "step": 3650
    },
    {
      "epoch": 0.48817899896628764,
      "grad_norm": 3.8655482044779337,
      "learning_rate": 1.5799622155616887e-05,
      "loss": 0.6568,
      "step": 3660
    },
    {
      "epoch": 0.48951282136783486,
      "grad_norm": 2.8245022157946362,
      "learning_rate": 1.5736213288886112e-05,
      "loss": 0.7075,
      "step": 3670
    },
    {
      "epoch": 0.4908466437693821,
      "grad_norm": 2.1969432272158556,
      "learning_rate": 1.567279123166776e-05,
      "loss": 0.7043,
      "step": 3680
    },
    {
      "epoch": 0.49218046617092936,
      "grad_norm": 3.7154807458182835,
      "learning_rate": 1.560935712027364e-05,
      "loss": 0.6467,
      "step": 3690
    },
    {
      "epoch": 0.4935142885724766,
      "grad_norm": 4.060155573527941,
      "learning_rate": 1.5545912091231543e-05,
      "loss": 0.6957,
      "step": 3700
    },
    {
      "epoch": 0.4948481109740238,
      "grad_norm": 2.057087008440973,
      "learning_rate": 1.548245728126486e-05,
      "loss": 0.6656,
      "step": 3710
    },
    {
      "epoch": 0.49618193337557104,
      "grad_norm": 1.975534767472513,
      "learning_rate": 1.5418993827272224e-05,
      "loss": 0.6867,
      "step": 3720
    },
    {
      "epoch": 0.49751575577711826,
      "grad_norm": 11.237169875747464,
      "learning_rate": 1.5355522866307144e-05,
      "loss": 0.693,
      "step": 3730
    },
    {
      "epoch": 0.4988495781786655,
      "grad_norm": 2.7505125088389066,
      "learning_rate": 1.529204553555762e-05,
      "loss": 0.6715,
      "step": 3740
    },
    {
      "epoch": 0.5001834005802127,
      "grad_norm": 14.47964311360144,
      "learning_rate": 1.522856297232579e-05,
      "loss": 0.6638,
      "step": 3750
    },
    {
      "epoch": 0.5015172229817599,
      "grad_norm": 1.4576903787797197,
      "learning_rate": 1.5165076314007529e-05,
      "loss": 0.6461,
      "step": 3760
    },
    {
      "epoch": 0.5028510453833072,
      "grad_norm": 4.190097060433623,
      "learning_rate": 1.5101586698072095e-05,
      "loss": 0.6997,
      "step": 3770
    },
    {
      "epoch": 0.5041848677848545,
      "grad_norm": 2.6358802196743887,
      "learning_rate": 1.5038095262041725e-05,
      "loss": 0.6805,
      "step": 3780
    },
    {
      "epoch": 0.5055186901864017,
      "grad_norm": 2.9885793100944484,
      "learning_rate": 1.4974603143471268e-05,
      "loss": 0.663,
      "step": 3790
    },
    {
      "epoch": 0.506852512587949,
      "grad_norm": 3.364287860442736,
      "learning_rate": 1.4911111479927804e-05,
      "loss": 0.6851,
      "step": 3800
    },
    {
      "epoch": 0.5081863349894962,
      "grad_norm": 6.415730527817265,
      "learning_rate": 1.4847621408970266e-05,
      "loss": 0.6544,
      "step": 3810
    },
    {
      "epoch": 0.5095201573910434,
      "grad_norm": 1.6327349630681778,
      "learning_rate": 1.4784134068129043e-05,
      "loss": 0.6629,
      "step": 3820
    },
    {
      "epoch": 0.5108539797925906,
      "grad_norm": 3.0622996050606783,
      "learning_rate": 1.4720650594885614e-05,
      "loss": 0.6651,
      "step": 3830
    },
    {
      "epoch": 0.5121878021941378,
      "grad_norm": 5.445942430441996,
      "learning_rate": 1.4657172126652167e-05,
      "loss": 0.664,
      "step": 3840
    },
    {
      "epoch": 0.5135216245956851,
      "grad_norm": 4.518334654823446,
      "learning_rate": 1.459369980075121e-05,
      "loss": 0.6959,
      "step": 3850
    },
    {
      "epoch": 0.5148554469972323,
      "grad_norm": 1.8471627413065406,
      "learning_rate": 1.4530234754395207e-05,
      "loss": 0.6774,
      "step": 3860
    },
    {
      "epoch": 0.5161892693987795,
      "grad_norm": 3.6484122755334525,
      "learning_rate": 1.4466778124666192e-05,
      "loss": 0.6825,
      "step": 3870
    },
    {
      "epoch": 0.5175230918003267,
      "grad_norm": 2.087118207544068,
      "learning_rate": 1.4403331048495404e-05,
      "loss": 0.6985,
      "step": 3880
    },
    {
      "epoch": 0.5188569142018741,
      "grad_norm": 11.878313425481934,
      "learning_rate": 1.4339894662642914e-05,
      "loss": 0.6764,
      "step": 3890
    },
    {
      "epoch": 0.5201907366034213,
      "grad_norm": 2.5453717997032115,
      "learning_rate": 1.4276470103677257e-05,
      "loss": 0.7091,
      "step": 3900
    },
    {
      "epoch": 0.5215245590049685,
      "grad_norm": 4.791248513372535,
      "learning_rate": 1.4213058507955072e-05,
      "loss": 0.644,
      "step": 3910
    },
    {
      "epoch": 0.5228583814065157,
      "grad_norm": 2.1955258954683545,
      "learning_rate": 1.4149661011600734e-05,
      "loss": 0.6954,
      "step": 3920
    },
    {
      "epoch": 0.524192203808063,
      "grad_norm": 3.5143987933185676,
      "learning_rate": 1.4086278750486017e-05,
      "loss": 0.6848,
      "step": 3930
    },
    {
      "epoch": 0.5255260262096102,
      "grad_norm": 3.168504700204386,
      "learning_rate": 1.4022912860209709e-05,
      "loss": 0.6752,
      "step": 3940
    },
    {
      "epoch": 0.5268598486111574,
      "grad_norm": 1.9655682723891459,
      "learning_rate": 1.3959564476077308e-05,
      "loss": 0.6904,
      "step": 3950
    },
    {
      "epoch": 0.5281936710127046,
      "grad_norm": 1.6897897373972772,
      "learning_rate": 1.389623473308065e-05,
      "loss": 0.6929,
      "step": 3960
    },
    {
      "epoch": 0.5295274934142519,
      "grad_norm": 4.400154605229998,
      "learning_rate": 1.3832924765877587e-05,
      "loss": 0.726,
      "step": 3970
    },
    {
      "epoch": 0.5308613158157991,
      "grad_norm": 2.790842978581456,
      "learning_rate": 1.3769635708771654e-05,
      "loss": 0.6724,
      "step": 3980
    },
    {
      "epoch": 0.5321951382173463,
      "grad_norm": 1.5712798066752716,
      "learning_rate": 1.3706368695691745e-05,
      "loss": 0.6703,
      "step": 3990
    },
    {
      "epoch": 0.5335289606188935,
      "grad_norm": 5.340886291219129,
      "learning_rate": 1.3643124860171801e-05,
      "loss": 0.6595,
      "step": 4000
    },
    {
      "epoch": 0.5348627830204409,
      "grad_norm": 1.985940330857511,
      "learning_rate": 1.35799053353305e-05,
      "loss": 0.6892,
      "step": 4010
    },
    {
      "epoch": 0.5361966054219881,
      "grad_norm": 3.917331449757074,
      "learning_rate": 1.3516711253850949e-05,
      "loss": 0.6417,
      "step": 4020
    },
    {
      "epoch": 0.5375304278235353,
      "grad_norm": 1.66962823795828,
      "learning_rate": 1.3453543747960393e-05,
      "loss": 0.6784,
      "step": 4030
    },
    {
      "epoch": 0.5388642502250826,
      "grad_norm": 4.181035760200595,
      "learning_rate": 1.3390403949409943e-05,
      "loss": 0.7115,
      "step": 4040
    },
    {
      "epoch": 0.5401980726266298,
      "grad_norm": 2.4193575665243214,
      "learning_rate": 1.3327292989454273e-05,
      "loss": 0.7104,
      "step": 4050
    },
    {
      "epoch": 0.541531895028177,
      "grad_norm": 2.0442192962046275,
      "learning_rate": 1.3264211998831374e-05,
      "loss": 0.7008,
      "step": 4060
    },
    {
      "epoch": 0.5428657174297242,
      "grad_norm": 3.0689852808863183,
      "learning_rate": 1.3201162107742285e-05,
      "loss": 0.677,
      "step": 4070
    },
    {
      "epoch": 0.5441995398312715,
      "grad_norm": 2.22632841251654,
      "learning_rate": 1.3138144445830841e-05,
      "loss": 0.6223,
      "step": 4080
    },
    {
      "epoch": 0.5455333622328187,
      "grad_norm": 8.813265719863766,
      "learning_rate": 1.3075160142163442e-05,
      "loss": 0.6791,
      "step": 4090
    },
    {
      "epoch": 0.5468671846343659,
      "grad_norm": 2.461550778463616,
      "learning_rate": 1.3012210325208818e-05,
      "loss": 0.7165,
      "step": 4100
    },
    {
      "epoch": 0.5482010070359131,
      "grad_norm": 2.1304508310591896,
      "learning_rate": 1.2949296122817813e-05,
      "loss": 0.6905,
      "step": 4110
    },
    {
      "epoch": 0.5495348294374603,
      "grad_norm": 2.1733622775851535,
      "learning_rate": 1.2886418662203174e-05,
      "loss": 0.6963,
      "step": 4120
    },
    {
      "epoch": 0.5508686518390077,
      "grad_norm": 2.654530675610581,
      "learning_rate": 1.282357906991936e-05,
      "loss": 0.6796,
      "step": 4130
    },
    {
      "epoch": 0.5522024742405549,
      "grad_norm": 2.6976858995246085,
      "learning_rate": 1.276077847184236e-05,
      "loss": 0.6922,
      "step": 4140
    },
    {
      "epoch": 0.5535362966421021,
      "grad_norm": 2.5591371381474857,
      "learning_rate": 1.2698017993149504e-05,
      "loss": 0.7047,
      "step": 4150
    },
    {
      "epoch": 0.5548701190436494,
      "grad_norm": 6.439964637422321,
      "learning_rate": 1.2635298758299336e-05,
      "loss": 0.6722,
      "step": 4160
    },
    {
      "epoch": 0.5562039414451966,
      "grad_norm": 1.6222259612163727,
      "learning_rate": 1.2572621891011426e-05,
      "loss": 0.6646,
      "step": 4170
    },
    {
      "epoch": 0.5575377638467438,
      "grad_norm": 3.410425968580818,
      "learning_rate": 1.2509988514246272e-05,
      "loss": 0.6894,
      "step": 4180
    },
    {
      "epoch": 0.558871586248291,
      "grad_norm": 2.7111542804682327,
      "learning_rate": 1.2447399750185166e-05,
      "loss": 0.7196,
      "step": 4190
    },
    {
      "epoch": 0.5602054086498383,
      "grad_norm": 3.3657872237953868,
      "learning_rate": 1.2384856720210086e-05,
      "loss": 0.7052,
      "step": 4200
    },
    {
      "epoch": 0.5615392310513855,
      "grad_norm": 3.4383001609998143,
      "learning_rate": 1.2322360544883608e-05,
      "loss": 0.664,
      "step": 4210
    },
    {
      "epoch": 0.5628730534529327,
      "grad_norm": 4.31412552867304,
      "learning_rate": 1.2259912343928831e-05,
      "loss": 0.6923,
      "step": 4220
    },
    {
      "epoch": 0.5642068758544799,
      "grad_norm": 2.9738159323747655,
      "learning_rate": 1.2197513236209312e-05,
      "loss": 0.6787,
      "step": 4230
    },
    {
      "epoch": 0.5655406982560273,
      "grad_norm": 14.42279175461777,
      "learning_rate": 1.213516433970902e-05,
      "loss": 0.7313,
      "step": 4240
    },
    {
      "epoch": 0.5668745206575745,
      "grad_norm": 2.6156276324588195,
      "learning_rate": 1.2072866771512306e-05,
      "loss": 0.6856,
      "step": 4250
    },
    {
      "epoch": 0.5682083430591217,
      "grad_norm": 2.692794641012978,
      "learning_rate": 1.201062164778389e-05,
      "loss": 0.6587,
      "step": 4260
    },
    {
      "epoch": 0.5695421654606689,
      "grad_norm": 3.01896569407463,
      "learning_rate": 1.1948430083748864e-05,
      "loss": 0.7225,
      "step": 4270
    },
    {
      "epoch": 0.5708759878622162,
      "grad_norm": 2.266424840293995,
      "learning_rate": 1.1886293193672707e-05,
      "loss": 0.6847,
      "step": 4280
    },
    {
      "epoch": 0.5722098102637634,
      "grad_norm": 2.2789387948762987,
      "learning_rate": 1.1824212090841321e-05,
      "loss": 0.7011,
      "step": 4290
    },
    {
      "epoch": 0.5735436326653106,
      "grad_norm": 2.826447974943076,
      "learning_rate": 1.1762187887541088e-05,
      "loss": 0.689,
      "step": 4300
    },
    {
      "epoch": 0.5748774550668578,
      "grad_norm": 2.565293440960005,
      "learning_rate": 1.1700221695038944e-05,
      "loss": 0.7077,
      "step": 4310
    },
    {
      "epoch": 0.5762112774684051,
      "grad_norm": 4.459154190124916,
      "learning_rate": 1.1638314623562459e-05,
      "loss": 0.6885,
      "step": 4320
    },
    {
      "epoch": 0.5775450998699523,
      "grad_norm": 1.8187338733285852,
      "learning_rate": 1.1576467782279953e-05,
      "loss": 0.7103,
      "step": 4330
    },
    {
      "epoch": 0.5788789222714995,
      "grad_norm": 4.078050868504266,
      "learning_rate": 1.1514682279280621e-05,
      "loss": 0.6742,
      "step": 4340
    },
    {
      "epoch": 0.5802127446730467,
      "grad_norm": 2.4612673583806233,
      "learning_rate": 1.1452959221554684e-05,
      "loss": 0.6941,
      "step": 4350
    },
    {
      "epoch": 0.5815465670745941,
      "grad_norm": 8.05059787591381,
      "learning_rate": 1.1391299714973553e-05,
      "loss": 0.7072,
      "step": 4360
    },
    {
      "epoch": 0.5828803894761413,
      "grad_norm": 5.041675641180621,
      "learning_rate": 1.1329704864270005e-05,
      "loss": 0.6914,
      "step": 4370
    },
    {
      "epoch": 0.5842142118776885,
      "grad_norm": 3.8176735967050672,
      "learning_rate": 1.1268175773018409e-05,
      "loss": 0.6489,
      "step": 4380
    },
    {
      "epoch": 0.5855480342792357,
      "grad_norm": 2.068471874891413,
      "learning_rate": 1.1206713543614942e-05,
      "loss": 0.7182,
      "step": 4390
    },
    {
      "epoch": 0.586881856680783,
      "grad_norm": 4.7154770167485065,
      "learning_rate": 1.1145319277257834e-05,
      "loss": 0.6961,
      "step": 4400
    },
    {
      "epoch": 0.5882156790823302,
      "grad_norm": 3.3453200032391917,
      "learning_rate": 1.108399407392765e-05,
      "loss": 0.701,
      "step": 4410
    },
    {
      "epoch": 0.5895495014838774,
      "grad_norm": 3.462978751346215,
      "learning_rate": 1.1022739032367572e-05,
      "loss": 0.6504,
      "step": 4420
    },
    {
      "epoch": 0.5908833238854246,
      "grad_norm": 3.9283885591229075,
      "learning_rate": 1.0961555250063718e-05,
      "loss": 0.7025,
      "step": 4430
    },
    {
      "epoch": 0.5922171462869719,
      "grad_norm": 2.2363832425317463,
      "learning_rate": 1.090044382322548e-05,
      "loss": 0.7106,
      "step": 4440
    },
    {
      "epoch": 0.5935509686885191,
      "grad_norm": 2.4683539157329544,
      "learning_rate": 1.083940584676588e-05,
      "loss": 0.6919,
      "step": 4450
    },
    {
      "epoch": 0.5948847910900663,
      "grad_norm": 1.6027050129978238,
      "learning_rate": 1.077844241428195e-05,
      "loss": 0.6579,
      "step": 4460
    },
    {
      "epoch": 0.5962186134916136,
      "grad_norm": 4.272201666240297,
      "learning_rate": 1.071755461803515e-05,
      "loss": 0.6992,
      "step": 4470
    },
    {
      "epoch": 0.5975524358931609,
      "grad_norm": 4.847908056514074,
      "learning_rate": 1.0656743548931784e-05,
      "loss": 0.6858,
      "step": 4480
    },
    {
      "epoch": 0.5988862582947081,
      "grad_norm": 1.899776347699883,
      "learning_rate": 1.0596010296503469e-05,
      "loss": 0.7175,
      "step": 4490
    },
    {
      "epoch": 0.6002200806962553,
      "grad_norm": 3.6851504324405533,
      "learning_rate": 1.0535355948887598e-05,
      "loss": 0.6731,
      "step": 4500
    },
    {
      "epoch": 0.6015539030978025,
      "grad_norm": 20.935216614062877,
      "learning_rate": 1.0474781592807854e-05,
      "loss": 0.6548,
      "step": 4510
    },
    {
      "epoch": 0.6028877254993498,
      "grad_norm": 5.577424675925709,
      "learning_rate": 1.0414288313554746e-05,
      "loss": 0.7263,
      "step": 4520
    },
    {
      "epoch": 0.604221547900897,
      "grad_norm": 2.9726973141053334,
      "learning_rate": 1.0353877194966152e-05,
      "loss": 0.7446,
      "step": 4530
    },
    {
      "epoch": 0.6055553703024442,
      "grad_norm": 2.021480129071628,
      "learning_rate": 1.0293549319407901e-05,
      "loss": 0.7137,
      "step": 4540
    },
    {
      "epoch": 0.6068891927039914,
      "grad_norm": 1.9390208520343517,
      "learning_rate": 1.0233305767754391e-05,
      "loss": 0.6998,
      "step": 4550
    },
    {
      "epoch": 0.6082230151055387,
      "grad_norm": 2.2439008274229337,
      "learning_rate": 1.0173147619369212e-05,
      "loss": 0.6977,
      "step": 4560
    },
    {
      "epoch": 0.6095568375070859,
      "grad_norm": 3.002628922946286,
      "learning_rate": 1.0113075952085815e-05,
      "loss": 0.7119,
      "step": 4570
    },
    {
      "epoch": 0.6108906599086331,
      "grad_norm": 1.8784698804400835,
      "learning_rate": 1.0053091842188196e-05,
      "loss": 0.6813,
      "step": 4580
    },
    {
      "epoch": 0.6122244823101805,
      "grad_norm": 3.6775461109208702,
      "learning_rate": 9.993196364391614e-06,
      "loss": 0.6963,
      "step": 4590
    },
    {
      "epoch": 0.6135583047117277,
      "grad_norm": 3.0082378136289636,
      "learning_rate": 9.93339059182334e-06,
      "loss": 0.6761,
      "step": 4600
    },
    {
      "epoch": 0.6148921271132749,
      "grad_norm": 2.0259105048263297,
      "learning_rate": 9.873675596003424e-06,
      "loss": 0.6645,
      "step": 4610
    },
    {
      "epoch": 0.6162259495148221,
      "grad_norm": 7.087002002369676,
      "learning_rate": 9.8140524468255e-06,
      "loss": 0.6836,
      "step": 4620
    },
    {
      "epoch": 0.6175597719163693,
      "grad_norm": 6.82917662319771,
      "learning_rate": 9.754522212537614e-06,
      "loss": 0.6546,
      "step": 4630
    },
    {
      "epoch": 0.6188935943179166,
      "grad_norm": 2.7798504683532546,
      "learning_rate": 9.695085959723088e-06,
      "loss": 0.6879,
      "step": 4640
    },
    {
      "epoch": 0.6202274167194638,
      "grad_norm": 2.9169362806410124,
      "learning_rate": 9.63574475328141e-06,
      "loss": 0.7287,
      "step": 4650
    },
    {
      "epoch": 0.621561239121011,
      "grad_norm": 1.9790125803612642,
      "learning_rate": 9.576499656409158e-06,
      "loss": 0.6933,
      "step": 4660
    },
    {
      "epoch": 0.6228950615225582,
      "grad_norm": 3.533798783312709,
      "learning_rate": 9.517351730580939e-06,
      "loss": 0.6763,
      "step": 4670
    },
    {
      "epoch": 0.6242288839241055,
      "grad_norm": 4.906070778847422,
      "learning_rate": 9.458302035530384e-06,
      "loss": 0.7089,
      "step": 4680
    },
    {
      "epoch": 0.6255627063256527,
      "grad_norm": 3.448200148869349,
      "learning_rate": 9.399351629231154e-06,
      "loss": 0.6911,
      "step": 4690
    },
    {
      "epoch": 0.6268965287271999,
      "grad_norm": 7.159835250493477,
      "learning_rate": 9.340501567877989e-06,
      "loss": 0.6387,
      "step": 4700
    },
    {
      "epoch": 0.6282303511287473,
      "grad_norm": 7.692987979738203,
      "learning_rate": 9.281752905867778e-06,
      "loss": 0.6239,
      "step": 4710
    },
    {
      "epoch": 0.6295641735302945,
      "grad_norm": 3.0193719069272187,
      "learning_rate": 9.223106695780677e-06,
      "loss": 0.6755,
      "step": 4720
    },
    {
      "epoch": 0.6308979959318417,
      "grad_norm": 2.31533598338176,
      "learning_rate": 9.164563988361242e-06,
      "loss": 0.6772,
      "step": 4730
    },
    {
      "epoch": 0.6322318183333889,
      "grad_norm": 4.426498530040442,
      "learning_rate": 9.106125832499604e-06,
      "loss": 0.689,
      "step": 4740
    },
    {
      "epoch": 0.6335656407349362,
      "grad_norm": 3.055489437274157,
      "learning_rate": 9.047793275212686e-06,
      "loss": 0.687,
      "step": 4750
    },
    {
      "epoch": 0.6348994631364834,
      "grad_norm": 7.12383434732346,
      "learning_rate": 8.989567361625427e-06,
      "loss": 0.6604,
      "step": 4760
    },
    {
      "epoch": 0.6362332855380306,
      "grad_norm": 2.6553339556706788,
      "learning_rate": 8.931449134952075e-06,
      "loss": 0.6866,
      "step": 4770
    },
    {
      "epoch": 0.6375671079395778,
      "grad_norm": 3.1179277884273806,
      "learning_rate": 8.873439636477484e-06,
      "loss": 0.6599,
      "step": 4780
    },
    {
      "epoch": 0.638900930341125,
      "grad_norm": 2.4632186731676993,
      "learning_rate": 8.815539905538459e-06,
      "loss": 0.6957,
      "step": 4790
    },
    {
      "epoch": 0.6402347527426723,
      "grad_norm": 2.243620092515075,
      "learning_rate": 8.757750979505137e-06,
      "loss": 0.678,
      "step": 4800
    },
    {
      "epoch": 0.6415685751442195,
      "grad_norm": 1.559849971379389,
      "learning_rate": 8.700073893762408e-06,
      "loss": 0.675,
      "step": 4810
    },
    {
      "epoch": 0.6429023975457668,
      "grad_norm": 2.174037487987736,
      "learning_rate": 8.642509681691347e-06,
      "loss": 0.6654,
      "step": 4820
    },
    {
      "epoch": 0.6442362199473141,
      "grad_norm": 4.920990395145698,
      "learning_rate": 8.585059374650717e-06,
      "loss": 0.6839,
      "step": 4830
    },
    {
      "epoch": 0.6455700423488613,
      "grad_norm": 2.4413434562237115,
      "learning_rate": 8.527724001958476e-06,
      "loss": 0.7275,
      "step": 4840
    },
    {
      "epoch": 0.6469038647504085,
      "grad_norm": 1.8344146906183378,
      "learning_rate": 8.470504590873346e-06,
      "loss": 0.6961,
      "step": 4850
    },
    {
      "epoch": 0.6482376871519557,
      "grad_norm": 1.9274949192717368,
      "learning_rate": 8.413402166576397e-06,
      "loss": 0.6802,
      "step": 4860
    },
    {
      "epoch": 0.649571509553503,
      "grad_norm": 2.823435230180327,
      "learning_rate": 8.3564177521527e-06,
      "loss": 0.6545,
      "step": 4870
    },
    {
      "epoch": 0.6509053319550502,
      "grad_norm": 12.01208015208349,
      "learning_rate": 8.29955236857297e-06,
      "loss": 0.6608,
      "step": 4880
    },
    {
      "epoch": 0.6522391543565974,
      "grad_norm": 2.7110104949534146,
      "learning_rate": 8.242807034675289e-06,
      "loss": 0.6825,
      "step": 4890
    },
    {
      "epoch": 0.6535729767581446,
      "grad_norm": 3.5400540139770467,
      "learning_rate": 8.186182767146848e-06,
      "loss": 0.7173,
      "step": 4900
    },
    {
      "epoch": 0.6549067991596919,
      "grad_norm": 2.22571512795563,
      "learning_rate": 8.12968058050574e-06,
      "loss": 0.6979,
      "step": 4910
    },
    {
      "epoch": 0.6562406215612391,
      "grad_norm": 9.112812839574897,
      "learning_rate": 8.073301487082768e-06,
      "loss": 0.7059,
      "step": 4920
    },
    {
      "epoch": 0.6575744439627863,
      "grad_norm": 3.3720066783743867,
      "learning_rate": 8.017046497003308e-06,
      "loss": 0.6896,
      "step": 4930
    },
    {
      "epoch": 0.6589082663643336,
      "grad_norm": 6.1591898293726945,
      "learning_rate": 7.960916618169233e-06,
      "loss": 0.697,
      "step": 4940
    },
    {
      "epoch": 0.6602420887658809,
      "grad_norm": 2.7295855620488716,
      "learning_rate": 7.904912856240833e-06,
      "loss": 0.6892,
      "step": 4950
    },
    {
      "epoch": 0.6615759111674281,
      "grad_norm": 2.345166503103055,
      "learning_rate": 7.849036214618802e-06,
      "loss": 0.6748,
      "step": 4960
    },
    {
      "epoch": 0.6629097335689753,
      "grad_norm": 7.498544170227416,
      "learning_rate": 7.793287694426263e-06,
      "loss": 0.7016,
      "step": 4970
    },
    {
      "epoch": 0.6642435559705225,
      "grad_norm": 2.0475845854151986,
      "learning_rate": 7.737668294490834e-06,
      "loss": 0.7179,
      "step": 4980
    },
    {
      "epoch": 0.6655773783720698,
      "grad_norm": 3.114340952412098,
      "learning_rate": 7.68217901132672e-06,
      "loss": 0.6793,
      "step": 4990
    },
    {
      "epoch": 0.666911200773617,
      "grad_norm": 3.83883079292243,
      "learning_rate": 7.626820839116876e-06,
      "loss": 0.6876,
      "step": 5000
    },
    {
      "epoch": 0.6682450231751642,
      "grad_norm": 8.432894431693887,
      "learning_rate": 7.571594769695181e-06,
      "loss": 0.6874,
      "step": 5010
    },
    {
      "epoch": 0.6695788455767114,
      "grad_norm": 2.674070025495073,
      "learning_rate": 7.51650179252867e-06,
      "loss": 0.7172,
      "step": 5020
    },
    {
      "epoch": 0.6709126679782587,
      "grad_norm": 5.973778009949667,
      "learning_rate": 7.461542894699818e-06,
      "loss": 0.7133,
      "step": 5030
    },
    {
      "epoch": 0.6722464903798059,
      "grad_norm": 3.2677383520376795,
      "learning_rate": 7.406719060888837e-06,
      "loss": 0.7314,
      "step": 5040
    },
    {
      "epoch": 0.6735803127813531,
      "grad_norm": 4.172418337526236,
      "learning_rate": 7.352031273356045e-06,
      "loss": 0.6805,
      "step": 5050
    },
    {
      "epoch": 0.6749141351829004,
      "grad_norm": 2.1121612232039477,
      "learning_rate": 7.297480511924263e-06,
      "loss": 0.664,
      "step": 5060
    },
    {
      "epoch": 0.6762479575844477,
      "grad_norm": 4.737396987962653,
      "learning_rate": 7.243067753961267e-06,
      "loss": 0.7202,
      "step": 5070
    },
    {
      "epoch": 0.6775817799859949,
      "grad_norm": 2.2892995457734053,
      "learning_rate": 7.188793974362254e-06,
      "loss": 0.6675,
      "step": 5080
    },
    {
      "epoch": 0.6789156023875421,
      "grad_norm": 2.5303725617403647,
      "learning_rate": 7.13466014553241e-06,
      "loss": 0.7053,
      "step": 5090
    },
    {
      "epoch": 0.6802494247890893,
      "grad_norm": 2.1911360902224315,
      "learning_rate": 7.080667237369468e-06,
      "loss": 0.6363,
      "step": 5100
    },
    {
      "epoch": 0.6815832471906366,
      "grad_norm": 1.7722416009046082,
      "learning_rate": 7.0268162172463215e-06,
      "loss": 0.6647,
      "step": 5110
    },
    {
      "epoch": 0.6829170695921838,
      "grad_norm": 2.3107884233221396,
      "learning_rate": 6.973108049993714e-06,
      "loss": 0.6566,
      "step": 5120
    },
    {
      "epoch": 0.684250891993731,
      "grad_norm": 2.374293674041196,
      "learning_rate": 6.919543697882938e-06,
      "loss": 0.6772,
      "step": 5130
    },
    {
      "epoch": 0.6855847143952782,
      "grad_norm": 1.323614967454432,
      "learning_rate": 6.866124120608596e-06,
      "loss": 0.7142,
      "step": 5140
    },
    {
      "epoch": 0.6869185367968255,
      "grad_norm": 2.5080725989534463,
      "learning_rate": 6.812850275271412e-06,
      "loss": 0.6672,
      "step": 5150
    },
    {
      "epoch": 0.6882523591983727,
      "grad_norm": 2.4821500591867527,
      "learning_rate": 6.759723116361077e-06,
      "loss": 0.6752,
      "step": 5160
    },
    {
      "epoch": 0.68958618159992,
      "grad_norm": 1.5591146033827414,
      "learning_rate": 6.706743595739151e-06,
      "loss": 0.6816,
      "step": 5170
    },
    {
      "epoch": 0.6909200040014672,
      "grad_norm": 3.7818920756836967,
      "learning_rate": 6.653912662622009e-06,
      "loss": 0.6865,
      "step": 5180
    },
    {
      "epoch": 0.6922538264030145,
      "grad_norm": 1.4437006855258527,
      "learning_rate": 6.601231263563832e-06,
      "loss": 0.6606,
      "step": 5190
    },
    {
      "epoch": 0.6935876488045617,
      "grad_norm": 16.033675993643524,
      "learning_rate": 6.548700342439648e-06,
      "loss": 0.6438,
      "step": 5200
    },
    {
      "epoch": 0.6949214712061089,
      "grad_norm": 4.180984666556721,
      "learning_rate": 6.496320840428426e-06,
      "loss": 0.6929,
      "step": 5210
    },
    {
      "epoch": 0.6962552936076561,
      "grad_norm": 2.58067260639148,
      "learning_rate": 6.444093695996205e-06,
      "loss": 0.6869,
      "step": 5220
    },
    {
      "epoch": 0.6975891160092034,
      "grad_norm": 5.857865039816727,
      "learning_rate": 6.392019844879289e-06,
      "loss": 0.691,
      "step": 5230
    },
    {
      "epoch": 0.6989229384107506,
      "grad_norm": 1.7969091660941678,
      "learning_rate": 6.340100220067473e-06,
      "loss": 0.6887,
      "step": 5240
    },
    {
      "epoch": 0.7002567608122978,
      "grad_norm": 4.64525683921728,
      "learning_rate": 6.28833575178733e-06,
      "loss": 0.7081,
      "step": 5250
    },
    {
      "epoch": 0.701590583213845,
      "grad_norm": 30.247747163980556,
      "learning_rate": 6.23672736748555e-06,
      "loss": 0.6977,
      "step": 5260
    },
    {
      "epoch": 0.7029244056153923,
      "grad_norm": 4.753750976434006,
      "learning_rate": 6.1852759918123145e-06,
      "loss": 0.6724,
      "step": 5270
    },
    {
      "epoch": 0.7042582280169395,
      "grad_norm": 1.9245050933564345,
      "learning_rate": 6.133982546604735e-06,
      "loss": 0.6498,
      "step": 5280
    },
    {
      "epoch": 0.7055920504184868,
      "grad_norm": 8.580113915174046,
      "learning_rate": 6.082847950870334e-06,
      "loss": 0.6798,
      "step": 5290
    },
    {
      "epoch": 0.706925872820034,
      "grad_norm": 5.254260741119434,
      "learning_rate": 6.031873120770585e-06,
      "loss": 0.7094,
      "step": 5300
    },
    {
      "epoch": 0.7082596952215813,
      "grad_norm": 4.267395769845368,
      "learning_rate": 5.9810589696044935e-06,
      "loss": 0.6447,
      "step": 5310
    },
    {
      "epoch": 0.7095935176231285,
      "grad_norm": 2.1531210048068203,
      "learning_rate": 5.9304064077922274e-06,
      "loss": 0.6686,
      "step": 5320
    },
    {
      "epoch": 0.7109273400246757,
      "grad_norm": 3.0627830530266467,
      "learning_rate": 5.879916342858821e-06,
      "loss": 0.6987,
      "step": 5330
    },
    {
      "epoch": 0.712261162426223,
      "grad_norm": 3.5786494907624546,
      "learning_rate": 5.829589679417901e-06,
      "loss": 0.7058,
      "step": 5340
    },
    {
      "epoch": 0.7135949848277702,
      "grad_norm": 4.212959667448277,
      "learning_rate": 5.779427319155485e-06,
      "loss": 0.6698,
      "step": 5350
    },
    {
      "epoch": 0.7149288072293174,
      "grad_norm": 5.081476320526283,
      "learning_rate": 5.7294301608138274e-06,
      "loss": 0.6978,
      "step": 5360
    },
    {
      "epoch": 0.7162626296308646,
      "grad_norm": 2.2390204735144006,
      "learning_rate": 5.679599100175312e-06,
      "loss": 0.6923,
      "step": 5370
    },
    {
      "epoch": 0.7175964520324118,
      "grad_norm": 3.722611802307121,
      "learning_rate": 5.629935030046409e-06,
      "loss": 0.7111,
      "step": 5380
    },
    {
      "epoch": 0.7189302744339591,
      "grad_norm": 8.85305178336297,
      "learning_rate": 5.580438840241671e-06,
      "loss": 0.7014,
      "step": 5390
    },
    {
      "epoch": 0.7202640968355064,
      "grad_norm": 2.7228108037923406,
      "learning_rate": 5.531111417567799e-06,
      "loss": 0.6918,
      "step": 5400
    },
    {
      "epoch": 0.7215979192370536,
      "grad_norm": 2.774305095096996,
      "learning_rate": 5.48195364580775e-06,
      "loss": 0.6986,
      "step": 5410
    },
    {
      "epoch": 0.7229317416386009,
      "grad_norm": 3.3079542242294053,
      "learning_rate": 5.432966405704895e-06,
      "loss": 0.702,
      "step": 5420
    },
    {
      "epoch": 0.7242655640401481,
      "grad_norm": 4.313463849702513,
      "learning_rate": 5.384150574947258e-06,
      "loss": 0.7034,
      "step": 5430
    },
    {
      "epoch": 0.7255993864416953,
      "grad_norm": 10.27608427466511,
      "learning_rate": 5.335507028151768e-06,
      "loss": 0.6847,
      "step": 5440
    },
    {
      "epoch": 0.7269332088432425,
      "grad_norm": 1.3643574661197668,
      "learning_rate": 5.2870366368486074e-06,
      "loss": 0.6652,
      "step": 5450
    },
    {
      "epoch": 0.7282670312447898,
      "grad_norm": 2.0571595612294313,
      "learning_rate": 5.238740269465584e-06,
      "loss": 0.7057,
      "step": 5460
    },
    {
      "epoch": 0.729600853646337,
      "grad_norm": 2.177709522875291,
      "learning_rate": 5.190618791312581e-06,
      "loss": 0.7147,
      "step": 5470
    },
    {
      "epoch": 0.7309346760478842,
      "grad_norm": 3.4529351229700134,
      "learning_rate": 5.142673064566048e-06,
      "loss": 0.6676,
      "step": 5480
    },
    {
      "epoch": 0.7322684984494314,
      "grad_norm": 2.0423403296749534,
      "learning_rate": 5.094903948253557e-06,
      "loss": 0.6519,
      "step": 5490
    },
    {
      "epoch": 0.7336023208509787,
      "grad_norm": 2.6755745244049045,
      "learning_rate": 5.047312298238407e-06,
      "loss": 0.6553,
      "step": 5500
    },
    {
      "epoch": 0.7349361432525259,
      "grad_norm": 2.0735822085238365,
      "learning_rate": 4.999898967204293e-06,
      "loss": 0.6853,
      "step": 5510
    },
    {
      "epoch": 0.7362699656540732,
      "grad_norm": 1.4637677526008275,
      "learning_rate": 4.952664804640032e-06,
      "loss": 0.6382,
      "step": 5520
    },
    {
      "epoch": 0.7376037880556204,
      "grad_norm": 10.3457254327091,
      "learning_rate": 4.905610656824338e-06,
      "loss": 0.6963,
      "step": 5530
    },
    {
      "epoch": 0.7389376104571677,
      "grad_norm": 3.7186152329588307,
      "learning_rate": 4.858737366810661e-06,
      "loss": 0.6892,
      "step": 5540
    },
    {
      "epoch": 0.7402714328587149,
      "grad_norm": 3.866784770301919,
      "learning_rate": 4.812045774412074e-06,
      "loss": 0.6976,
      "step": 5550
    },
    {
      "epoch": 0.7416052552602621,
      "grad_norm": 1.2902307476254877,
      "learning_rate": 4.765536716186247e-06,
      "loss": 0.651,
      "step": 5560
    },
    {
      "epoch": 0.7429390776618093,
      "grad_norm": 5.660668096573102,
      "learning_rate": 4.719211025420436e-06,
      "loss": 0.6715,
      "step": 5570
    },
    {
      "epoch": 0.7442729000633566,
      "grad_norm": 1.7865867976347491,
      "learning_rate": 4.673069532116575e-06,
      "loss": 0.683,
      "step": 5580
    },
    {
      "epoch": 0.7456067224649038,
      "grad_norm": 8.841085187491435,
      "learning_rate": 4.627113062976379e-06,
      "loss": 0.7104,
      "step": 5590
    },
    {
      "epoch": 0.746940544866451,
      "grad_norm": 1.8568471185779514,
      "learning_rate": 4.581342441386563e-06,
      "loss": 0.6738,
      "step": 5600
    },
    {
      "epoch": 0.7482743672679982,
      "grad_norm": 8.613029244471035,
      "learning_rate": 4.53575848740406e-06,
      "loss": 0.6907,
      "step": 5610
    },
    {
      "epoch": 0.7496081896695455,
      "grad_norm": 5.379674431405216,
      "learning_rate": 4.490362017741346e-06,
      "loss": 0.6547,
      "step": 5620
    },
    {
      "epoch": 0.7509420120710927,
      "grad_norm": 40.42031518782124,
      "learning_rate": 4.445153845751808e-06,
      "loss": 0.6615,
      "step": 5630
    },
    {
      "epoch": 0.75227583447264,
      "grad_norm": 2.262382554857583,
      "learning_rate": 4.4001347814151625e-06,
      "loss": 0.7268,
      "step": 5640
    },
    {
      "epoch": 0.7536096568741872,
      "grad_norm": 4.773999070319672,
      "learning_rate": 4.355305631322943e-06,
      "loss": 0.6671,
      "step": 5650
    },
    {
      "epoch": 0.7549434792757345,
      "grad_norm": 2.4606314332551893,
      "learning_rate": 4.31066719866406e-06,
      "loss": 0.6989,
      "step": 5660
    },
    {
      "epoch": 0.7562773016772817,
      "grad_norm": 3.715537855764248,
      "learning_rate": 4.266220283210403e-06,
      "loss": 0.7022,
      "step": 5670
    },
    {
      "epoch": 0.7576111240788289,
      "grad_norm": 2.812731415446895,
      "learning_rate": 4.221965681302506e-06,
      "loss": 0.6742,
      "step": 5680
    },
    {
      "epoch": 0.7589449464803761,
      "grad_norm": 2.21543044943041,
      "learning_rate": 4.177904185835289e-06,
      "loss": 0.6574,
      "step": 5690
    },
    {
      "epoch": 0.7602787688819234,
      "grad_norm": 2.5996433086745623,
      "learning_rate": 4.134036586243852e-06,
      "loss": 0.6634,
      "step": 5700
    },
    {
      "epoch": 0.7616125912834706,
      "grad_norm": 1.4311723346543919,
      "learning_rate": 4.0903636684893205e-06,
      "loss": 0.6986,
      "step": 5710
    },
    {
      "epoch": 0.7629464136850178,
      "grad_norm": 1.5544477881800915,
      "learning_rate": 4.046886215044773e-06,
      "loss": 0.6982,
      "step": 5720
    },
    {
      "epoch": 0.764280236086565,
      "grad_norm": 50.87643723015965,
      "learning_rate": 4.003605004881224e-06,
      "loss": 0.6944,
      "step": 5730
    },
    {
      "epoch": 0.7656140584881123,
      "grad_norm": 3.3621842813994958,
      "learning_rate": 3.960520813453654e-06,
      "loss": 0.6388,
      "step": 5740
    },
    {
      "epoch": 0.7669478808896596,
      "grad_norm": 1.6373066358476196,
      "learning_rate": 3.917634412687132e-06,
      "loss": 0.6381,
      "step": 5750
    },
    {
      "epoch": 0.7682817032912068,
      "grad_norm": 4.021243191453306,
      "learning_rate": 3.874946570962977e-06,
      "loss": 0.679,
      "step": 5760
    },
    {
      "epoch": 0.769615525692754,
      "grad_norm": 2.1771219666869404,
      "learning_rate": 3.832458053104985e-06,
      "loss": 0.65,
      "step": 5770
    },
    {
      "epoch": 0.7709493480943013,
      "grad_norm": 4.117022111093457,
      "learning_rate": 3.790169620365742e-06,
      "loss": 0.6968,
      "step": 5780
    },
    {
      "epoch": 0.7722831704958485,
      "grad_norm": 1.7554846375395396,
      "learning_rate": 3.748082030412971e-06,
      "loss": 0.6943,
      "step": 5790
    },
    {
      "epoch": 0.7736169928973957,
      "grad_norm": 1.0196062550048701,
      "learning_rate": 3.7061960373159603e-06,
      "loss": 0.6867,
      "step": 5800
    },
    {
      "epoch": 0.7749508152989429,
      "grad_norm": 1.7419968573435696,
      "learning_rate": 3.66451239153206e-06,
      "loss": 0.6265,
      "step": 5810
    },
    {
      "epoch": 0.7762846377004902,
      "grad_norm": 2.3677756205266065,
      "learning_rate": 3.623031839893226e-06,
      "loss": 0.7055,
      "step": 5820
    },
    {
      "epoch": 0.7776184601020374,
      "grad_norm": 3.9343316788594915,
      "learning_rate": 3.5817551255926473e-06,
      "loss": 0.6789,
      "step": 5830
    },
    {
      "epoch": 0.7789522825035846,
      "grad_norm": 2.48678634987911,
      "learning_rate": 3.5406829881714254e-06,
      "loss": 0.6917,
      "step": 5840
    },
    {
      "epoch": 0.7802861049051318,
      "grad_norm": 3.0165585668876678,
      "learning_rate": 3.4998161635053274e-06,
      "loss": 0.6703,
      "step": 5850
    },
    {
      "epoch": 0.7816199273066791,
      "grad_norm": 1.6901003264945378,
      "learning_rate": 3.459155383791601e-06,
      "loss": 0.6551,
      "step": 5860
    },
    {
      "epoch": 0.7829537497082264,
      "grad_norm": 3.1065648861476727,
      "learning_rate": 3.4187013775358515e-06,
      "loss": 0.6593,
      "step": 5870
    },
    {
      "epoch": 0.7842875721097736,
      "grad_norm": 3.5522700882458516,
      "learning_rate": 3.3784548695389993e-06,
      "loss": 0.6649,
      "step": 5880
    },
    {
      "epoch": 0.7856213945113208,
      "grad_norm": 3.2401137004728997,
      "learning_rate": 3.338416580884284e-06,
      "loss": 0.7241,
      "step": 5890
    },
    {
      "epoch": 0.7869552169128681,
      "grad_norm": 4.874888492189262,
      "learning_rate": 3.2985872289243466e-06,
      "loss": 0.6394,
      "step": 5900
    },
    {
      "epoch": 0.7882890393144153,
      "grad_norm": 2.987557037958223,
      "learning_rate": 3.2589675272683855e-06,
      "loss": 0.6662,
      "step": 5910
    },
    {
      "epoch": 0.7896228617159625,
      "grad_norm": 5.926328730580206,
      "learning_rate": 3.2195581857693595e-06,
      "loss": 0.6915,
      "step": 5920
    },
    {
      "epoch": 0.7909566841175097,
      "grad_norm": 2.907571799653725,
      "learning_rate": 3.180359910511275e-06,
      "loss": 0.6684,
      "step": 5930
    },
    {
      "epoch": 0.792290506519057,
      "grad_norm": 8.236684978263545,
      "learning_rate": 3.1413734037965386e-06,
      "loss": 0.6998,
      "step": 5940
    },
    {
      "epoch": 0.7936243289206042,
      "grad_norm": 3.2966043804161584,
      "learning_rate": 3.102599364133366e-06,
      "loss": 0.7251,
      "step": 5950
    },
    {
      "epoch": 0.7949581513221514,
      "grad_norm": 2.1556446497268507,
      "learning_rate": 3.0640384862232756e-06,
      "loss": 0.6811,
      "step": 5960
    },
    {
      "epoch": 0.7962919737236986,
      "grad_norm": 3.636606511761579,
      "learning_rate": 3.0256914609486367e-06,
      "loss": 0.6736,
      "step": 5970
    },
    {
      "epoch": 0.797625796125246,
      "grad_norm": 1.5667682334090924,
      "learning_rate": 2.9875589753602926e-06,
      "loss": 0.7156,
      "step": 5980
    },
    {
      "epoch": 0.7989596185267932,
      "grad_norm": 2.0397471013105246,
      "learning_rate": 2.9496417126652476e-06,
      "loss": 0.6691,
      "step": 5990
    },
    {
      "epoch": 0.8002934409283404,
      "grad_norm": 16.740171367173748,
      "learning_rate": 2.911940352214437e-06,
      "loss": 0.6677,
      "step": 6000
    },
    {
      "epoch": 0.8016272633298877,
      "grad_norm": 131.63779763284177,
      "learning_rate": 2.874455569490535e-06,
      "loss": 0.7027,
      "step": 6010
    },
    {
      "epoch": 0.8029610857314349,
      "grad_norm": 2.5173921242626873,
      "learning_rate": 2.8371880360958764e-06,
      "loss": 0.6399,
      "step": 6020
    },
    {
      "epoch": 0.8042949081329821,
      "grad_norm": 10.518961542895665,
      "learning_rate": 2.800138419740408e-06,
      "loss": 0.6518,
      "step": 6030
    },
    {
      "epoch": 0.8056287305345293,
      "grad_norm": 3.3475848872480505,
      "learning_rate": 2.76330738422973e-06,
      "loss": 0.6879,
      "step": 6040
    },
    {
      "epoch": 0.8069625529360765,
      "grad_norm": 2.1730977739719166,
      "learning_rate": 2.7266955894532046e-06,
      "loss": 0.6781,
      "step": 6050
    },
    {
      "epoch": 0.8082963753376238,
      "grad_norm": 2.072678851059398,
      "learning_rate": 2.6903036913721285e-06,
      "loss": 0.7262,
      "step": 6060
    },
    {
      "epoch": 0.809630197739171,
      "grad_norm": 1.6941591901687694,
      "learning_rate": 2.6541323420079832e-06,
      "loss": 0.6686,
      "step": 6070
    },
    {
      "epoch": 0.8109640201407182,
      "grad_norm": 4.28150571630244,
      "learning_rate": 2.6181821894307534e-06,
      "loss": 0.7151,
      "step": 6080
    },
    {
      "epoch": 0.8122978425422654,
      "grad_norm": 5.224988669059186,
      "learning_rate": 2.582453877747313e-06,
      "loss": 0.6516,
      "step": 6090
    },
    {
      "epoch": 0.8136316649438128,
      "grad_norm": 2.736915062547764,
      "learning_rate": 2.546948047089889e-06,
      "loss": 0.692,
      "step": 6100
    },
    {
      "epoch": 0.81496548734536,
      "grad_norm": 3.209445281777465,
      "learning_rate": 2.5116653336045905e-06,
      "loss": 0.6929,
      "step": 6110
    },
    {
      "epoch": 0.8162993097469072,
      "grad_norm": 18.35451398413344,
      "learning_rate": 2.4766063694400064e-06,
      "loss": 0.6409,
      "step": 6120
    },
    {
      "epoch": 0.8176331321484545,
      "grad_norm": 2.7769421422561162,
      "learning_rate": 2.4417717827358895e-06,
      "loss": 0.6802,
      "step": 6130
    },
    {
      "epoch": 0.8189669545500017,
      "grad_norm": 5.202682155516338,
      "learning_rate": 2.4071621976118928e-06,
      "loss": 0.7161,
      "step": 6140
    },
    {
      "epoch": 0.8203007769515489,
      "grad_norm": 1.8785422878709268,
      "learning_rate": 2.3727782341563915e-06,
      "loss": 0.6637,
      "step": 6150
    },
    {
      "epoch": 0.8216345993530961,
      "grad_norm": 4.160045067102897,
      "learning_rate": 2.3386205084153754e-06,
      "loss": 0.6853,
      "step": 6160
    },
    {
      "epoch": 0.8229684217546434,
      "grad_norm": 11.6299591971655,
      "learning_rate": 2.304689632381407e-06,
      "loss": 0.6898,
      "step": 6170
    },
    {
      "epoch": 0.8243022441561906,
      "grad_norm": 2.19668666417576,
      "learning_rate": 2.2709862139826554e-06,
      "loss": 0.681,
      "step": 6180
    },
    {
      "epoch": 0.8256360665577378,
      "grad_norm": 9.10806081724238,
      "learning_rate": 2.237510857072013e-06,
      "loss": 0.6929,
      "step": 6190
    },
    {
      "epoch": 0.826969888959285,
      "grad_norm": 2.0779769959484438,
      "learning_rate": 2.204264161416265e-06,
      "loss": 0.6801,
      "step": 6200
    },
    {
      "epoch": 0.8283037113608323,
      "grad_norm": 5.811721585561884,
      "learning_rate": 2.171246722685354e-06,
      "loss": 0.6646,
      "step": 6210
    },
    {
      "epoch": 0.8296375337623796,
      "grad_norm": 2.5237647632697993,
      "learning_rate": 2.1384591324417e-06,
      "loss": 0.66,
      "step": 6220
    },
    {
      "epoch": 0.8309713561639268,
      "grad_norm": 2.5679386388987986,
      "learning_rate": 2.1059019781296073e-06,
      "loss": 0.6674,
      "step": 6230
    },
    {
      "epoch": 0.832305178565474,
      "grad_norm": 1.9554398513591067,
      "learning_rate": 2.0735758430647316e-06,
      "loss": 0.6719,
      "step": 6240
    },
    {
      "epoch": 0.8336390009670213,
      "grad_norm": 5.185838548176553,
      "learning_rate": 2.041481306423638e-06,
      "loss": 0.708,
      "step": 6250
    },
    {
      "epoch": 0.8349728233685685,
      "grad_norm": 3.1308842001803523,
      "learning_rate": 2.0096189432334194e-06,
      "loss": 0.6997,
      "step": 6260
    },
    {
      "epoch": 0.8363066457701157,
      "grad_norm": 2.3929519797200474,
      "learning_rate": 1.977989324361394e-06,
      "loss": 0.706,
      "step": 6270
    },
    {
      "epoch": 0.8376404681716629,
      "grad_norm": 1.4916686908701087,
      "learning_rate": 1.946593016504877e-06,
      "loss": 0.6761,
      "step": 6280
    },
    {
      "epoch": 0.8389742905732102,
      "grad_norm": 4.139454571827269,
      "learning_rate": 1.915430582181031e-06,
      "loss": 0.6654,
      "step": 6290
    },
    {
      "epoch": 0.8403081129747574,
      "grad_norm": 4.3252701691493,
      "learning_rate": 1.8845025797167792e-06,
      "loss": 0.6656,
      "step": 6300
    },
    {
      "epoch": 0.8416419353763046,
      "grad_norm": 2.512715902588831,
      "learning_rate": 1.8538095632388135e-06,
      "loss": 0.7088,
      "step": 6310
    },
    {
      "epoch": 0.8429757577778518,
      "grad_norm": 2.55125098116761,
      "learning_rate": 1.8233520826636563e-06,
      "loss": 0.6996,
      "step": 6320
    },
    {
      "epoch": 0.8443095801793992,
      "grad_norm": 9.147529276811154,
      "learning_rate": 1.7931306836878154e-06,
      "loss": 0.6894,
      "step": 6330
    },
    {
      "epoch": 0.8456434025809464,
      "grad_norm": 1.448576833860535,
      "learning_rate": 1.763145907777997e-06,
      "loss": 0.687,
      "step": 6340
    },
    {
      "epoch": 0.8469772249824936,
      "grad_norm": 2.025078656588097,
      "learning_rate": 1.7333982921614194e-06,
      "loss": 0.7409,
      "step": 6350
    },
    {
      "epoch": 0.8483110473840408,
      "grad_norm": 2.0074920992381062,
      "learning_rate": 1.703888369816174e-06,
      "loss": 0.6691,
      "step": 6360
    },
    {
      "epoch": 0.8496448697855881,
      "grad_norm": 2.1486420135259428,
      "learning_rate": 1.6746166694616821e-06,
      "loss": 0.729,
      "step": 6370
    },
    {
      "epoch": 0.8509786921871353,
      "grad_norm": 2.787722139958403,
      "learning_rate": 1.6455837155492198e-06,
      "loss": 0.6569,
      "step": 6380
    },
    {
      "epoch": 0.8523125145886825,
      "grad_norm": 5.294352693264194,
      "learning_rate": 1.616790028252526e-06,
      "loss": 0.6792,
      "step": 6390
    },
    {
      "epoch": 0.8536463369902297,
      "grad_norm": 3.2433926835727918,
      "learning_rate": 1.5882361234584758e-06,
      "loss": 0.6569,
      "step": 6400
    }
  ],
  "logging_steps": 10,
  "max_steps": 7497,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 400,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.749485177756713e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}