{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.13461214874642435,
  "eval_steps": 500,
  "global_step": 800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016826518593303045,
      "grad_norm": 3.75418950341011,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 0.9983,
      "step": 10
    },
    {
      "epoch": 0.003365303718660609,
      "grad_norm": 4.027030925274863,
      "learning_rate": 9.999999999999999e-06,
      "loss": 0.9697,
      "step": 20
    },
    {
      "epoch": 0.005047955577990914,
      "grad_norm": 4.048987349136423,
      "learning_rate": 1.5e-05,
      "loss": 0.9412,
      "step": 30
    },
    {
      "epoch": 0.006730607437321218,
      "grad_norm": 5.720158971431411,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 0.8783,
      "step": 40
    },
    {
      "epoch": 0.008413259296651522,
      "grad_norm": 4.718965032869529,
      "learning_rate": 2.5e-05,
      "loss": 0.8454,
      "step": 50
    },
    {
      "epoch": 0.010095911155981827,
      "grad_norm": 3.5785181087788835,
      "learning_rate": 3e-05,
      "loss": 0.809,
      "step": 60
    },
    {
      "epoch": 0.011778563015312132,
      "grad_norm": 4.11981684712826,
      "learning_rate": 2.9999786123888308e-05,
      "loss": 0.7556,
      "step": 70
    },
    {
      "epoch": 0.013461214874642436,
      "grad_norm": 6.082559649594005,
      "learning_rate": 2.9999144501652298e-05,
      "loss": 0.7613,
      "step": 80
    },
    {
      "epoch": 0.01514386673397274,
      "grad_norm": 1.957553999291205,
      "learning_rate": 2.9998075151588992e-05,
      "loss": 0.7784,
      "step": 90
    },
    {
      "epoch": 0.016826518593303044,
      "grad_norm": 1.6706087540201593,
      "learning_rate": 2.999657810419285e-05,
      "loss": 0.7658,
      "step": 100
    },
    {
      "epoch": 0.01850917045263335,
      "grad_norm": 2.909734954037323,
      "learning_rate": 2.999465340215489e-05,
      "loss": 0.7331,
      "step": 110
    },
    {
      "epoch": 0.020191822311963654,
      "grad_norm": 1.977272298268717,
      "learning_rate": 2.999230110036149e-05,
      "loss": 0.7507,
      "step": 120
    },
    {
      "epoch": 0.02187447417129396,
      "grad_norm": 1.8089524113272115,
      "learning_rate": 2.99895212658928e-05,
      "loss": 0.7309,
      "step": 130
    },
    {
      "epoch": 0.023557126030624265,
      "grad_norm": 2.134962179309057,
      "learning_rate": 2.9986313978020846e-05,
      "loss": 0.721,
      "step": 140
    },
    {
      "epoch": 0.02523977788995457,
      "grad_norm": 11.10353091330302,
      "learning_rate": 2.9982679328207262e-05,
      "loss": 0.7338,
      "step": 150
    },
    {
      "epoch": 0.02692242974928487,
      "grad_norm": 1.4444344817739057,
      "learning_rate": 2.9978617420100692e-05,
      "loss": 0.7227,
      "step": 160
    },
    {
      "epoch": 0.028605081608615177,
      "grad_norm": 1.453288161439029,
      "learning_rate": 2.9974128369533805e-05,
      "loss": 0.7107,
      "step": 170
    },
    {
      "epoch": 0.03028773346794548,
      "grad_norm": 3.475164856876678,
      "learning_rate": 2.9969212304520034e-05,
      "loss": 0.7303,
      "step": 180
    },
    {
      "epoch": 0.03197038532727579,
      "grad_norm": 1.1636824531496957,
      "learning_rate": 2.9963869365249895e-05,
      "loss": 0.6688,
      "step": 190
    },
    {
      "epoch": 0.03365303718660609,
      "grad_norm": 1.8518695174363622,
      "learning_rate": 2.995809970408699e-05,
      "loss": 0.7003,
      "step": 200
    },
    {
      "epoch": 0.0353356890459364,
      "grad_norm": 4.09791760479377,
      "learning_rate": 2.9951903485563685e-05,
      "loss": 0.7442,
      "step": 210
    },
    {
      "epoch": 0.0370183409052667,
      "grad_norm": 2.4987929291159956,
      "learning_rate": 2.99452808863764e-05,
      "loss": 0.7517,
      "step": 220
    },
    {
      "epoch": 0.03870099276459701,
      "grad_norm": 3.4584802037194087,
      "learning_rate": 2.993823209538056e-05,
      "loss": 0.7537,
      "step": 230
    },
    {
      "epoch": 0.04038364462392731,
      "grad_norm": 2.511130636368107,
      "learning_rate": 2.9930757313585238e-05,
      "loss": 0.7599,
      "step": 240
    },
    {
      "epoch": 0.04206629648325761,
      "grad_norm": 1.7030446444812277,
      "learning_rate": 2.9922856754147406e-05,
      "loss": 0.7126,
      "step": 250
    },
    {
      "epoch": 0.04374894834258792,
      "grad_norm": 4.790377413030976,
      "learning_rate": 2.9914530642365852e-05,
      "loss": 0.72,
      "step": 260
    },
    {
      "epoch": 0.04543160020191822,
      "grad_norm": 2.0321244924961976,
      "learning_rate": 2.990577921567476e-05,
      "loss": 0.6733,
      "step": 270
    },
    {
      "epoch": 0.04711425206124853,
      "grad_norm": 2.310370624749643,
      "learning_rate": 2.989660272363696e-05,
      "loss": 0.7212,
      "step": 280
    },
    {
      "epoch": 0.04879690392057883,
      "grad_norm": 3.451763592410144,
      "learning_rate": 2.988700142793676e-05,
      "loss": 0.7237,
      "step": 290
    },
    {
      "epoch": 0.05047955577990914,
      "grad_norm": 5.317302731978485,
      "learning_rate": 2.9876975602372536e-05,
      "loss": 0.7558,
      "step": 300
    },
    {
      "epoch": 0.05216220763923944,
      "grad_norm": 2.3026448136142914,
      "learning_rate": 2.9866525532848906e-05,
      "loss": 0.6985,
      "step": 310
    },
    {
      "epoch": 0.05384485949856974,
      "grad_norm": 1.8320545447196381,
      "learning_rate": 2.9855651517368567e-05,
      "loss": 0.7227,
      "step": 320
    },
    {
      "epoch": 0.05552751135790005,
      "grad_norm": 1.9908218789466392,
      "learning_rate": 2.9844353866023802e-05,
      "loss": 0.7075,
      "step": 330
    },
    {
      "epoch": 0.05721016321723035,
      "grad_norm": 5.182840115712529,
      "learning_rate": 2.9832632900987642e-05,
      "loss": 0.7207,
      "step": 340
    },
    {
      "epoch": 0.05889281507656066,
      "grad_norm": 1.5483797249278837,
      "learning_rate": 2.982048895650468e-05,
      "loss": 0.7233,
      "step": 350
    },
    {
      "epoch": 0.06057546693589096,
      "grad_norm": 2.3382590504722693,
      "learning_rate": 2.9807922378881537e-05,
      "loss": 0.7002,
      "step": 360
    },
    {
      "epoch": 0.06225811879522127,
      "grad_norm": 3.1859655239636937,
      "learning_rate": 2.979493352647697e-05,
      "loss": 0.7201,
      "step": 370
    },
    {
      "epoch": 0.06394077065455157,
      "grad_norm": 0.9149159742557087,
      "learning_rate": 2.9781522769691686e-05,
      "loss": 0.7136,
      "step": 380
    },
    {
      "epoch": 0.06562342251388188,
      "grad_norm": 10.861566072795899,
      "learning_rate": 2.9767690490957758e-05,
      "loss": 0.7068,
      "step": 390
    },
    {
      "epoch": 0.06730607437321218,
      "grad_norm": 2.8618866775651006,
      "learning_rate": 2.9753437084727713e-05,
      "loss": 0.7239,
      "step": 400
    },
    {
      "epoch": 0.06898872623254249,
      "grad_norm": 2.8726068570785097,
      "learning_rate": 2.9738762957463292e-05,
      "loss": 0.7245,
      "step": 410
    },
    {
      "epoch": 0.0706713780918728,
      "grad_norm": 2.4481298042739112,
      "learning_rate": 2.9723668527623877e-05,
      "loss": 0.7752,
      "step": 420
    },
    {
      "epoch": 0.0723540299512031,
      "grad_norm": 1.8599931346602536,
      "learning_rate": 2.9708154225654526e-05,
      "loss": 0.7323,
      "step": 430
    },
    {
      "epoch": 0.0740366818105334,
      "grad_norm": 1.2855737813743626,
      "learning_rate": 2.9692220493973712e-05,
      "loss": 0.7037,
      "step": 440
    },
    {
      "epoch": 0.0757193336698637,
      "grad_norm": 4.629091463528233,
      "learning_rate": 2.9675867786960718e-05,
      "loss": 0.6867,
      "step": 450
    },
    {
      "epoch": 0.07740198552919401,
      "grad_norm": 6.294427059845777,
      "learning_rate": 2.9659096570942654e-05,
      "loss": 0.7272,
      "step": 460
    },
    {
      "epoch": 0.07908463738852431,
      "grad_norm": 2.4758348810051345,
      "learning_rate": 2.9641907324181194e-05,
      "loss": 0.6779,
      "step": 470
    },
    {
      "epoch": 0.08076728924785462,
      "grad_norm": 1.3455245255212915,
      "learning_rate": 2.96243005368589e-05,
      "loss": 0.7051,
      "step": 480
    },
    {
      "epoch": 0.08244994110718493,
      "grad_norm": 4.796150475871981,
      "learning_rate": 2.960627671106527e-05,
      "loss": 0.7547,
      "step": 490
    },
    {
      "epoch": 0.08413259296651522,
      "grad_norm": 2.684441445075641,
      "learning_rate": 2.9587836360782405e-05,
      "loss": 0.709,
      "step": 500
    },
    {
      "epoch": 0.08581524482584553,
      "grad_norm": 1.3869329152815553,
      "learning_rate": 2.9568980011870357e-05,
      "loss": 0.7073,
      "step": 510
    },
    {
      "epoch": 0.08749789668517584,
      "grad_norm": 2.5576974478207197,
      "learning_rate": 2.954970820205214e-05,
      "loss": 0.6918,
      "step": 520
    },
    {
      "epoch": 0.08918054854450615,
      "grad_norm": 1.1525450967004647,
      "learning_rate": 2.9530021480898393e-05,
      "loss": 0.6698,
      "step": 530
    },
    {
      "epoch": 0.09086320040383644,
      "grad_norm": 2.847083851829901,
      "learning_rate": 2.9509920409811696e-05,
      "loss": 0.671,
      "step": 540
    },
    {
      "epoch": 0.09254585226316675,
      "grad_norm": 2.561042091789346,
      "learning_rate": 2.9489405562010565e-05,
      "loss": 0.75,
      "step": 550
    },
    {
      "epoch": 0.09422850412249706,
      "grad_norm": 4.458337350053255,
      "learning_rate": 2.9468477522513132e-05,
      "loss": 0.7277,
      "step": 560
    },
    {
      "epoch": 0.09591115598182735,
      "grad_norm": 3.114622509219852,
      "learning_rate": 2.9447136888120408e-05,
      "loss": 0.6967,
      "step": 570
    },
    {
      "epoch": 0.09759380784115766,
      "grad_norm": 1.6295210229360877,
      "learning_rate": 2.9425384267399327e-05,
      "loss": 0.6867,
      "step": 580
    },
    {
      "epoch": 0.09927645970048797,
      "grad_norm": 1.7579117810504754,
      "learning_rate": 2.940322028066534e-05,
      "loss": 0.7236,
      "step": 590
    },
    {
      "epoch": 0.10095911155981828,
      "grad_norm": 1.788183804411441,
      "learning_rate": 2.938064555996476e-05,
      "loss": 0.6864,
      "step": 600
    },
    {
      "epoch": 0.10264176341914857,
      "grad_norm": 2.8340511721646373,
      "learning_rate": 2.9357660749056713e-05,
      "loss": 0.6847,
      "step": 610
    },
    {
      "epoch": 0.10432441527847888,
      "grad_norm": 2.5230840193297985,
      "learning_rate": 2.9334266503394803e-05,
      "loss": 0.6889,
      "step": 620
    },
    {
      "epoch": 0.10600706713780919,
      "grad_norm": 7.346086885083334,
      "learning_rate": 2.9310463490108397e-05,
      "loss": 0.7419,
      "step": 630
    },
    {
      "epoch": 0.10768971899713949,
      "grad_norm": 2.356832890545339,
      "learning_rate": 2.928625238798362e-05,
      "loss": 0.7369,
      "step": 640
    },
    {
      "epoch": 0.1093723708564698,
      "grad_norm": 2.4978380391841095,
      "learning_rate": 2.9261633887443993e-05,
      "loss": 0.6948,
      "step": 650
    },
    {
      "epoch": 0.1110550227158001,
      "grad_norm": 3.535487375505793,
      "learning_rate": 2.9236608690530738e-05,
      "loss": 0.7081,
      "step": 660
    },
    {
      "epoch": 0.11273767457513041,
      "grad_norm": 2.522638625540884,
      "learning_rate": 2.921117751088276e-05,
      "loss": 0.7191,
      "step": 670
    },
    {
      "epoch": 0.1144203264344607,
      "grad_norm": 3.055823541699581,
      "learning_rate": 2.91853410737163e-05,
      "loss": 0.74,
      "step": 680
    },
    {
      "epoch": 0.11610297829379101,
      "grad_norm": 3.270117047516123,
      "learning_rate": 2.915910011580426e-05,
      "loss": 0.6829,
      "step": 690
    },
    {
      "epoch": 0.11778563015312132,
      "grad_norm": 2.3219806056695367,
      "learning_rate": 2.9132455385455176e-05,
      "loss": 0.7062,
      "step": 700
    },
    {
      "epoch": 0.11946828201245162,
      "grad_norm": 1.541921603113568,
      "learning_rate": 2.9105407642491895e-05,
      "loss": 0.7217,
      "step": 710
    },
    {
      "epoch": 0.12115093387178193,
      "grad_norm": 1.557595298876376,
      "learning_rate": 2.907795765822989e-05,
      "loss": 0.7083,
      "step": 720
    },
    {
      "epoch": 0.12283358573111224,
      "grad_norm": 2.3829156571868753,
      "learning_rate": 2.9050106215455283e-05,
      "loss": 0.6992,
      "step": 730
    },
    {
      "epoch": 0.12451623759044254,
      "grad_norm": 7.536777098548366,
      "learning_rate": 2.9021854108402516e-05,
      "loss": 0.7248,
      "step": 740
    },
    {
      "epoch": 0.12619888944977284,
      "grad_norm": 1.3408030642895519,
      "learning_rate": 2.8993202142731693e-05,
      "loss": 0.6375,
      "step": 750
    },
    {
      "epoch": 0.12788154130910315,
      "grad_norm": 2.4880776314537254,
      "learning_rate": 2.8964151135505616e-05,
      "loss": 0.7063,
      "step": 760
    },
    {
      "epoch": 0.12956419316843346,
      "grad_norm": 1.5507053769862247,
      "learning_rate": 2.8934701915166477e-05,
      "loss": 0.73,
      "step": 770
    },
    {
      "epoch": 0.13124684502776376,
      "grad_norm": 3.5622930633942564,
      "learning_rate": 2.890485532151225e-05,
      "loss": 0.7521,
      "step": 780
    },
    {
      "epoch": 0.13292949688709407,
      "grad_norm": 4.188153799459233,
      "learning_rate": 2.887461220567271e-05,
      "loss": 0.6841,
      "step": 790
    },
    {
      "epoch": 0.13461214874642435,
      "grad_norm": 2.702901312773331,
      "learning_rate": 2.8843973430085204e-05,
      "loss": 0.694,
      "step": 800
    }
  ],
  "logging_steps": 10,
  "max_steps": 5943,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 400,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.644760786993152e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}