{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.19990004997501248,
  "eval_steps": 500,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 3.3340563149001086,
      "learning_rate": 0.0,
      "loss": 11.0,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 2.398812329952019,
      "learning_rate": 5.9999999999999995e-05,
      "loss": 10.125,
      "step": 2
    },
    {
      "epoch": 0.0,
      "grad_norm": 2.394322446895115,
      "learning_rate": 0.00011999999999999999,
      "loss": 10.1172,
      "step": 3
    },
    {
      "epoch": 0.0,
      "grad_norm": 1.9958816684399585,
      "learning_rate": 0.00017999999999999998,
      "loss": 9.875,
      "step": 4
    },
    {
      "epoch": 0.0,
      "grad_norm": 1.8270465897882062,
      "learning_rate": 0.00023999999999999998,
      "loss": 9.6641,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.7854046471397795,
      "learning_rate": 0.0003,
      "loss": 9.4844,
      "step": 6
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.719416749115252,
      "learning_rate": 0.00035999999999999997,
      "loss": 9.3281,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.4637825746112274,
      "learning_rate": 0.00041999999999999996,
      "loss": 9.2109,
      "step": 8
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.4393631015406718,
      "learning_rate": 0.00047999999999999996,
      "loss": 8.9453,
      "step": 9
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.2936734586915988,
      "learning_rate": 0.00054,
      "loss": 8.7109,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.0756922378227356,
      "learning_rate": 0.0005999986405514987,
      "loss": 8.4609,
      "step": 11
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.9277829127413892,
      "learning_rate": 0.0005999945622196846,
      "loss": 8.2344,
      "step": 12
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.8084581786682467,
      "learning_rate": 0.0005999877650456265,
      "loss": 8.125,
      "step": 13
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.7635084596900947,
      "learning_rate": 0.000599978249097772,
      "loss": 7.9766,
      "step": 14
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.9186699644247788,
      "learning_rate": 0.0005999660144719463,
      "loss": 7.8555,
      "step": 15
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.6609504256551479,
      "learning_rate": 0.0005999510612913519,
      "loss": 7.7734,
      "step": 16
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.7086232844782971,
      "learning_rate": 0.0005999333897065673,
      "loss": 7.7148,
      "step": 17
    },
    {
      "epoch": 0.02,
      "grad_norm": 16.38048851691348,
      "learning_rate": 0.0005999129998955453,
      "loss": 8.4844,
      "step": 18
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.3057527590449889,
      "learning_rate": 0.0005998898920636111,
      "loss": 7.7539,
      "step": 19
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.6966048242948986,
      "learning_rate": 0.00059986406644346,
      "loss": 7.75,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.6348089115348993,
      "learning_rate": 0.0005998355232951559,
      "loss": 7.7031,
      "step": 21
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.7829163518610293,
      "learning_rate": 0.0005998042629061279,
      "loss": 7.6992,
      "step": 22
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.5900591778980369,
      "learning_rate": 0.0005997702855911678,
      "loss": 7.6016,
      "step": 23
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.4655170213064256,
      "learning_rate": 0.0005997335916924268,
      "loss": 7.5977,
      "step": 24
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.6287348258915756,
      "learning_rate": 0.0005996941815794121,
      "loss": 7.5586,
      "step": 25
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.6137321903884564,
      "learning_rate": 0.0005996520556489831,
      "loss": 7.5898,
      "step": 26
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.44962562710631065,
      "learning_rate": 0.0005996072143253473,
      "loss": 7.4336,
      "step": 27
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.46130046454703316,
      "learning_rate": 0.0005995596580600566,
      "loss": 7.4023,
      "step": 28
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.4686712675731326,
      "learning_rate": 0.0005995093873320018,
      "loss": 7.3789,
      "step": 29
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.4672147564288997,
      "learning_rate": 0.0005994564026474087,
      "loss": 7.3711,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.40408354581233474,
      "learning_rate": 0.0005994007045398324,
      "loss": 7.3672,
      "step": 31
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.46032146732584733,
      "learning_rate": 0.0005993422935701524,
      "loss": 7.3477,
      "step": 32
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.4765534634593268,
      "learning_rate": 0.0005992811703265664,
      "loss": 7.3555,
      "step": 33
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.46208489386235113,
      "learning_rate": 0.0005992173354245849,
      "loss": 7.3047,
      "step": 34
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.2956144524964961,
      "learning_rate": 0.0005991507895070244,
      "loss": 7.3125,
      "step": 35
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.4834645389868856,
      "learning_rate": 0.0005990815332440017,
      "loss": 7.207,
      "step": 36
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.4411831350968505,
      "learning_rate": 0.0005990095673329266,
      "loss": 7.1758,
      "step": 37
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.24809297748968667,
      "learning_rate": 0.0005989348924984951,
      "loss": 7.2188,
      "step": 38
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.39402988416840584,
      "learning_rate": 0.0005988575094926817,
      "loss": 7.1953,
      "step": 39
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.3868345222189167,
      "learning_rate": 0.0005987774190947328,
      "loss": 7.1641,
      "step": 40
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.3777261230135448,
      "learning_rate": 0.0005986946221111575,
      "loss": 7.1328,
      "step": 41
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.4687511444077827,
      "learning_rate": 0.0005986091193757206,
      "loss": 7.0898,
      "step": 42
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.34935796211612463,
      "learning_rate": 0.0005985209117494337,
      "loss": 7.1367,
      "step": 43
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.38764476686849886,
      "learning_rate": 0.0005984300001205466,
      "loss": 7.125,
      "step": 44
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.3956487898882936,
      "learning_rate": 0.0005983363854045386,
      "loss": 7.1094,
      "step": 45
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.31140257544677513,
      "learning_rate": 0.0005982400685441084,
      "loss": 7.0898,
      "step": 46
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.3664476570531787,
      "learning_rate": 0.0005981410505091662,
      "loss": 7.0664,
      "step": 47
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.31891741142945207,
      "learning_rate": 0.0005980393322968223,
      "loss": 7.0273,
      "step": 48
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.4533529037337155,
      "learning_rate": 0.0005979349149313778,
      "loss": 7.0586,
      "step": 49
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.30532331638835586,
      "learning_rate": 0.0005978277994643147,
      "loss": 7.0195,
      "step": 50
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.6501991746260075,
      "learning_rate": 0.0005977179869742844,
      "loss": 6.9648,
      "step": 51
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.43904455901717926,
      "learning_rate": 0.0005976054785670975,
      "loss": 6.9805,
      "step": 52
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.4826001598483571,
      "learning_rate": 0.0005974902753757124,
      "loss": 6.9297,
      "step": 53
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.2924998027034648,
      "learning_rate": 0.000597372378560224,
      "loss": 6.8984,
      "step": 54
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.4439033666380787,
      "learning_rate": 0.0005972517893078517,
      "loss": 6.8945,
      "step": 55
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.6135914255073411,
      "learning_rate": 0.0005971285088329284,
      "loss": 6.9727,
      "step": 56
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.5575686565598483,
      "learning_rate": 0.0005970025383768866,
      "loss": 6.9219,
      "step": 57
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.4820951675994578,
      "learning_rate": 0.0005968738792082478,
      "loss": 6.8516,
      "step": 58
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.40164190019465584,
      "learning_rate": 0.0005967425326226082,
      "loss": 6.7734,
      "step": 59
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.46129863945181293,
      "learning_rate": 0.0005966084999426265,
      "loss": 6.8125,
      "step": 60
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.33322355827118677,
      "learning_rate": 0.0005964717825180101,
      "loss": 6.7891,
      "step": 61
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.3847525153855558,
      "learning_rate": 0.0005963323817255024,
      "loss": 6.8242,
      "step": 62
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.3384433591375982,
      "learning_rate": 0.0005961902989688674,
      "loss": 6.707,
      "step": 63
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.3937003195165685,
      "learning_rate": 0.000596045535678877,
      "loss": 6.8203,
      "step": 64
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.35423488053528107,
      "learning_rate": 0.0005958980933132962,
      "loss": 6.7383,
      "step": 65
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.36005939745315396,
      "learning_rate": 0.0005957479733568675,
      "loss": 6.7109,
      "step": 66
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.3499278317706933,
      "learning_rate": 0.0005955951773212976,
      "loss": 6.7266,
      "step": 67
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.3708385192137018,
      "learning_rate": 0.0005954397067452407,
      "loss": 6.7617,
      "step": 68
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.3775657656205869,
      "learning_rate": 0.0005952815631942839,
      "loss": 6.7148,
      "step": 69
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.3040083750375816,
      "learning_rate": 0.0005951207482609307,
      "loss": 6.5938,
      "step": 70
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.3443020808841468,
      "learning_rate": 0.0005949572635645861,
      "loss": 6.6523,
      "step": 71
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.3520066316939,
      "learning_rate": 0.0005947911107515389,
      "loss": 6.6211,
      "step": 72
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.3739040572679613,
      "learning_rate": 0.0005946222914949462,
      "loss": 6.5547,
      "step": 73
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.34890731989025553,
      "learning_rate": 0.000594450807494816,
      "loss": 6.5859,
      "step": 74
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.40910932350136514,
      "learning_rate": 0.0005942766604779903,
      "loss": 6.5547,
      "step": 75
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5698342865852906,
      "learning_rate": 0.0005940998521981274,
      "loss": 6.457,
      "step": 76
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5179452709555474,
      "learning_rate": 0.0005939203844356852,
      "loss": 6.5547,
      "step": 77
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5222512938673792,
      "learning_rate": 0.0005937382589979016,
      "loss": 6.5039,
      "step": 78
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5682332793686307,
      "learning_rate": 0.0005935534777187781,
      "loss": 6.5547,
      "step": 79
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.3869287710460676,
      "learning_rate": 0.0005933660424590598,
      "loss": 6.5156,
      "step": 80
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.3078211032807607,
      "learning_rate": 0.000593175955106218,
      "loss": 6.4258,
      "step": 81
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.3611357511872241,
      "learning_rate": 0.00059298321757443,
      "loss": 6.4727,
      "step": 82
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.29633467844266953,
      "learning_rate": 0.0005927878318045608,
      "loss": 6.3281,
      "step": 83
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.3257574200776832,
      "learning_rate": 0.0005925897997641426,
      "loss": 6.3203,
      "step": 84
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.2824054533852328,
      "learning_rate": 0.0005923891234473562,
      "loss": 6.4062,
      "step": 85
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.3056199770204573,
      "learning_rate": 0.0005921858048750097,
      "loss": 6.3984,
      "step": 86
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.2966438824341908,
      "learning_rate": 0.000591979846094519,
      "loss": 6.3555,
      "step": 87
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.32782438676663733,
      "learning_rate": 0.0005917712491798866,
      "loss": 6.4023,
      "step": 88
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.3538316399620157,
      "learning_rate": 0.0005915600162316811,
      "loss": 6.2812,
      "step": 89
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.375858298192913,
      "learning_rate": 0.0005913461493770162,
      "loss": 6.3086,
      "step": 90
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.5189251339815161,
      "learning_rate": 0.0005911296507695284,
      "loss": 6.2812,
      "step": 91
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.6304909542669104,
      "learning_rate": 0.0005909105225893564,
      "loss": 6.2969,
      "step": 92
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.4655662819622591,
      "learning_rate": 0.0005906887670431187,
      "loss": 6.1953,
      "step": 93
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.39035390983920965,
      "learning_rate": 0.000590464386363891,
      "loss": 6.2617,
      "step": 94
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.4918417851770978,
      "learning_rate": 0.0005902373828111843,
      "loss": 6.2148,
      "step": 95
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.35670770889552555,
      "learning_rate": 0.0005900077586709219,
      "loss": 6.2461,
      "step": 96
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.4177985869939347,
      "learning_rate": 0.0005897755162554163,
      "loss": 6.1797,
      "step": 97
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.3742471130708234,
      "learning_rate": 0.000589540657903346,
      "loss": 6.1406,
      "step": 98
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.28627666723978284,
      "learning_rate": 0.0005893031859797322,
      "loss": 6.2031,
      "step": 99
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.32238563846046103,
      "learning_rate": 0.0005890631028759143,
      "loss": 6.0625,
      "step": 100
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.2556625657587849,
      "learning_rate": 0.0005888204110095265,
      "loss": 6.1797,
      "step": 101
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.35463629701710253,
      "learning_rate": 0.0005885751128244734,
      "loss": 6.125,
      "step": 102
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.31975770214936095,
      "learning_rate": 0.0005883272107909048,
      "loss": 6.1836,
      "step": 103
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.3464621815245048,
      "learning_rate": 0.0005880767074051915,
      "loss": 6.125,
      "step": 104
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.3663428920796654,
      "learning_rate": 0.0005878236051898998,
      "loss": 6.0781,
      "step": 105
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.31594460565215293,
      "learning_rate": 0.0005875679066937664,
      "loss": 6.082,
      "step": 106
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.3552617109396582,
      "learning_rate": 0.000587309614491672,
      "loss": 6.1016,
      "step": 107
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.307016409692456,
      "learning_rate": 0.0005870487311846164,
      "loss": 6.1406,
      "step": 108
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.32188902148474213,
      "learning_rate": 0.0005867852593996914,
      "loss": 6.0039,
      "step": 109
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.25501199715105083,
      "learning_rate": 0.0005865192017900551,
      "loss": 6.0938,
      "step": 110
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.3416203070024056,
      "learning_rate": 0.0005862505610349049,
      "loss": 6.0234,
      "step": 111
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.3562508875852537,
      "learning_rate": 0.0005859793398394498,
      "loss": 6.0469,
      "step": 112
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.4443953757302568,
      "learning_rate": 0.0005857055409348845,
      "loss": 5.9766,
      "step": 113
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.42023839332714596,
      "learning_rate": 0.0005854291670783607,
      "loss": 6.0781,
      "step": 114
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.4618323255809241,
      "learning_rate": 0.0005851502210529604,
      "loss": 5.9727,
      "step": 115
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.379195014798667,
      "learning_rate": 0.0005848687056676668,
      "loss": 5.9922,
      "step": 116
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.3931552573296799,
      "learning_rate": 0.0005845846237573366,
      "loss": 5.9492,
      "step": 117
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.2567080044949908,
      "learning_rate": 0.0005842979781826717,
      "loss": 6.0273,
      "step": 118
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.4190305965377807,
      "learning_rate": 0.0005840087718301895,
      "loss": 6.0391,
      "step": 119
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.3996803869430228,
      "learning_rate": 0.0005837170076121951,
      "loss": 5.9531,
      "step": 120
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.478219248015785,
      "learning_rate": 0.000583422688466751,
      "loss": 6.0586,
      "step": 121
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.40869844309811526,
      "learning_rate": 0.0005831258173576474,
      "loss": 6.0117,
      "step": 122
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.3728598080697978,
      "learning_rate": 0.0005828263972743733,
      "loss": 5.9375,
      "step": 123
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.3560055462882015,
      "learning_rate": 0.0005825244312320856,
      "loss": 5.9531,
      "step": 124
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.40446932887864323,
      "learning_rate": 0.0005822199222715787,
      "loss": 5.9609,
      "step": 125
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.38514065739946723,
      "learning_rate": 0.000581912873459255,
      "loss": 5.8594,
      "step": 126
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.35367576386319416,
      "learning_rate": 0.0005816032878870921,
      "loss": 5.9023,
      "step": 127
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.3341681995122829,
      "learning_rate": 0.0005812911686726135,
      "loss": 5.9062,
      "step": 128
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.3387022688975784,
      "learning_rate": 0.0005809765189588563,
      "loss": 5.8945,
      "step": 129
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.31638659898934757,
      "learning_rate": 0.0005806593419143395,
      "loss": 5.8242,
      "step": 130
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.3229678508227436,
      "learning_rate": 0.0005803396407330325,
      "loss": 5.8516,
      "step": 131
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.35499490868584455,
      "learning_rate": 0.0005800174186343226,
      "loss": 5.9258,
      "step": 132
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.40753171542848754,
      "learning_rate": 0.0005796926788629828,
      "loss": 5.8242,
      "step": 133
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.3625374018348824,
      "learning_rate": 0.0005793654246891389,
      "loss": 5.832,
      "step": 134
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.3583489573569317,
      "learning_rate": 0.000579035659408237,
      "loss": 5.8398,
      "step": 135
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.39657706318861896,
      "learning_rate": 0.0005787033863410095,
      "loss": 5.8633,
      "step": 136
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.3965837889564036,
      "learning_rate": 0.0005783686088334428,
      "loss": 5.8633,
      "step": 137
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.29496474301865566,
      "learning_rate": 0.0005780313302567424,
      "loss": 5.8203,
      "step": 138
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.44637192639243695,
      "learning_rate": 0.0005776915540073001,
      "loss": 5.8477,
      "step": 139
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.39605473508683114,
      "learning_rate": 0.0005773492835066587,
      "loss": 5.7383,
      "step": 140
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.3008962634266945,
      "learning_rate": 0.0005770045222014786,
      "loss": 5.7617,
      "step": 141
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.36915495506607826,
      "learning_rate": 0.0005766572735635022,
      "loss": 5.7695,
      "step": 142
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.3282300349560706,
      "learning_rate": 0.0005763075410895193,
      "loss": 5.8281,
      "step": 143
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.2747449814083844,
      "learning_rate": 0.0005759553283013323,
      "loss": 5.7812,
      "step": 144
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.28905882704179764,
      "learning_rate": 0.00057560063874572,
      "loss": 5.7344,
      "step": 145
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.280625988867192,
      "learning_rate": 0.000575243475994402,
      "loss": 5.7773,
      "step": 146
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.41061863948012467,
      "learning_rate": 0.0005748838436440035,
      "loss": 5.7578,
      "step": 147
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.4920152483870267,
      "learning_rate": 0.0005745217453160183,
      "loss": 5.7305,
      "step": 148
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.5463207978955044,
      "learning_rate": 0.0005741571846567725,
      "loss": 5.7383,
      "step": 149
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.3986359831157306,
      "learning_rate": 0.0005737901653373878,
      "loss": 5.668,
      "step": 150
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.37908758170100293,
      "learning_rate": 0.0005734206910537447,
      "loss": 5.6875,
      "step": 151
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.35929793070492694,
      "learning_rate": 0.0005730487655264451,
      "loss": 5.7188,
      "step": 152
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.4217799574145456,
      "learning_rate": 0.0005726743925007751,
      "loss": 5.7305,
      "step": 153
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.4024411981587195,
      "learning_rate": 0.0005722975757466667,
      "loss": 5.6289,
      "step": 154
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.3472391905877033,
      "learning_rate": 0.0005719183190586606,
      "loss": 5.6523,
      "step": 155
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.31752956812138816,
      "learning_rate": 0.0005715366262558675,
      "loss": 5.6172,
      "step": 156
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.3170152384332457,
      "learning_rate": 0.0005711525011819294,
      "loss": 5.6172,
      "step": 157
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.40520629326601837,
      "learning_rate": 0.0005707659477049818,
      "loss": 5.625,
      "step": 158
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.3965976910198806,
      "learning_rate": 0.0005703769697176137,
      "loss": 5.6562,
      "step": 159
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.40422960541801994,
      "learning_rate": 0.0005699855711368293,
      "loss": 5.6836,
      "step": 160
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.3780813184050647,
      "learning_rate": 0.0005695917559040079,
      "loss": 5.5938,
      "step": 161
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.36917638857736573,
      "learning_rate": 0.0005691955279848645,
      "loss": 5.668,
      "step": 162
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.37769176081037814,
      "learning_rate": 0.0005687968913694098,
      "loss": 5.4961,
      "step": 163
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.3255116524991148,
      "learning_rate": 0.0005683958500719103,
      "loss": 5.5117,
      "step": 164
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.31897629016848805,
      "learning_rate": 0.0005679924081308471,
      "loss": 5.5664,
      "step": 165
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.2869064236553046,
      "learning_rate": 0.0005675865696088764,
      "loss": 5.5391,
      "step": 166
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.29226729022634845,
      "learning_rate": 0.0005671783385927873,
      "loss": 5.5586,
      "step": 167
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.2534117210955766,
      "learning_rate": 0.0005667677191934618,
      "loss": 5.5312,
      "step": 168
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.289828484125484,
      "learning_rate": 0.0005663547155458326,
      "loss": 5.6484,
      "step": 169
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.2717242930342115,
      "learning_rate": 0.0005659393318088419,
      "loss": 5.5352,
      "step": 170
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.3595538109137759,
      "learning_rate": 0.0005655215721653993,
      "loss": 5.5742,
      "step": 171
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.4255054350471108,
      "learning_rate": 0.0005651014408223398,
      "loss": 5.5469,
      "step": 172
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.3670561941219979,
      "learning_rate": 0.0005646789420103814,
      "loss": 5.5078,
      "step": 173
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.40280130904983164,
      "learning_rate": 0.0005642540799840822,
      "loss": 5.5,
      "step": 174
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.41159472035983025,
      "learning_rate": 0.0005638268590217984,
      "loss": 5.5039,
      "step": 175
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.4316778037513652,
      "learning_rate": 0.0005633972834256401,
      "loss": 5.5352,
      "step": 176
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.5674781128363939,
      "learning_rate": 0.000562965357521429,
      "loss": 5.4336,
      "step": 177
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.41654662151365446,
      "learning_rate": 0.0005625310856586541,
      "loss": 5.6211,
      "step": 178
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.5159976364107484,
      "learning_rate": 0.0005620944722104282,
      "loss": 5.4844,
      "step": 179
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.34364678177014185,
      "learning_rate": 0.0005616555215734438,
      "loss": 5.4922,
      "step": 180
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.3708077784459011,
      "learning_rate": 0.0005612142381679289,
      "loss": 5.5234,
      "step": 181
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.3620051253453866,
      "learning_rate": 0.0005607706264376028,
      "loss": 5.4961,
      "step": 182
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.34735585210929654,
      "learning_rate": 0.0005603246908496305,
      "loss": 5.4453,
      "step": 183
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.37719874705792217,
      "learning_rate": 0.0005598764358945783,
      "loss": 5.4844,
      "step": 184
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.3749130664831207,
      "learning_rate": 0.0005594258660863689,
      "loss": 5.4648,
      "step": 185
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.40951353306235827,
      "learning_rate": 0.0005589729859622351,
      "loss": 5.5039,
      "step": 186
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.40146882563949804,
      "learning_rate": 0.0005585178000826745,
      "loss": 5.3672,
      "step": 187
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.4062987628428303,
      "learning_rate": 0.0005580603130314043,
      "loss": 5.3984,
      "step": 188
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.35626322654799136,
      "learning_rate": 0.0005576005294153138,
      "loss": 5.3984,
      "step": 189
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.3140647930801716,
      "learning_rate": 0.0005571384538644188,
      "loss": 5.3906,
      "step": 190
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.2990060538353662,
      "learning_rate": 0.0005566740910318153,
      "loss": 5.3711,
      "step": 191
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.3337525907515936,
      "learning_rate": 0.0005562074455936315,
      "loss": 5.4023,
      "step": 192
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.3381587051014816,
      "learning_rate": 0.000555738522248982,
      "loss": 5.4414,
      "step": 193
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.2954008999469894,
      "learning_rate": 0.0005552673257199197,
      "loss": 5.418,
      "step": 194
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.3242310900810155,
      "learning_rate": 0.0005547938607513882,
      "loss": 5.418,
      "step": 195
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.3149021804393678,
      "learning_rate": 0.0005543181321111747,
      "loss": 5.4375,
      "step": 196
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.32859412218218814,
      "learning_rate": 0.0005538401445898612,
      "loss": 5.4492,
      "step": 197
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.2960282598050701,
      "learning_rate": 0.0005533599030007768,
      "loss": 5.3867,
      "step": 198
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.2866762878199755,
      "learning_rate": 0.0005528774121799489,
      "loss": 5.3789,
      "step": 199
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.34865216327038784,
      "learning_rate": 0.0005523926769860549,
      "loss": 5.3711,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 1000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 0.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}