{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 20.0,
"eval_steps": 500,
"global_step": 100280,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0997207818109294,
"grad_norm": 0.9261826276779175,
"learning_rate": 4.975069804547268e-05,
"loss": 9.3488,
"step": 500
},
{
"epoch": 0.1994415636218588,
"grad_norm": 0.5372676253318787,
"learning_rate": 4.9501396090945354e-05,
"loss": 8.0556,
"step": 1000
},
{
"epoch": 0.2991623454327882,
"grad_norm": 0.5985580086708069,
"learning_rate": 4.925209413641803e-05,
"loss": 7.7905,
"step": 1500
},
{
"epoch": 0.3988831272437176,
"grad_norm": 0.7738422751426697,
"learning_rate": 4.900279218189071e-05,
"loss": 7.5996,
"step": 2000
},
{
"epoch": 0.49860390905464697,
"grad_norm": 0.8470643162727356,
"learning_rate": 4.875349022736339e-05,
"loss": 7.4439,
"step": 2500
},
{
"epoch": 0.5983246908655764,
"grad_norm": 0.9632411003112793,
"learning_rate": 4.850418827283606e-05,
"loss": 7.2826,
"step": 3000
},
{
"epoch": 0.6980454726765057,
"grad_norm": 0.9540568590164185,
"learning_rate": 4.825488631830874e-05,
"loss": 7.1712,
"step": 3500
},
{
"epoch": 0.7977662544874352,
"grad_norm": 0.9605555534362793,
"learning_rate": 4.8005584363781416e-05,
"loss": 7.0881,
"step": 4000
},
{
"epoch": 0.8974870362983646,
"grad_norm": 1.101860523223877,
"learning_rate": 4.775678101316314e-05,
"loss": 7.0052,
"step": 4500
},
{
"epoch": 0.9972078181092939,
"grad_norm": 1.8117992877960205,
"learning_rate": 4.750747905863582e-05,
"loss": 6.9103,
"step": 5000
},
{
"epoch": 1.0969285999202234,
"grad_norm": 1.2837625741958618,
"learning_rate": 4.72581771041085e-05,
"loss": 6.8231,
"step": 5500
},
{
"epoch": 1.1966493817311528,
"grad_norm": 1.3095475435256958,
"learning_rate": 4.7008875149581175e-05,
"loss": 6.7796,
"step": 6000
},
{
"epoch": 1.2963701635420821,
"grad_norm": 1.4652228355407715,
"learning_rate": 4.6760071798962906e-05,
"loss": 6.707,
"step": 6500
},
{
"epoch": 1.3960909453530115,
"grad_norm": 1.357987403869629,
"learning_rate": 4.651076984443558e-05,
"loss": 6.6568,
"step": 7000
},
{
"epoch": 1.4958117271639408,
"grad_norm": 1.31229829788208,
"learning_rate": 4.626146788990826e-05,
"loss": 6.6144,
"step": 7500
},
{
"epoch": 1.5955325089748702,
"grad_norm": 1.4246258735656738,
"learning_rate": 4.6012165935380934e-05,
"loss": 6.5669,
"step": 8000
},
{
"epoch": 1.6952532907857998,
"grad_norm": 1.3963290452957153,
"learning_rate": 4.576286398085361e-05,
"loss": 6.508,
"step": 8500
},
{
"epoch": 1.7949740725967291,
"grad_norm": 1.5991405248641968,
"learning_rate": 4.551406063023534e-05,
"loss": 6.4651,
"step": 9000
},
{
"epoch": 1.8946948544076585,
"grad_norm": 1.6727421283721924,
"learning_rate": 4.526475867570802e-05,
"loss": 6.4129,
"step": 9500
},
{
"epoch": 1.994415636218588,
"grad_norm": 1.5657908916473389,
"learning_rate": 4.50154567211807e-05,
"loss": 6.3859,
"step": 10000
},
{
"epoch": 2.0941364180295174,
"grad_norm": 1.4079279899597168,
"learning_rate": 4.4766154766653375e-05,
"loss": 6.3136,
"step": 10500
},
{
"epoch": 2.193857199840447,
"grad_norm": 1.4724199771881104,
"learning_rate": 4.45173514160351e-05,
"loss": 6.2935,
"step": 11000
},
{
"epoch": 2.293577981651376,
"grad_norm": 1.7075014114379883,
"learning_rate": 4.426804946150778e-05,
"loss": 6.2724,
"step": 11500
},
{
"epoch": 2.3932987634623055,
"grad_norm": 1.550031304359436,
"learning_rate": 4.401874750698046e-05,
"loss": 6.2526,
"step": 12000
},
{
"epoch": 2.493019545273235,
"grad_norm": 1.5938421487808228,
"learning_rate": 4.376944555245313e-05,
"loss": 6.2218,
"step": 12500
},
{
"epoch": 2.5927403270841642,
"grad_norm": 1.853521704673767,
"learning_rate": 4.3520642201834866e-05,
"loss": 6.1679,
"step": 13000
},
{
"epoch": 2.6924611088950936,
"grad_norm": 1.7296770811080933,
"learning_rate": 4.327134024730754e-05,
"loss": 6.1442,
"step": 13500
},
{
"epoch": 2.792181890706023,
"grad_norm": 1.752852201461792,
"learning_rate": 4.302203829278022e-05,
"loss": 6.1241,
"step": 14000
},
{
"epoch": 2.8919026725169523,
"grad_norm": 1.6265596151351929,
"learning_rate": 4.2772736338252893e-05,
"loss": 6.0973,
"step": 14500
},
{
"epoch": 2.9916234543278817,
"grad_norm": 1.7537871599197388,
"learning_rate": 4.2523932987634625e-05,
"loss": 6.0858,
"step": 15000
},
{
"epoch": 3.0913442361388115,
"grad_norm": 1.5949361324310303,
"learning_rate": 4.22746310331073e-05,
"loss": 6.0188,
"step": 15500
},
{
"epoch": 3.191065017949741,
"grad_norm": 1.907575011253357,
"learning_rate": 4.202532907857998e-05,
"loss": 6.0118,
"step": 16000
},
{
"epoch": 3.29078579976067,
"grad_norm": 1.8919939994812012,
"learning_rate": 4.177602712405265e-05,
"loss": 5.9883,
"step": 16500
},
{
"epoch": 3.3905065815715996,
"grad_norm": 1.8701094388961792,
"learning_rate": 4.1527223773434384e-05,
"loss": 5.9725,
"step": 17000
},
{
"epoch": 3.490227363382529,
"grad_norm": 2.043443202972412,
"learning_rate": 4.127792181890706e-05,
"loss": 5.9474,
"step": 17500
},
{
"epoch": 3.5899481451934583,
"grad_norm": 1.852910041809082,
"learning_rate": 4.102861986437974e-05,
"loss": 5.9103,
"step": 18000
},
{
"epoch": 3.6896689270043876,
"grad_norm": 1.7579346895217896,
"learning_rate": 4.077931790985242e-05,
"loss": 5.9047,
"step": 18500
},
{
"epoch": 3.789389708815317,
"grad_norm": 1.8655468225479126,
"learning_rate": 4.053051455923414e-05,
"loss": 5.9043,
"step": 19000
},
{
"epoch": 3.8891104906262464,
"grad_norm": 2.0585408210754395,
"learning_rate": 4.0281212604706826e-05,
"loss": 5.8705,
"step": 19500
},
{
"epoch": 3.988831272437176,
"grad_norm": 2.088595151901245,
"learning_rate": 4.00319106501795e-05,
"loss": 5.8608,
"step": 20000
},
{
"epoch": 4.0885520542481055,
"grad_norm": 1.8989760875701904,
"learning_rate": 3.978260869565217e-05,
"loss": 5.8265,
"step": 20500
},
{
"epoch": 4.188272836059035,
"grad_norm": 1.8559260368347168,
"learning_rate": 3.953330674112485e-05,
"loss": 5.8052,
"step": 21000
},
{
"epoch": 4.287993617869964,
"grad_norm": 2.0016090869903564,
"learning_rate": 3.9284503390506585e-05,
"loss": 5.7947,
"step": 21500
},
{
"epoch": 4.387714399680894,
"grad_norm": 2.0224192142486572,
"learning_rate": 3.903520143597926e-05,
"loss": 5.7605,
"step": 22000
},
{
"epoch": 4.487435181491823,
"grad_norm": 1.9289922714233398,
"learning_rate": 3.8785899481451936e-05,
"loss": 5.7315,
"step": 22500
},
{
"epoch": 4.587155963302752,
"grad_norm": 2.1070337295532227,
"learning_rate": 3.853659752692461e-05,
"loss": 5.7517,
"step": 23000
},
{
"epoch": 4.686876745113682,
"grad_norm": 2.091681718826294,
"learning_rate": 3.8287794176306343e-05,
"loss": 5.7206,
"step": 23500
},
{
"epoch": 4.786597526924611,
"grad_norm": 2.0619523525238037,
"learning_rate": 3.803849222177902e-05,
"loss": 5.6829,
"step": 24000
},
{
"epoch": 4.88631830873554,
"grad_norm": 2.1252663135528564,
"learning_rate": 3.7789190267251695e-05,
"loss": 5.7028,
"step": 24500
},
{
"epoch": 4.98603909054647,
"grad_norm": 2.179452657699585,
"learning_rate": 3.753988831272438e-05,
"loss": 5.6975,
"step": 25000
},
{
"epoch": 5.085759872357399,
"grad_norm": 2.054488182067871,
"learning_rate": 3.72910849621061e-05,
"loss": 5.6527,
"step": 25500
},
{
"epoch": 5.1854806541683285,
"grad_norm": 2.3839542865753174,
"learning_rate": 3.704178300757878e-05,
"loss": 5.6498,
"step": 26000
},
{
"epoch": 5.285201435979258,
"grad_norm": 1.9893797636032104,
"learning_rate": 3.679248105305146e-05,
"loss": 5.6103,
"step": 26500
},
{
"epoch": 5.384922217790187,
"grad_norm": 2.089535713195801,
"learning_rate": 3.654317909852413e-05,
"loss": 5.6011,
"step": 27000
},
{
"epoch": 5.484642999601117,
"grad_norm": 1.9748643636703491,
"learning_rate": 3.629387714399681e-05,
"loss": 5.6206,
"step": 27500
},
{
"epoch": 5.584363781412046,
"grad_norm": 2.1696887016296387,
"learning_rate": 3.6045073793378544e-05,
"loss": 5.607,
"step": 28000
},
{
"epoch": 5.684084563222975,
"grad_norm": 2.2261533737182617,
"learning_rate": 3.579577183885121e-05,
"loss": 5.5895,
"step": 28500
},
{
"epoch": 5.783805345033905,
"grad_norm": 1.9739435911178589,
"learning_rate": 3.5546469884323896e-05,
"loss": 5.5654,
"step": 29000
},
{
"epoch": 5.883526126844835,
"grad_norm": 2.3373613357543945,
"learning_rate": 3.529716792979657e-05,
"loss": 5.554,
"step": 29500
},
{
"epoch": 5.983246908655763,
"grad_norm": 2.0227203369140625,
"learning_rate": 3.50483645791783e-05,
"loss": 5.569,
"step": 30000
},
{
"epoch": 6.082967690466694,
"grad_norm": 2.1894445419311523,
"learning_rate": 3.479906262465098e-05,
"loss": 5.5268,
"step": 30500
},
{
"epoch": 6.182688472277623,
"grad_norm": 2.3545119762420654,
"learning_rate": 3.4549760670123655e-05,
"loss": 5.5102,
"step": 31000
},
{
"epoch": 6.282409254088552,
"grad_norm": 2.380277156829834,
"learning_rate": 3.430045871559634e-05,
"loss": 5.5301,
"step": 31500
},
{
"epoch": 6.382130035899482,
"grad_norm": 2.288188934326172,
"learning_rate": 3.405165536497806e-05,
"loss": 5.4927,
"step": 32000
},
{
"epoch": 6.481850817710411,
"grad_norm": 2.2211456298828125,
"learning_rate": 3.380235341045074e-05,
"loss": 5.4786,
"step": 32500
},
{
"epoch": 6.58157159952134,
"grad_norm": 2.5629711151123047,
"learning_rate": 3.355305145592342e-05,
"loss": 5.4932,
"step": 33000
},
{
"epoch": 6.68129238133227,
"grad_norm": 2.385563611984253,
"learning_rate": 3.330374950139609e-05,
"loss": 5.4715,
"step": 33500
},
{
"epoch": 6.781013163143199,
"grad_norm": 2.284985303878784,
"learning_rate": 3.305494615077782e-05,
"loss": 5.4599,
"step": 34000
},
{
"epoch": 6.8807339449541285,
"grad_norm": 2.3653366565704346,
"learning_rate": 3.2805644196250504e-05,
"loss": 5.4498,
"step": 34500
},
{
"epoch": 6.980454726765058,
"grad_norm": 2.311102867126465,
"learning_rate": 3.255634224172317e-05,
"loss": 5.4315,
"step": 35000
},
{
"epoch": 7.080175508575987,
"grad_norm": 2.4815216064453125,
"learning_rate": 3.2307040287195855e-05,
"loss": 5.4282,
"step": 35500
},
{
"epoch": 7.179896290386917,
"grad_norm": 2.228046178817749,
"learning_rate": 3.205823693657759e-05,
"loss": 5.4321,
"step": 36000
},
{
"epoch": 7.279617072197846,
"grad_norm": 2.459022283554077,
"learning_rate": 3.1808934982050256e-05,
"loss": 5.3886,
"step": 36500
},
{
"epoch": 7.379337854008775,
"grad_norm": 2.217167615890503,
"learning_rate": 3.155963302752294e-05,
"loss": 5.3924,
"step": 37000
},
{
"epoch": 7.479058635819705,
"grad_norm": 2.3231680393218994,
"learning_rate": 3.1310331072995614e-05,
"loss": 5.389,
"step": 37500
},
{
"epoch": 7.578779417630634,
"grad_norm": 2.220628261566162,
"learning_rate": 3.1061527722377346e-05,
"loss": 5.3856,
"step": 38000
},
{
"epoch": 7.678500199441563,
"grad_norm": 2.612741708755493,
"learning_rate": 3.081222576785002e-05,
"loss": 5.3851,
"step": 38500
},
{
"epoch": 7.778220981252493,
"grad_norm": 2.194031000137329,
"learning_rate": 3.05629238133227e-05,
"loss": 5.3744,
"step": 39000
},
{
"epoch": 7.877941763063422,
"grad_norm": 2.342750310897827,
"learning_rate": 3.0313621858795377e-05,
"loss": 5.3644,
"step": 39500
},
{
"epoch": 7.9776625448743514,
"grad_norm": 2.273401975631714,
"learning_rate": 3.0064818508177105e-05,
"loss": 5.3828,
"step": 40000
},
{
"epoch": 8.07738332668528,
"grad_norm": 2.5998456478118896,
"learning_rate": 2.981551655364978e-05,
"loss": 5.3399,
"step": 40500
},
{
"epoch": 8.177104108496211,
"grad_norm": 2.4312164783477783,
"learning_rate": 2.956621459912246e-05,
"loss": 5.3491,
"step": 41000
},
{
"epoch": 8.27682489030714,
"grad_norm": 2.1767194271087646,
"learning_rate": 2.9316912644595136e-05,
"loss": 5.318,
"step": 41500
},
{
"epoch": 8.37654567211807,
"grad_norm": 2.546261787414551,
"learning_rate": 2.9068109293976864e-05,
"loss": 5.3169,
"step": 42000
},
{
"epoch": 8.476266453928998,
"grad_norm": 2.5187346935272217,
"learning_rate": 2.8818807339449543e-05,
"loss": 5.3051,
"step": 42500
},
{
"epoch": 8.575987235739928,
"grad_norm": 2.4358792304992676,
"learning_rate": 2.856950538492222e-05,
"loss": 5.3063,
"step": 43000
},
{
"epoch": 8.675708017550857,
"grad_norm": 2.22619891166687,
"learning_rate": 2.8320203430394898e-05,
"loss": 5.3241,
"step": 43500
},
{
"epoch": 8.775428799361787,
"grad_norm": 2.6035451889038086,
"learning_rate": 2.8071400079776626e-05,
"loss": 5.2964,
"step": 44000
},
{
"epoch": 8.875149581172716,
"grad_norm": 2.5391156673431396,
"learning_rate": 2.7822098125249302e-05,
"loss": 5.2914,
"step": 44500
},
{
"epoch": 8.974870362983646,
"grad_norm": 2.4130935668945312,
"learning_rate": 2.757279617072198e-05,
"loss": 5.2883,
"step": 45000
},
{
"epoch": 9.074591144794574,
"grad_norm": 2.411205530166626,
"learning_rate": 2.7323494216194657e-05,
"loss": 5.2893,
"step": 45500
},
{
"epoch": 9.174311926605505,
"grad_norm": 2.484266757965088,
"learning_rate": 2.7074690865576385e-05,
"loss": 5.2649,
"step": 46000
},
{
"epoch": 9.274032708416435,
"grad_norm": 2.446840524673462,
"learning_rate": 2.6825388911049064e-05,
"loss": 5.2579,
"step": 46500
},
{
"epoch": 9.373753490227363,
"grad_norm": 2.2476446628570557,
"learning_rate": 2.657608695652174e-05,
"loss": 5.2616,
"step": 47000
},
{
"epoch": 9.473474272038294,
"grad_norm": 2.36161732673645,
"learning_rate": 2.632678500199442e-05,
"loss": 5.2458,
"step": 47500
},
{
"epoch": 9.573195053849222,
"grad_norm": 2.4564807415008545,
"learning_rate": 2.6077981651376147e-05,
"loss": 5.2522,
"step": 48000
},
{
"epoch": 9.672915835660152,
"grad_norm": 2.477536678314209,
"learning_rate": 2.5828679696848823e-05,
"loss": 5.249,
"step": 48500
},
{
"epoch": 9.77263661747108,
"grad_norm": 2.8510327339172363,
"learning_rate": 2.5579377742321503e-05,
"loss": 5.2397,
"step": 49000
},
{
"epoch": 9.872357399282011,
"grad_norm": 2.4770243167877197,
"learning_rate": 2.533007578779418e-05,
"loss": 5.2172,
"step": 49500
},
{
"epoch": 9.97207818109294,
"grad_norm": 2.492191791534424,
"learning_rate": 2.5081272437175906e-05,
"loss": 5.236,
"step": 50000
},
{
"epoch": 10.07179896290387,
"grad_norm": 2.5560014247894287,
"learning_rate": 2.4831970482648582e-05,
"loss": 5.2159,
"step": 50500
},
{
"epoch": 10.171519744714798,
"grad_norm": 2.550168752670288,
"learning_rate": 2.458266852812126e-05,
"loss": 5.2026,
"step": 51000
},
{
"epoch": 10.271240526525728,
"grad_norm": 2.562626600265503,
"learning_rate": 2.4333366573593937e-05,
"loss": 5.2121,
"step": 51500
},
{
"epoch": 10.370961308336657,
"grad_norm": 2.389833927154541,
"learning_rate": 2.408456322297567e-05,
"loss": 5.1963,
"step": 52000
},
{
"epoch": 10.470682090147587,
"grad_norm": 2.617138385772705,
"learning_rate": 2.3835261268448345e-05,
"loss": 5.1838,
"step": 52500
},
{
"epoch": 10.570402871958516,
"grad_norm": 2.6732029914855957,
"learning_rate": 2.358595931392102e-05,
"loss": 5.2135,
"step": 53000
},
{
"epoch": 10.670123653769446,
"grad_norm": 2.509752035140991,
"learning_rate": 2.33366573593937e-05,
"loss": 5.1937,
"step": 53500
},
{
"epoch": 10.769844435580374,
"grad_norm": 2.732623815536499,
"learning_rate": 2.3087355404866376e-05,
"loss": 5.2013,
"step": 54000
},
{
"epoch": 10.869565217391305,
"grad_norm": 2.7967655658721924,
"learning_rate": 2.2838552054248104e-05,
"loss": 5.1751,
"step": 54500
},
{
"epoch": 10.969285999202233,
"grad_norm": 2.6768581867218018,
"learning_rate": 2.2589250099720783e-05,
"loss": 5.1728,
"step": 55000
},
{
"epoch": 11.069006781013163,
"grad_norm": 2.3465123176574707,
"learning_rate": 2.233994814519346e-05,
"loss": 5.1869,
"step": 55500
},
{
"epoch": 11.168727562824092,
"grad_norm": 2.3460209369659424,
"learning_rate": 2.2090646190666138e-05,
"loss": 5.1704,
"step": 56000
},
{
"epoch": 11.268448344635022,
"grad_norm": 2.7022573947906494,
"learning_rate": 2.1841842840047866e-05,
"loss": 5.1508,
"step": 56500
},
{
"epoch": 11.36816912644595,
"grad_norm": 2.5259013175964355,
"learning_rate": 2.1592540885520542e-05,
"loss": 5.1441,
"step": 57000
},
{
"epoch": 11.46788990825688,
"grad_norm": 2.6938321590423584,
"learning_rate": 2.134323893099322e-05,
"loss": 5.1628,
"step": 57500
},
{
"epoch": 11.56761069006781,
"grad_norm": 2.874973773956299,
"learning_rate": 2.1093936976465897e-05,
"loss": 5.1405,
"step": 58000
},
{
"epoch": 11.66733147187874,
"grad_norm": 2.762739896774292,
"learning_rate": 2.0845133625847625e-05,
"loss": 5.1264,
"step": 58500
},
{
"epoch": 11.76705225368967,
"grad_norm": 2.617100954055786,
"learning_rate": 2.0595831671320304e-05,
"loss": 5.1297,
"step": 59000
},
{
"epoch": 11.866773035500598,
"grad_norm": 2.754258632659912,
"learning_rate": 2.034652971679298e-05,
"loss": 5.1349,
"step": 59500
},
{
"epoch": 11.966493817311529,
"grad_norm": 2.57446551322937,
"learning_rate": 2.009722776226566e-05,
"loss": 5.1401,
"step": 60000
},
{
"epoch": 12.066214599122457,
"grad_norm": 2.7069363594055176,
"learning_rate": 1.9848424411647387e-05,
"loss": 5.1093,
"step": 60500
},
{
"epoch": 12.165935380933387,
"grad_norm": 2.5617587566375732,
"learning_rate": 1.9599122457120063e-05,
"loss": 5.1071,
"step": 61000
},
{
"epoch": 12.265656162744316,
"grad_norm": 2.7849984169006348,
"learning_rate": 1.9349820502592742e-05,
"loss": 5.1298,
"step": 61500
},
{
"epoch": 12.365376944555246,
"grad_norm": 2.6736953258514404,
"learning_rate": 1.910051854806542e-05,
"loss": 5.0828,
"step": 62000
},
{
"epoch": 12.465097726366174,
"grad_norm": 2.623760938644409,
"learning_rate": 1.8851715197447146e-05,
"loss": 5.1166,
"step": 62500
},
{
"epoch": 12.564818508177105,
"grad_norm": 3.202988624572754,
"learning_rate": 1.8602413242919826e-05,
"loss": 5.1128,
"step": 63000
},
{
"epoch": 12.664539289988033,
"grad_norm": 2.4918911457061768,
"learning_rate": 1.83531112883925e-05,
"loss": 5.1181,
"step": 63500
},
{
"epoch": 12.764260071798963,
"grad_norm": 2.7274303436279297,
"learning_rate": 1.810380933386518e-05,
"loss": 5.1089,
"step": 64000
},
{
"epoch": 12.863980853609892,
"grad_norm": 2.760390520095825,
"learning_rate": 1.785500598324691e-05,
"loss": 5.1175,
"step": 64500
},
{
"epoch": 12.963701635420822,
"grad_norm": 2.7950050830841064,
"learning_rate": 1.7605704028719585e-05,
"loss": 5.1173,
"step": 65000
},
{
"epoch": 13.06342241723175,
"grad_norm": 2.5247349739074707,
"learning_rate": 1.7356402074192264e-05,
"loss": 5.0765,
"step": 65500
},
{
"epoch": 13.16314319904268,
"grad_norm": 2.5236001014709473,
"learning_rate": 1.710710011966494e-05,
"loss": 5.0699,
"step": 66000
},
{
"epoch": 13.26286398085361,
"grad_norm": 2.407404661178589,
"learning_rate": 1.6858296769046668e-05,
"loss": 5.0999,
"step": 66500
},
{
"epoch": 13.36258476266454,
"grad_norm": 2.665024518966675,
"learning_rate": 1.6608994814519347e-05,
"loss": 5.0852,
"step": 67000
},
{
"epoch": 13.462305544475468,
"grad_norm": 2.7694313526153564,
"learning_rate": 1.6359692859992023e-05,
"loss": 5.0785,
"step": 67500
},
{
"epoch": 13.562026326286398,
"grad_norm": 2.839297294616699,
"learning_rate": 1.6110390905464702e-05,
"loss": 5.0826,
"step": 68000
},
{
"epoch": 13.661747108097327,
"grad_norm": 2.831908941268921,
"learning_rate": 1.586158755484643e-05,
"loss": 5.0877,
"step": 68500
},
{
"epoch": 13.761467889908257,
"grad_norm": 2.9526407718658447,
"learning_rate": 1.5612285600319106e-05,
"loss": 5.0692,
"step": 69000
},
{
"epoch": 13.861188671719185,
"grad_norm": 2.832224130630493,
"learning_rate": 1.5362983645791785e-05,
"loss": 5.0687,
"step": 69500
},
{
"epoch": 13.960909453530116,
"grad_norm": 2.622544050216675,
"learning_rate": 1.5113681691264461e-05,
"loss": 5.065,
"step": 70000
},
{
"epoch": 14.060630235341046,
"grad_norm": 2.9850549697875977,
"learning_rate": 1.486487834064619e-05,
"loss": 5.0665,
"step": 70500
},
{
"epoch": 14.160351017151974,
"grad_norm": 2.7051777839660645,
"learning_rate": 1.4615576386118868e-05,
"loss": 5.0509,
"step": 71000
},
{
"epoch": 14.260071798962905,
"grad_norm": 2.6535110473632812,
"learning_rate": 1.4366274431591544e-05,
"loss": 5.0426,
"step": 71500
},
{
"epoch": 14.359792580773833,
"grad_norm": 2.8298914432525635,
"learning_rate": 1.4116972477064222e-05,
"loss": 5.0348,
"step": 72000
},
{
"epoch": 14.459513362584763,
"grad_norm": 2.776466131210327,
"learning_rate": 1.3868169126445951e-05,
"loss": 5.0486,
"step": 72500
},
{
"epoch": 14.559234144395692,
"grad_norm": 2.762392044067383,
"learning_rate": 1.3618867171918629e-05,
"loss": 5.0478,
"step": 73000
},
{
"epoch": 14.658954926206622,
"grad_norm": 2.7879889011383057,
"learning_rate": 1.3369565217391305e-05,
"loss": 5.0453,
"step": 73500
},
{
"epoch": 14.75867570801755,
"grad_norm": 2.7736377716064453,
"learning_rate": 1.3120263262863982e-05,
"loss": 5.0538,
"step": 74000
},
{
"epoch": 14.85839648982848,
"grad_norm": 3.0703177452087402,
"learning_rate": 1.2871459912245712e-05,
"loss": 5.0649,
"step": 74500
},
{
"epoch": 14.95811727163941,
"grad_norm": 2.8057234287261963,
"learning_rate": 1.262215795771839e-05,
"loss": 5.0558,
"step": 75000
},
{
"epoch": 15.05783805345034,
"grad_norm": 2.9843761920928955,
"learning_rate": 1.2372856003191066e-05,
"loss": 5.0367,
"step": 75500
},
{
"epoch": 15.157558835261268,
"grad_norm": 2.723043918609619,
"learning_rate": 1.2123554048663741e-05,
"loss": 5.0407,
"step": 76000
},
{
"epoch": 15.257279617072198,
"grad_norm": 2.729093551635742,
"learning_rate": 1.1874750698045473e-05,
"loss": 5.0261,
"step": 76500
},
{
"epoch": 15.357000398883127,
"grad_norm": 2.7686829566955566,
"learning_rate": 1.162544874351815e-05,
"loss": 5.0189,
"step": 77000
},
{
"epoch": 15.456721180694057,
"grad_norm": 2.7151553630828857,
"learning_rate": 1.1376146788990826e-05,
"loss": 5.0186,
"step": 77500
},
{
"epoch": 15.556441962504985,
"grad_norm": 2.6453444957733154,
"learning_rate": 1.1126844834463502e-05,
"loss": 5.0252,
"step": 78000
},
{
"epoch": 15.656162744315916,
"grad_norm": 2.6896181106567383,
"learning_rate": 1.0878041483845234e-05,
"loss": 5.0129,
"step": 78500
},
{
"epoch": 15.755883526126844,
"grad_norm": 2.6944217681884766,
"learning_rate": 1.0628739529317911e-05,
"loss": 5.0443,
"step": 79000
},
{
"epoch": 15.855604307937774,
"grad_norm": 2.782818555831909,
"learning_rate": 1.0379437574790587e-05,
"loss": 5.0325,
"step": 79500
},
{
"epoch": 15.955325089748703,
"grad_norm": 2.8319053649902344,
"learning_rate": 1.0130135620263263e-05,
"loss": 5.0277,
"step": 80000
},
{
"epoch": 16.05504587155963,
"grad_norm": 2.616605758666992,
"learning_rate": 9.881332269644994e-06,
"loss": 5.0127,
"step": 80500
},
{
"epoch": 16.15476665337056,
"grad_norm": 2.8563239574432373,
"learning_rate": 9.632030315117672e-06,
"loss": 5.0094,
"step": 81000
},
{
"epoch": 16.254487435181492,
"grad_norm": 2.536868095397949,
"learning_rate": 9.382728360590348e-06,
"loss": 4.9974,
"step": 81500
},
{
"epoch": 16.354208216992422,
"grad_norm": 2.8117527961730957,
"learning_rate": 9.133426406063023e-06,
"loss": 5.0044,
"step": 82000
},
{
"epoch": 16.453928998803352,
"grad_norm": 2.776693344116211,
"learning_rate": 8.884623055444755e-06,
"loss": 4.994,
"step": 82500
},
{
"epoch": 16.55364978061428,
"grad_norm": 2.7791900634765625,
"learning_rate": 8.635321100917432e-06,
"loss": 5.0239,
"step": 83000
},
{
"epoch": 16.65337056242521,
"grad_norm": 2.8289597034454346,
"learning_rate": 8.386019146390108e-06,
"loss": 5.0059,
"step": 83500
},
{
"epoch": 16.75309134423614,
"grad_norm": 2.89103102684021,
"learning_rate": 8.136717191862784e-06,
"loss": 5.0237,
"step": 84000
},
{
"epoch": 16.85281212604707,
"grad_norm": 2.7034354209899902,
"learning_rate": 7.887913841244516e-06,
"loss": 5.0107,
"step": 84500
},
{
"epoch": 16.952532907857996,
"grad_norm": 3.348228693008423,
"learning_rate": 7.638611886717193e-06,
"loss": 5.0128,
"step": 85000
},
{
"epoch": 17.052253689668927,
"grad_norm": 2.9482028484344482,
"learning_rate": 7.389309932189868e-06,
"loss": 5.0143,
"step": 85500
},
{
"epoch": 17.151974471479857,
"grad_norm": 2.8442418575286865,
"learning_rate": 7.140007977662546e-06,
"loss": 4.9986,
"step": 86000
},
{
"epoch": 17.251695253290787,
"grad_norm": 2.7483792304992676,
"learning_rate": 6.891204627044276e-06,
"loss": 5.0003,
"step": 86500
},
{
"epoch": 17.351416035101714,
"grad_norm": 2.8354785442352295,
"learning_rate": 6.641902672516953e-06,
"loss": 5.0106,
"step": 87000
},
{
"epoch": 17.451136816912644,
"grad_norm": 3.0782690048217773,
"learning_rate": 6.392600717989629e-06,
"loss": 4.9894,
"step": 87500
},
{
"epoch": 17.550857598723574,
"grad_norm": 2.572624444961548,
"learning_rate": 6.143298763462306e-06,
"loss": 4.9901,
"step": 88000
},
{
"epoch": 17.650578380534505,
"grad_norm": 2.726686477661133,
"learning_rate": 5.894495412844037e-06,
"loss": 5.0045,
"step": 88500
},
{
"epoch": 17.75029916234543,
"grad_norm": 2.79811429977417,
"learning_rate": 5.645193458316714e-06,
"loss": 4.9927,
"step": 89000
},
{
"epoch": 17.85001994415636,
"grad_norm": 2.7528791427612305,
"learning_rate": 5.39589150378939e-06,
"loss": 4.9834,
"step": 89500
},
{
"epoch": 17.949740725967292,
"grad_norm": 2.85099196434021,
"learning_rate": 5.146589549262067e-06,
"loss": 4.9967,
"step": 90000
},
{
"epoch": 18.049461507778222,
"grad_norm": 2.686501979827881,
"learning_rate": 4.897786198643798e-06,
"loss": 4.9948,
"step": 90500
},
{
"epoch": 18.14918228958915,
"grad_norm": 3.057145357131958,
"learning_rate": 4.648484244116474e-06,
"loss": 4.9885,
"step": 91000
},
{
"epoch": 18.24890307140008,
"grad_norm": 2.915149211883545,
"learning_rate": 4.399182289589151e-06,
"loss": 4.9876,
"step": 91500
},
{
"epoch": 18.34862385321101,
"grad_norm": 2.907449960708618,
"learning_rate": 4.149880335061828e-06,
"loss": 5.0049,
"step": 92000
},
{
"epoch": 18.44834463502194,
"grad_norm": 2.798488140106201,
"learning_rate": 3.901076984443558e-06,
"loss": 4.9643,
"step": 92500
},
{
"epoch": 18.54806541683287,
"grad_norm": 2.8539681434631348,
"learning_rate": 3.6517750299162346e-06,
"loss": 4.9917,
"step": 93000
},
{
"epoch": 18.647786198643796,
"grad_norm": 2.7598862648010254,
"learning_rate": 3.4024730753889117e-06,
"loss": 4.9894,
"step": 93500
},
{
"epoch": 18.747506980454727,
"grad_norm": 2.577714443206787,
"learning_rate": 3.153171120861588e-06,
"loss": 4.9607,
"step": 94000
},
{
"epoch": 18.847227762265657,
"grad_norm": 2.8153915405273438,
"learning_rate": 2.904367770243319e-06,
"loss": 4.9905,
"step": 94500
},
{
"epoch": 18.946948544076587,
"grad_norm": 2.7925517559051514,
"learning_rate": 2.6550658157159952e-06,
"loss": 5.0036,
"step": 95000
},
{
"epoch": 19.046669325887514,
"grad_norm": 2.60158371925354,
"learning_rate": 2.405763861188672e-06,
"loss": 4.9877,
"step": 95500
},
{
"epoch": 19.146390107698444,
"grad_norm": 2.7144620418548584,
"learning_rate": 2.156461906661348e-06,
"loss": 5.0011,
"step": 96000
},
{
"epoch": 19.246110889509374,
"grad_norm": 2.779771327972412,
"learning_rate": 1.9076585560430792e-06,
"loss": 4.9869,
"step": 96500
},
{
"epoch": 19.345831671320305,
"grad_norm": 2.8586442470550537,
"learning_rate": 1.6583566015157561e-06,
"loss": 4.9858,
"step": 97000
},
{
"epoch": 19.44555245313123,
"grad_norm": 2.572789430618286,
"learning_rate": 1.4090546469884324e-06,
"loss": 4.9934,
"step": 97500
},
{
"epoch": 19.54527323494216,
"grad_norm": 2.7267684936523438,
"learning_rate": 1.159752692461109e-06,
"loss": 4.9714,
"step": 98000
},
{
"epoch": 19.644994016753092,
"grad_norm": 2.7596216201782227,
"learning_rate": 9.109493418428401e-07,
"loss": 4.9698,
"step": 98500
},
{
"epoch": 19.744714798564022,
"grad_norm": 2.850459575653076,
"learning_rate": 6.616473873155166e-07,
"loss": 4.9653,
"step": 99000
},
{
"epoch": 19.84443558037495,
"grad_norm": 3.2066328525543213,
"learning_rate": 4.1234543278819307e-07,
"loss": 4.964,
"step": 99500
},
{
"epoch": 19.94415636218588,
"grad_norm": 3.170915365219116,
"learning_rate": 1.6304347826086955e-07,
"loss": 4.9803,
"step": 100000
},
{
"epoch": 20.0,
"step": 100280,
"total_flos": 488922611712000.0,
"train_loss": 5.48259629872295,
"train_runtime": 4495.3437,
"train_samples_per_second": 356.898,
"train_steps_per_second": 22.308
}
],
"logging_steps": 500,
"max_steps": 100280,
"num_input_tokens_seen": 0,
"num_train_epochs": 20,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 488922611712000.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}