{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.1886449447629701,
"eval_steps": 5000,
"global_step": 17000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.006992029086841001,
"grad_norm": 75.90129852294922,
"learning_rate": 6.780845858091577e-07,
"loss": 16.865,
"step": 100
},
{
"epoch": 0.013984058173682002,
"grad_norm": 71.16612243652344,
"learning_rate": 1.3771408598392172e-06,
"loss": 16.1556,
"step": 200
},
{
"epoch": 0.020976087260523003,
"grad_norm": 130.67613220214844,
"learning_rate": 2.0761971338692767e-06,
"loss": 14.8008,
"step": 300
},
{
"epoch": 0.027968116347364003,
"grad_norm": 36.4179801940918,
"learning_rate": 2.7682628451590356e-06,
"loss": 12.4025,
"step": 400
},
{
"epoch": 0.034960145434205,
"grad_norm": 34.748844146728516,
"learning_rate": 3.467319119189095e-06,
"loss": 9.7465,
"step": 500
},
{
"epoch": 0.04195217452104601,
"grad_norm": 8.039146423339844,
"learning_rate": 4.166375393219155e-06,
"loss": 8.448,
"step": 600
},
{
"epoch": 0.04894420360788701,
"grad_norm": 10.288451194763184,
"learning_rate": 4.865431667249214e-06,
"loss": 8.1951,
"step": 700
},
{
"epoch": 0.05593623269472801,
"grad_norm": 6.3236308097839355,
"learning_rate": 5.564487941279273e-06,
"loss": 8.1093,
"step": 800
},
{
"epoch": 0.06292826178156902,
"grad_norm": 15.230368614196777,
"learning_rate": 6.263544215309333e-06,
"loss": 8.0567,
"step": 900
},
{
"epoch": 0.06992029086841,
"grad_norm": 5.573168754577637,
"learning_rate": 6.962600489339393e-06,
"loss": 8.0401,
"step": 1000
},
{
"epoch": 0.07691231995525101,
"grad_norm": 6.7740325927734375,
"learning_rate": 7.661656763369451e-06,
"loss": 7.9491,
"step": 1100
},
{
"epoch": 0.08390434904209201,
"grad_norm": 6.167610168457031,
"learning_rate": 8.360713037399512e-06,
"loss": 7.9494,
"step": 1200
},
{
"epoch": 0.09089637812893302,
"grad_norm": 5.53561544418335,
"learning_rate": 9.05976931142957e-06,
"loss": 7.9386,
"step": 1300
},
{
"epoch": 0.09788840721577402,
"grad_norm": 6.056953430175781,
"learning_rate": 9.758825585459631e-06,
"loss": 7.9033,
"step": 1400
},
{
"epoch": 0.10488043630261502,
"grad_norm": 5.160706043243408,
"learning_rate": 1.045788185948969e-05,
"loss": 7.9055,
"step": 1500
},
{
"epoch": 0.11187246538945601,
"grad_norm": 5.525616645812988,
"learning_rate": 1.115693813351975e-05,
"loss": 7.9203,
"step": 1600
},
{
"epoch": 0.11886449447629702,
"grad_norm": 5.553597927093506,
"learning_rate": 1.185599440754981e-05,
"loss": 7.8381,
"step": 1700
},
{
"epoch": 0.12585652356313803,
"grad_norm": 4.8832478523254395,
"learning_rate": 1.2555050681579868e-05,
"loss": 7.8679,
"step": 1800
},
{
"epoch": 0.13284855264997902,
"grad_norm": 5.308164596557617,
"learning_rate": 1.3254106955609927e-05,
"loss": 7.8686,
"step": 1900
},
{
"epoch": 0.13984058173682,
"grad_norm": 6.790646553039551,
"learning_rate": 1.3953163229639988e-05,
"loss": 7.8252,
"step": 2000
},
{
"epoch": 0.14683261082366103,
"grad_norm": 5.003779888153076,
"learning_rate": 1.4652219503670046e-05,
"loss": 7.856,
"step": 2100
},
{
"epoch": 0.15382463991050202,
"grad_norm": 5.0926642417907715,
"learning_rate": 1.5351275777700107e-05,
"loss": 7.8301,
"step": 2200
},
{
"epoch": 0.16081666899734304,
"grad_norm": 4.154193878173828,
"learning_rate": 1.6050332051730168e-05,
"loss": 7.8595,
"step": 2300
},
{
"epoch": 0.16780869808418403,
"grad_norm": 4.168135166168213,
"learning_rate": 1.6749388325760225e-05,
"loss": 7.8138,
"step": 2400
},
{
"epoch": 0.17480072717102504,
"grad_norm": 4.69076681137085,
"learning_rate": 1.7448444599790285e-05,
"loss": 7.812,
"step": 2500
},
{
"epoch": 0.18179275625786603,
"grad_norm": 4.386991500854492,
"learning_rate": 1.8147500873820346e-05,
"loss": 7.8261,
"step": 2600
},
{
"epoch": 0.18878478534470702,
"grad_norm": 5.777139186859131,
"learning_rate": 1.8846557147850403e-05,
"loss": 7.7988,
"step": 2700
},
{
"epoch": 0.19577681443154804,
"grad_norm": 18.3277645111084,
"learning_rate": 1.9545613421880464e-05,
"loss": 7.7965,
"step": 2800
},
{
"epoch": 0.20276884351838903,
"grad_norm": 4.5356669425964355,
"learning_rate": 1.997280814201919e-05,
"loss": 7.783,
"step": 2900
},
{
"epoch": 0.20976087260523005,
"grad_norm": 5.4392476081848145,
"learning_rate": 1.9895117119216877e-05,
"loss": 7.7752,
"step": 3000
},
{
"epoch": 0.21675290169207104,
"grad_norm": 5.365427017211914,
"learning_rate": 1.981742609641456e-05,
"loss": 7.7715,
"step": 3100
},
{
"epoch": 0.22374493077891203,
"grad_norm": 4.111672401428223,
"learning_rate": 1.9739735073612244e-05,
"loss": 7.7903,
"step": 3200
},
{
"epoch": 0.23073695986575304,
"grad_norm": 4.107003211975098,
"learning_rate": 1.9662044050809928e-05,
"loss": 7.7656,
"step": 3300
},
{
"epoch": 0.23772898895259403,
"grad_norm": 4.667971611022949,
"learning_rate": 1.9584353028007615e-05,
"loss": 7.749,
"step": 3400
},
{
"epoch": 0.24472101803943505,
"grad_norm": 4.010958194732666,
"learning_rate": 1.95066620052053e-05,
"loss": 7.7662,
"step": 3500
},
{
"epoch": 0.25171304712627607,
"grad_norm": 4.07546329498291,
"learning_rate": 1.9428970982402986e-05,
"loss": 7.7492,
"step": 3600
},
{
"epoch": 0.25870507621311706,
"grad_norm": 5.414000988006592,
"learning_rate": 1.935127995960067e-05,
"loss": 7.737,
"step": 3700
},
{
"epoch": 0.26569710529995805,
"grad_norm": 4.4744648933410645,
"learning_rate": 1.9273588936798357e-05,
"loss": 7.7232,
"step": 3800
},
{
"epoch": 0.27268913438679904,
"grad_norm": 4.141202926635742,
"learning_rate": 1.919589791399604e-05,
"loss": 7.7616,
"step": 3900
},
{
"epoch": 0.27968116347364,
"grad_norm": 4.493043899536133,
"learning_rate": 1.9118206891193724e-05,
"loss": 7.7391,
"step": 4000
},
{
"epoch": 0.28667319256048107,
"grad_norm": 4.083165168762207,
"learning_rate": 1.904051586839141e-05,
"loss": 7.7552,
"step": 4100
},
{
"epoch": 0.29366522164732206,
"grad_norm": 8.303427696228027,
"learning_rate": 1.8962824845589095e-05,
"loss": 7.7273,
"step": 4200
},
{
"epoch": 0.30065725073416305,
"grad_norm": 4.003914833068848,
"learning_rate": 1.888513382278678e-05,
"loss": 7.7216,
"step": 4300
},
{
"epoch": 0.30764927982100404,
"grad_norm": 4.859315395355225,
"learning_rate": 1.8807442799984462e-05,
"loss": 7.7371,
"step": 4400
},
{
"epoch": 0.31464130890784503,
"grad_norm": 4.693440914154053,
"learning_rate": 1.872975177718215e-05,
"loss": 7.7426,
"step": 4500
},
{
"epoch": 0.3216333379946861,
"grad_norm": 4.81652307510376,
"learning_rate": 1.8652060754379833e-05,
"loss": 7.7406,
"step": 4600
},
{
"epoch": 0.32862536708152706,
"grad_norm": 3.861663341522217,
"learning_rate": 1.8574369731577516e-05,
"loss": 7.712,
"step": 4700
},
{
"epoch": 0.33561739616836805,
"grad_norm": 4.218888282775879,
"learning_rate": 1.8496678708775203e-05,
"loss": 7.7466,
"step": 4800
},
{
"epoch": 0.34260942525520904,
"grad_norm": 8.032899856567383,
"learning_rate": 1.8418987685972887e-05,
"loss": 7.7058,
"step": 4900
},
{
"epoch": 0.3496014543420501,
"grad_norm": 4.292692184448242,
"learning_rate": 1.8342073573398596e-05,
"loss": 7.7139,
"step": 5000
},
{
"epoch": 0.3496014543420501,
"eval_loss": 7.6895527839660645,
"eval_runtime": 270.2677,
"eval_samples_per_second": 1696.499,
"eval_steps_per_second": 13.257,
"step": 5000
},
{
"epoch": 0.3565934834288911,
"grad_norm": 4.507449626922607,
"learning_rate": 1.826438255059628e-05,
"loss": 7.7457,
"step": 5100
},
{
"epoch": 0.36358551251573207,
"grad_norm": 10.12509536743164,
"learning_rate": 1.8186691527793966e-05,
"loss": 7.7172,
"step": 5200
},
{
"epoch": 0.37057754160257306,
"grad_norm": 4.254384517669678,
"learning_rate": 1.810900050499165e-05,
"loss": 7.739,
"step": 5300
},
{
"epoch": 0.37756957068941405,
"grad_norm": 6.059901237487793,
"learning_rate": 1.8031309482189334e-05,
"loss": 7.7259,
"step": 5400
},
{
"epoch": 0.3845615997762551,
"grad_norm": 5.770705223083496,
"learning_rate": 1.7953618459387017e-05,
"loss": 7.6977,
"step": 5500
},
{
"epoch": 0.3915536288630961,
"grad_norm": 6.927788257598877,
"learning_rate": 1.7875927436584705e-05,
"loss": 7.7237,
"step": 5600
},
{
"epoch": 0.39854565794993707,
"grad_norm": 4.323265552520752,
"learning_rate": 1.7798236413782388e-05,
"loss": 7.7118,
"step": 5700
},
{
"epoch": 0.40553768703677806,
"grad_norm": 4.448848247528076,
"learning_rate": 1.7720545390980072e-05,
"loss": 7.7099,
"step": 5800
},
{
"epoch": 0.41252971612361905,
"grad_norm": 66.31098175048828,
"learning_rate": 1.764285436817776e-05,
"loss": 7.7142,
"step": 5900
},
{
"epoch": 0.4195217452104601,
"grad_norm": 6.65512228012085,
"learning_rate": 1.7565163345375443e-05,
"loss": 7.6885,
"step": 6000
},
{
"epoch": 0.4265137742973011,
"grad_norm": 4.743120193481445,
"learning_rate": 1.748747232257313e-05,
"loss": 7.6799,
"step": 6100
},
{
"epoch": 0.4335058033841421,
"grad_norm": 4.90665864944458,
"learning_rate": 1.7409781299770813e-05,
"loss": 7.7039,
"step": 6200
},
{
"epoch": 0.44049783247098306,
"grad_norm": 6.465912342071533,
"learning_rate": 1.7332090276968497e-05,
"loss": 7.6825,
"step": 6300
},
{
"epoch": 0.44748986155782405,
"grad_norm": 3.6341958045959473,
"learning_rate": 1.7254399254166184e-05,
"loss": 7.6846,
"step": 6400
},
{
"epoch": 0.4544818906446651,
"grad_norm": 3.931257486343384,
"learning_rate": 1.7176708231363868e-05,
"loss": 7.7078,
"step": 6500
},
{
"epoch": 0.4614739197315061,
"grad_norm": 4.2776665687561035,
"learning_rate": 1.709901720856155e-05,
"loss": 7.6945,
"step": 6600
},
{
"epoch": 0.4684659488183471,
"grad_norm": 4.510386943817139,
"learning_rate": 1.702132618575924e-05,
"loss": 7.7017,
"step": 6700
},
{
"epoch": 0.47545797790518807,
"grad_norm": 5.780529975891113,
"learning_rate": 1.6943635162956922e-05,
"loss": 7.6781,
"step": 6800
},
{
"epoch": 0.4824500069920291,
"grad_norm": 5.802525997161865,
"learning_rate": 1.6865944140154606e-05,
"loss": 7.6885,
"step": 6900
},
{
"epoch": 0.4894420360788701,
"grad_norm": 5.534703254699707,
"learning_rate": 1.6789030027580314e-05,
"loss": 7.7426,
"step": 7000
},
{
"epoch": 0.4964340651657111,
"grad_norm": 5.897773742675781,
"learning_rate": 1.6711339004778e-05,
"loss": 7.6809,
"step": 7100
},
{
"epoch": 0.5034260942525521,
"grad_norm": 5.115645408630371,
"learning_rate": 1.6633647981975685e-05,
"loss": 7.6977,
"step": 7200
},
{
"epoch": 0.5104181233393931,
"grad_norm": 7.200738430023193,
"learning_rate": 1.655595695917337e-05,
"loss": 7.6964,
"step": 7300
},
{
"epoch": 0.5174101524262341,
"grad_norm": 5.526965618133545,
"learning_rate": 1.6478265936371052e-05,
"loss": 7.6834,
"step": 7400
},
{
"epoch": 0.524402181513075,
"grad_norm": 8.350557327270508,
"learning_rate": 1.640057491356874e-05,
"loss": 7.6593,
"step": 7500
},
{
"epoch": 0.5313942105999161,
"grad_norm": 4.367435455322266,
"learning_rate": 1.6322883890766423e-05,
"loss": 7.6745,
"step": 7600
},
{
"epoch": 0.5383862396867571,
"grad_norm": 7.726882457733154,
"learning_rate": 1.6245192867964107e-05,
"loss": 7.6587,
"step": 7700
},
{
"epoch": 0.5453782687735981,
"grad_norm": 5.289243221282959,
"learning_rate": 1.6167501845161794e-05,
"loss": 7.6389,
"step": 7800
},
{
"epoch": 0.5523702978604391,
"grad_norm": 4.716782093048096,
"learning_rate": 1.6089810822359478e-05,
"loss": 7.6298,
"step": 7900
},
{
"epoch": 0.55936232694728,
"grad_norm": 4.2459797859191895,
"learning_rate": 1.601211979955716e-05,
"loss": 7.6693,
"step": 8000
},
{
"epoch": 0.5663543560341211,
"grad_norm": 4.4402594566345215,
"learning_rate": 1.5934428776754848e-05,
"loss": 7.6454,
"step": 8100
},
{
"epoch": 0.5733463851209621,
"grad_norm": 20.979795455932617,
"learning_rate": 1.5856737753952532e-05,
"loss": 7.6491,
"step": 8200
},
{
"epoch": 0.5803384142078031,
"grad_norm": 4.494659900665283,
"learning_rate": 1.5779046731150216e-05,
"loss": 7.661,
"step": 8300
},
{
"epoch": 0.5873304432946441,
"grad_norm": 4.788604736328125,
"learning_rate": 1.57013557083479e-05,
"loss": 7.6525,
"step": 8400
},
{
"epoch": 0.5943224723814851,
"grad_norm": 4.576944351196289,
"learning_rate": 1.5623664685545586e-05,
"loss": 7.6669,
"step": 8500
},
{
"epoch": 0.6013145014683261,
"grad_norm": 5.762516498565674,
"learning_rate": 1.5545973662743273e-05,
"loss": 7.6379,
"step": 8600
},
{
"epoch": 0.6083065305551671,
"grad_norm": 4.864853858947754,
"learning_rate": 1.5468282639940957e-05,
"loss": 7.6706,
"step": 8700
},
{
"epoch": 0.6152985596420081,
"grad_norm": 11.886726379394531,
"learning_rate": 1.539059161713864e-05,
"loss": 7.6487,
"step": 8800
},
{
"epoch": 0.6222905887288491,
"grad_norm": 4.3859052658081055,
"learning_rate": 1.5312900594336328e-05,
"loss": 7.6607,
"step": 8900
},
{
"epoch": 0.6292826178156901,
"grad_norm": 4.877004146575928,
"learning_rate": 1.5235209571534011e-05,
"loss": 7.6334,
"step": 9000
},
{
"epoch": 0.6362746469025311,
"grad_norm": 10.190913200378418,
"learning_rate": 1.5158295458959717e-05,
"loss": 7.6891,
"step": 9100
},
{
"epoch": 0.6432666759893721,
"grad_norm": 4.916624546051025,
"learning_rate": 1.5080604436157402e-05,
"loss": 7.734,
"step": 9200
},
{
"epoch": 0.6502587050762131,
"grad_norm": 5.250345230102539,
"learning_rate": 1.5002913413355089e-05,
"loss": 7.6283,
"step": 9300
},
{
"epoch": 0.6572507341630541,
"grad_norm": 4.70583963394165,
"learning_rate": 1.4925222390552775e-05,
"loss": 7.6461,
"step": 9400
},
{
"epoch": 0.6642427632498952,
"grad_norm": 4.943760871887207,
"learning_rate": 1.4847531367750458e-05,
"loss": 7.623,
"step": 9500
},
{
"epoch": 0.6712347923367361,
"grad_norm": 6.2978715896606445,
"learning_rate": 1.4769840344948144e-05,
"loss": 7.6251,
"step": 9600
},
{
"epoch": 0.6782268214235772,
"grad_norm": 4.278825759887695,
"learning_rate": 1.4692149322145827e-05,
"loss": 7.6663,
"step": 9700
},
{
"epoch": 0.6852188505104181,
"grad_norm": 6.855334758758545,
"learning_rate": 1.4614458299343513e-05,
"loss": 7.6376,
"step": 9800
},
{
"epoch": 0.6922108795972591,
"grad_norm": 9.894013404846191,
"learning_rate": 1.4536767276541198e-05,
"loss": 7.6834,
"step": 9900
},
{
"epoch": 0.6992029086841002,
"grad_norm": 4.454260349273682,
"learning_rate": 1.4459076253738882e-05,
"loss": 7.6851,
"step": 10000
},
{
"epoch": 0.6992029086841002,
"eval_loss": 7.6099066734313965,
"eval_runtime": 436.4855,
"eval_samples_per_second": 1050.456,
"eval_steps_per_second": 8.209,
"step": 10000
},
{
"epoch": 0.7061949377709411,
"grad_norm": 5.147873878479004,
"learning_rate": 1.4381385230936567e-05,
"loss": 7.6034,
"step": 10100
},
{
"epoch": 0.7131869668577822,
"grad_norm": 5.6142964363098145,
"learning_rate": 1.430369420813425e-05,
"loss": 7.6512,
"step": 10200
},
{
"epoch": 0.7201789959446231,
"grad_norm": 10.30461597442627,
"learning_rate": 1.4226003185331936e-05,
"loss": 7.6413,
"step": 10300
},
{
"epoch": 0.7271710250314641,
"grad_norm": 6.112381458282471,
"learning_rate": 1.4148312162529621e-05,
"loss": 7.6083,
"step": 10400
},
{
"epoch": 0.7341630541183052,
"grad_norm": 4.750720977783203,
"learning_rate": 1.4070621139727305e-05,
"loss": 7.6475,
"step": 10500
},
{
"epoch": 0.7411550832051461,
"grad_norm": 4.715427875518799,
"learning_rate": 1.399293011692499e-05,
"loss": 7.61,
"step": 10600
},
{
"epoch": 0.7481471122919872,
"grad_norm": 5.024092197418213,
"learning_rate": 1.3915239094122674e-05,
"loss": 7.6404,
"step": 10700
},
{
"epoch": 0.7551391413788281,
"grad_norm": 7.21763801574707,
"learning_rate": 1.383754807132036e-05,
"loss": 7.6308,
"step": 10800
},
{
"epoch": 0.7621311704656691,
"grad_norm": 5.111927032470703,
"learning_rate": 1.3759857048518043e-05,
"loss": 7.638,
"step": 10900
},
{
"epoch": 0.7691231995525102,
"grad_norm": 5.705997467041016,
"learning_rate": 1.368216602571573e-05,
"loss": 7.5954,
"step": 11000
},
{
"epoch": 0.7761152286393511,
"grad_norm": 5.417977809906006,
"learning_rate": 1.3604475002913415e-05,
"loss": 7.6037,
"step": 11100
},
{
"epoch": 0.7831072577261922,
"grad_norm": 4.27499532699585,
"learning_rate": 1.3527560890339122e-05,
"loss": 7.6405,
"step": 11200
},
{
"epoch": 0.7900992868130331,
"grad_norm": 4.530681133270264,
"learning_rate": 1.3449869867536806e-05,
"loss": 7.6396,
"step": 11300
},
{
"epoch": 0.7970913158998741,
"grad_norm": 6.06285285949707,
"learning_rate": 1.3372178844734491e-05,
"loss": 7.5898,
"step": 11400
},
{
"epoch": 0.8040833449867152,
"grad_norm": 5.323781490325928,
"learning_rate": 1.3294487821932177e-05,
"loss": 7.644,
"step": 11500
},
{
"epoch": 0.8110753740735561,
"grad_norm": 5.1687092781066895,
"learning_rate": 1.321679679912986e-05,
"loss": 7.639,
"step": 11600
},
{
"epoch": 0.8180674031603972,
"grad_norm": 13.228008270263672,
"learning_rate": 1.3139105776327546e-05,
"loss": 7.6146,
"step": 11700
},
{
"epoch": 0.8250594322472381,
"grad_norm": 14.277410507202148,
"learning_rate": 1.3061414753525233e-05,
"loss": 7.6076,
"step": 11800
},
{
"epoch": 0.8320514613340791,
"grad_norm": 5.011687755584717,
"learning_rate": 1.2983723730722917e-05,
"loss": 7.5997,
"step": 11900
},
{
"epoch": 0.8390434904209202,
"grad_norm": 5.056484699249268,
"learning_rate": 1.2906032707920602e-05,
"loss": 7.6196,
"step": 12000
},
{
"epoch": 0.8460355195077611,
"grad_norm": 5.087393283843994,
"learning_rate": 1.2828341685118286e-05,
"loss": 7.6139,
"step": 12100
},
{
"epoch": 0.8530275485946022,
"grad_norm": 6.482489109039307,
"learning_rate": 1.2750650662315971e-05,
"loss": 7.6335,
"step": 12200
},
{
"epoch": 0.8600195776814431,
"grad_norm": 5.367912292480469,
"learning_rate": 1.2672959639513656e-05,
"loss": 7.6057,
"step": 12300
},
{
"epoch": 0.8670116067682841,
"grad_norm": 5.633285999298096,
"learning_rate": 1.259526861671134e-05,
"loss": 7.5759,
"step": 12400
},
{
"epoch": 0.8740036358551252,
"grad_norm": 5.425296783447266,
"learning_rate": 1.2518354504137047e-05,
"loss": 7.6044,
"step": 12500
},
{
"epoch": 0.8809956649419661,
"grad_norm": 5.661762237548828,
"learning_rate": 1.2440663481334734e-05,
"loss": 7.589,
"step": 12600
},
{
"epoch": 0.8879876940288072,
"grad_norm": 51.0821647644043,
"learning_rate": 1.2362972458532418e-05,
"loss": 7.5871,
"step": 12700
},
{
"epoch": 0.8949797231156481,
"grad_norm": 12.381628036499023,
"learning_rate": 1.2285281435730103e-05,
"loss": 7.6161,
"step": 12800
},
{
"epoch": 0.9019717522024892,
"grad_norm": 8.360747337341309,
"learning_rate": 1.2207590412927788e-05,
"loss": 7.5797,
"step": 12900
},
{
"epoch": 0.9089637812893302,
"grad_norm": 5.329476356506348,
"learning_rate": 1.2129899390125472e-05,
"loss": 7.6202,
"step": 13000
},
{
"epoch": 0.9159558103761711,
"grad_norm": 10.23351764678955,
"learning_rate": 1.2052208367323157e-05,
"loss": 7.6116,
"step": 13100
},
{
"epoch": 0.9229478394630122,
"grad_norm": 5.831717491149902,
"learning_rate": 1.1974517344520841e-05,
"loss": 7.6253,
"step": 13200
},
{
"epoch": 0.9299398685498532,
"grad_norm": 5.673948764801025,
"learning_rate": 1.1896826321718526e-05,
"loss": 7.5891,
"step": 13300
},
{
"epoch": 0.9369318976366942,
"grad_norm": 4.9491963386535645,
"learning_rate": 1.1819135298916212e-05,
"loss": 7.5856,
"step": 13400
},
{
"epoch": 0.9439239267235352,
"grad_norm": 10.202159881591797,
"learning_rate": 1.1741444276113895e-05,
"loss": 7.5824,
"step": 13500
},
{
"epoch": 0.9509159558103761,
"grad_norm": 5.314234256744385,
"learning_rate": 1.166375325331158e-05,
"loss": 7.6288,
"step": 13600
},
{
"epoch": 0.9579079848972172,
"grad_norm": 6.260787010192871,
"learning_rate": 1.1586062230509264e-05,
"loss": 7.5653,
"step": 13700
},
{
"epoch": 0.9649000139840582,
"grad_norm": 26.669679641723633,
"learning_rate": 1.150837120770695e-05,
"loss": 7.6073,
"step": 13800
},
{
"epoch": 0.9718920430708992,
"grad_norm": 6.57592248916626,
"learning_rate": 1.1430680184904635e-05,
"loss": 7.5958,
"step": 13900
},
{
"epoch": 0.9788840721577402,
"grad_norm": 7.6479363441467285,
"learning_rate": 1.1352989162102319e-05,
"loss": 7.599,
"step": 14000
},
{
"epoch": 0.9858761012445811,
"grad_norm": 6.408242702484131,
"learning_rate": 1.1275298139300004e-05,
"loss": 7.5982,
"step": 14100
},
{
"epoch": 0.9928681303314222,
"grad_norm": 5.306247234344482,
"learning_rate": 1.1197607116497688e-05,
"loss": 7.5634,
"step": 14200
},
{
"epoch": 0.9998601594182632,
"grad_norm": 5.802295684814453,
"learning_rate": 1.1119916093695375e-05,
"loss": 7.5923,
"step": 14300
},
{
"epoch": 1.0068521885051043,
"grad_norm": 7.27552604675293,
"learning_rate": 1.104222507089306e-05,
"loss": 7.6072,
"step": 14400
},
{
"epoch": 1.013844217591945,
"grad_norm": 5.475253105163574,
"learning_rate": 1.0964534048090744e-05,
"loss": 7.5589,
"step": 14500
},
{
"epoch": 1.0208362466787861,
"grad_norm": 5.9576544761657715,
"learning_rate": 1.088684302528843e-05,
"loss": 7.6,
"step": 14600
},
{
"epoch": 1.0278282757656272,
"grad_norm": 5.9021196365356445,
"learning_rate": 1.0809152002486115e-05,
"loss": 7.5464,
"step": 14700
},
{
"epoch": 1.0348203048524682,
"grad_norm": 5.77813196182251,
"learning_rate": 1.0731460979683798e-05,
"loss": 7.5824,
"step": 14800
},
{
"epoch": 1.0418123339393093,
"grad_norm": 6.408592224121094,
"learning_rate": 1.0653769956881484e-05,
"loss": 7.5528,
"step": 14900
},
{
"epoch": 1.04880436302615,
"grad_norm": 5.504671573638916,
"learning_rate": 1.0576078934079167e-05,
"loss": 7.568,
"step": 15000
},
{
"epoch": 1.04880436302615,
"eval_loss": 7.561756610870361,
"eval_runtime": 277.2921,
"eval_samples_per_second": 1653.524,
"eval_steps_per_second": 12.921,
"step": 15000
},
{
"epoch": 1.0557963921129911,
"grad_norm": 5.96599817276001,
"learning_rate": 1.0498387911276853e-05,
"loss": 7.559,
"step": 15100
},
{
"epoch": 1.0627884211998322,
"grad_norm": 17.772111892700195,
"learning_rate": 1.0420696888474538e-05,
"loss": 7.5555,
"step": 15200
},
{
"epoch": 1.0697804502866732,
"grad_norm": 8.23525333404541,
"learning_rate": 1.0343005865672222e-05,
"loss": 7.552,
"step": 15300
},
{
"epoch": 1.0767724793735143,
"grad_norm": 5.616782188415527,
"learning_rate": 1.0265314842869907e-05,
"loss": 7.5851,
"step": 15400
},
{
"epoch": 1.083764508460355,
"grad_norm": 6.9555888175964355,
"learning_rate": 1.018762382006759e-05,
"loss": 7.5256,
"step": 15500
},
{
"epoch": 1.0907565375471961,
"grad_norm": 6.269557952880859,
"learning_rate": 1.0109932797265276e-05,
"loss": 7.5683,
"step": 15600
},
{
"epoch": 1.0977485666340372,
"grad_norm": 5.618041038513184,
"learning_rate": 1.0032241774462962e-05,
"loss": 7.5909,
"step": 15700
},
{
"epoch": 1.1047405957208782,
"grad_norm": 7.424361705780029,
"learning_rate": 9.954550751660647e-06,
"loss": 7.5655,
"step": 15800
},
{
"epoch": 1.1117326248077193,
"grad_norm": 6.244665622711182,
"learning_rate": 9.87685972885833e-06,
"loss": 7.5476,
"step": 15900
},
{
"epoch": 1.11872465389456,
"grad_norm": 5.86741304397583,
"learning_rate": 9.799168706056016e-06,
"loss": 7.5721,
"step": 16000
},
{
"epoch": 1.1257166829814012,
"grad_norm": 6.809812068939209,
"learning_rate": 9.721477683253701e-06,
"loss": 7.5593,
"step": 16100
},
{
"epoch": 1.1327087120682422,
"grad_norm": 5.530270576477051,
"learning_rate": 9.643786660451385e-06,
"loss": 7.5783,
"step": 16200
},
{
"epoch": 1.1397007411550832,
"grad_norm": 6.193401336669922,
"learning_rate": 9.56609563764907e-06,
"loss": 7.5905,
"step": 16300
},
{
"epoch": 1.1466927702419243,
"grad_norm": 6.046769142150879,
"learning_rate": 9.488404614846756e-06,
"loss": 7.542,
"step": 16400
},
{
"epoch": 1.153684799328765,
"grad_norm": 9.423077583312988,
"learning_rate": 9.410713592044441e-06,
"loss": 7.5794,
"step": 16500
},
{
"epoch": 1.1606768284156062,
"grad_norm": 6.115114688873291,
"learning_rate": 9.333022569242125e-06,
"loss": 7.5669,
"step": 16600
},
{
"epoch": 1.1676688575024472,
"grad_norm": 7.971080780029297,
"learning_rate": 9.25533154643981e-06,
"loss": 7.5738,
"step": 16700
},
{
"epoch": 1.1746608865892882,
"grad_norm": 6.336145877838135,
"learning_rate": 9.178417433865517e-06,
"loss": 7.5431,
"step": 16800
},
{
"epoch": 1.1816529156761293,
"grad_norm": 29.671520233154297,
"learning_rate": 9.100726411063202e-06,
"loss": 7.5401,
"step": 16900
},
{
"epoch": 1.1886449447629701,
"grad_norm": 5.954431533813477,
"learning_rate": 9.023035388260886e-06,
"loss": 7.5629,
"step": 17000
}
],
"logging_steps": 100,
"max_steps": 28604,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 128,
"trial_name": null,
"trial_params": null
}