{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 10686,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02807411566535654,
"grad_norm": 8.254485130310059,
"learning_rate": 4.9550814149354296e-05,
"loss": 5.1202,
"step": 100
},
{
"epoch": 0.05614823133071308,
"grad_norm": 14.410130500793457,
"learning_rate": 4.908291222159835e-05,
"loss": 4.827,
"step": 200
},
{
"epoch": 0.08422234699606962,
"grad_norm": 12.091243743896484,
"learning_rate": 4.861501029384241e-05,
"loss": 4.8012,
"step": 300
},
{
"epoch": 0.11229646266142616,
"grad_norm": 9.615291595458984,
"learning_rate": 4.814710836608647e-05,
"loss": 4.7478,
"step": 400
},
{
"epoch": 0.1403705783267827,
"grad_norm": 7.660737991333008,
"learning_rate": 4.767920643833052e-05,
"loss": 4.7,
"step": 500
},
{
"epoch": 0.16844469399213924,
"grad_norm": 8.466590881347656,
"learning_rate": 4.7211304510574585e-05,
"loss": 4.7237,
"step": 600
},
{
"epoch": 0.1965188096574958,
"grad_norm": 9.133238792419434,
"learning_rate": 4.674340258281865e-05,
"loss": 4.6992,
"step": 700
},
{
"epoch": 0.22459292532285233,
"grad_norm": 11.180377006530762,
"learning_rate": 4.62755006550627e-05,
"loss": 4.6911,
"step": 800
},
{
"epoch": 0.25266704098820886,
"grad_norm": 9.834083557128906,
"learning_rate": 4.5807598727306764e-05,
"loss": 4.6565,
"step": 900
},
{
"epoch": 0.2807411566535654,
"grad_norm": 8.723132133483887,
"learning_rate": 4.533969679955082e-05,
"loss": 4.6333,
"step": 1000
},
{
"epoch": 0.308815272318922,
"grad_norm": 12.12197494506836,
"learning_rate": 4.4871794871794874e-05,
"loss": 4.5906,
"step": 1100
},
{
"epoch": 0.3368893879842785,
"grad_norm": 10.753365516662598,
"learning_rate": 4.440389294403893e-05,
"loss": 4.5926,
"step": 1200
},
{
"epoch": 0.36496350364963503,
"grad_norm": 8.904462814331055,
"learning_rate": 4.393599101628299e-05,
"loss": 4.5735,
"step": 1300
},
{
"epoch": 0.3930376193149916,
"grad_norm": 7.0933098793029785,
"learning_rate": 4.346808908852705e-05,
"loss": 4.6032,
"step": 1400
},
{
"epoch": 0.4211117349803481,
"grad_norm": 6.25530481338501,
"learning_rate": 4.30001871607711e-05,
"loss": 4.6286,
"step": 1500
},
{
"epoch": 0.44918585064570465,
"grad_norm": 9.821544647216797,
"learning_rate": 4.2532285233015164e-05,
"loss": 4.5785,
"step": 1600
},
{
"epoch": 0.4772599663110612,
"grad_norm": 8.794018745422363,
"learning_rate": 4.206438330525922e-05,
"loss": 4.5527,
"step": 1700
},
{
"epoch": 0.5053340819764177,
"grad_norm": 9.12629222869873,
"learning_rate": 4.1596481377503274e-05,
"loss": 4.5306,
"step": 1800
},
{
"epoch": 0.5334081976417743,
"grad_norm": 8.344796180725098,
"learning_rate": 4.1128579449747336e-05,
"loss": 4.4843,
"step": 1900
},
{
"epoch": 0.5614823133071308,
"grad_norm": 11.365053176879883,
"learning_rate": 4.066067752199139e-05,
"loss": 4.4841,
"step": 2000
},
{
"epoch": 0.5895564289724874,
"grad_norm": 8.378218650817871,
"learning_rate": 4.0192775594235447e-05,
"loss": 4.5196,
"step": 2100
},
{
"epoch": 0.617630544637844,
"grad_norm": 11.260799407958984,
"learning_rate": 3.972487366647951e-05,
"loss": 4.4967,
"step": 2200
},
{
"epoch": 0.6457046603032004,
"grad_norm": 11.992345809936523,
"learning_rate": 3.9256971738723564e-05,
"loss": 4.509,
"step": 2300
},
{
"epoch": 0.673778775968557,
"grad_norm": 15.889375686645508,
"learning_rate": 3.878906981096762e-05,
"loss": 4.5183,
"step": 2400
},
{
"epoch": 0.7018528916339135,
"grad_norm": 6.526124954223633,
"learning_rate": 3.832116788321168e-05,
"loss": 4.4316,
"step": 2500
},
{
"epoch": 0.7299270072992701,
"grad_norm": 8.866271018981934,
"learning_rate": 3.7853265955455736e-05,
"loss": 4.4426,
"step": 2600
},
{
"epoch": 0.7580011229646266,
"grad_norm": 8.329658508300781,
"learning_rate": 3.73853640276998e-05,
"loss": 4.445,
"step": 2700
},
{
"epoch": 0.7860752386299832,
"grad_norm": 8.942349433898926,
"learning_rate": 3.691746209994385e-05,
"loss": 4.4743,
"step": 2800
},
{
"epoch": 0.8141493542953397,
"grad_norm": 7.263224124908447,
"learning_rate": 3.6449560172187915e-05,
"loss": 4.4459,
"step": 2900
},
{
"epoch": 0.8422234699606962,
"grad_norm": 8.767597198486328,
"learning_rate": 3.598165824443197e-05,
"loss": 4.3944,
"step": 3000
},
{
"epoch": 0.8702975856260527,
"grad_norm": 7.760046482086182,
"learning_rate": 3.5513756316676025e-05,
"loss": 4.4683,
"step": 3100
},
{
"epoch": 0.8983717012914093,
"grad_norm": 13.036920547485352,
"learning_rate": 3.504585438892009e-05,
"loss": 4.3891,
"step": 3200
},
{
"epoch": 0.9264458169567659,
"grad_norm": 10.132473945617676,
"learning_rate": 3.457795246116414e-05,
"loss": 4.3914,
"step": 3300
},
{
"epoch": 0.9545199326221224,
"grad_norm": 10.985709190368652,
"learning_rate": 3.41100505334082e-05,
"loss": 4.3861,
"step": 3400
},
{
"epoch": 0.982594048287479,
"grad_norm": 12.080757141113281,
"learning_rate": 3.364214860565226e-05,
"loss": 4.4272,
"step": 3500
},
{
"epoch": 1.0106681639528354,
"grad_norm": 10.2216157913208,
"learning_rate": 3.3174246677896315e-05,
"loss": 4.1802,
"step": 3600
},
{
"epoch": 1.038742279618192,
"grad_norm": 7.825625896453857,
"learning_rate": 3.270634475014037e-05,
"loss": 3.7209,
"step": 3700
},
{
"epoch": 1.0668163952835485,
"grad_norm": 9.543134689331055,
"learning_rate": 3.223844282238443e-05,
"loss": 3.7451,
"step": 3800
},
{
"epoch": 1.094890510948905,
"grad_norm": 6.093784809112549,
"learning_rate": 3.177054089462849e-05,
"loss": 3.7287,
"step": 3900
},
{
"epoch": 1.1229646266142617,
"grad_norm": 9.133221626281738,
"learning_rate": 3.130263896687254e-05,
"loss": 3.7896,
"step": 4000
},
{
"epoch": 1.1510387422796182,
"grad_norm": 8.328789710998535,
"learning_rate": 3.0834737039116604e-05,
"loss": 3.7729,
"step": 4100
},
{
"epoch": 1.1791128579449748,
"grad_norm": 12.665905952453613,
"learning_rate": 3.036683511136066e-05,
"loss": 3.768,
"step": 4200
},
{
"epoch": 1.2071869736103313,
"grad_norm": 9.492897987365723,
"learning_rate": 2.9898933183604715e-05,
"loss": 3.7355,
"step": 4300
},
{
"epoch": 1.235261089275688,
"grad_norm": 10.17910385131836,
"learning_rate": 2.9431031255848773e-05,
"loss": 3.8411,
"step": 4400
},
{
"epoch": 1.2633352049410442,
"grad_norm": 7.939426898956299,
"learning_rate": 2.896312932809283e-05,
"loss": 3.7935,
"step": 4500
},
{
"epoch": 1.2914093206064008,
"grad_norm": 8.300692558288574,
"learning_rate": 2.8495227400336887e-05,
"loss": 3.7495,
"step": 4600
},
{
"epoch": 1.3194834362717573,
"grad_norm": 11.419922828674316,
"learning_rate": 2.8027325472580952e-05,
"loss": 3.8478,
"step": 4700
},
{
"epoch": 1.347557551937114,
"grad_norm": 12.745059967041016,
"learning_rate": 2.7559423544825007e-05,
"loss": 3.8148,
"step": 4800
},
{
"epoch": 1.3756316676024705,
"grad_norm": 12.494312286376953,
"learning_rate": 2.7091521617069066e-05,
"loss": 3.7696,
"step": 4900
},
{
"epoch": 1.403705783267827,
"grad_norm": 7.439785480499268,
"learning_rate": 2.6623619689313124e-05,
"loss": 3.7559,
"step": 5000
},
{
"epoch": 1.4317798989331836,
"grad_norm": 12.009989738464355,
"learning_rate": 2.615571776155718e-05,
"loss": 3.7759,
"step": 5100
},
{
"epoch": 1.4598540145985401,
"grad_norm": 10.613336563110352,
"learning_rate": 2.5687815833801238e-05,
"loss": 3.7577,
"step": 5200
},
{
"epoch": 1.4879281302638967,
"grad_norm": 10.656264305114746,
"learning_rate": 2.5219913906045293e-05,
"loss": 3.7515,
"step": 5300
},
{
"epoch": 1.5160022459292533,
"grad_norm": 9.189164161682129,
"learning_rate": 2.4752011978289352e-05,
"loss": 3.8009,
"step": 5400
},
{
"epoch": 1.5440763615946098,
"grad_norm": 9.947083473205566,
"learning_rate": 2.428411005053341e-05,
"loss": 3.8175,
"step": 5500
},
{
"epoch": 1.5721504772599664,
"grad_norm": 14.514716148376465,
"learning_rate": 2.3816208122777466e-05,
"loss": 3.7141,
"step": 5600
},
{
"epoch": 1.600224592925323,
"grad_norm": 8.071717262268066,
"learning_rate": 2.3348306195021524e-05,
"loss": 3.7252,
"step": 5700
},
{
"epoch": 1.6282987085906795,
"grad_norm": 11.72280216217041,
"learning_rate": 2.2880404267265583e-05,
"loss": 3.7949,
"step": 5800
},
{
"epoch": 1.656372824256036,
"grad_norm": 9.227740287780762,
"learning_rate": 2.2412502339509638e-05,
"loss": 3.733,
"step": 5900
},
{
"epoch": 1.6844469399213926,
"grad_norm": 13.090754508972168,
"learning_rate": 2.1944600411753697e-05,
"loss": 3.8362,
"step": 6000
},
{
"epoch": 1.7125210555867492,
"grad_norm": 12.168136596679688,
"learning_rate": 2.1476698483997755e-05,
"loss": 3.7261,
"step": 6100
},
{
"epoch": 1.7405951712521057,
"grad_norm": 8.332971572875977,
"learning_rate": 2.1008796556241814e-05,
"loss": 3.8215,
"step": 6200
},
{
"epoch": 1.768669286917462,
"grad_norm": 8.5094633102417,
"learning_rate": 2.0540894628485872e-05,
"loss": 3.804,
"step": 6300
},
{
"epoch": 1.7967434025828186,
"grad_norm": 11.575583457946777,
"learning_rate": 2.0072992700729927e-05,
"loss": 3.7369,
"step": 6400
},
{
"epoch": 1.8248175182481752,
"grad_norm": 12.597991943359375,
"learning_rate": 1.9605090772973986e-05,
"loss": 3.7377,
"step": 6500
},
{
"epoch": 1.8528916339135317,
"grad_norm": 9.923151016235352,
"learning_rate": 1.9137188845218044e-05,
"loss": 3.7808,
"step": 6600
},
{
"epoch": 1.8809657495788883,
"grad_norm": 10.516582489013672,
"learning_rate": 1.86692869174621e-05,
"loss": 3.7584,
"step": 6700
},
{
"epoch": 1.9090398652442448,
"grad_norm": 8.432146072387695,
"learning_rate": 1.8201384989706158e-05,
"loss": 3.7837,
"step": 6800
},
{
"epoch": 1.9371139809096012,
"grad_norm": 12.533796310424805,
"learning_rate": 1.7733483061950217e-05,
"loss": 3.6908,
"step": 6900
},
{
"epoch": 1.9651880965749577,
"grad_norm": 11.924694061279297,
"learning_rate": 1.7265581134194272e-05,
"loss": 3.7846,
"step": 7000
},
{
"epoch": 1.9932622122403143,
"grad_norm": 10.164834976196289,
"learning_rate": 1.6797679206438334e-05,
"loss": 3.766,
"step": 7100
},
{
"epoch": 2.021336327905671,
"grad_norm": 9.073919296264648,
"learning_rate": 1.633445629795995e-05,
"loss": 3.3417,
"step": 7200
},
{
"epoch": 2.0494104435710274,
"grad_norm": 10.306299209594727,
"learning_rate": 1.5866554370204005e-05,
"loss": 3.2644,
"step": 7300
},
{
"epoch": 2.077484559236384,
"grad_norm": 11.006250381469727,
"learning_rate": 1.5398652442448064e-05,
"loss": 3.243,
"step": 7400
},
{
"epoch": 2.1055586749017405,
"grad_norm": 9.138731956481934,
"learning_rate": 1.4930750514692121e-05,
"loss": 3.2938,
"step": 7500
},
{
"epoch": 2.133632790567097,
"grad_norm": 9.330265998840332,
"learning_rate": 1.4462848586936178e-05,
"loss": 3.247,
"step": 7600
},
{
"epoch": 2.1617069062324537,
"grad_norm": 9.435338973999023,
"learning_rate": 1.3994946659180236e-05,
"loss": 3.2847,
"step": 7700
},
{
"epoch": 2.18978102189781,
"grad_norm": 9.446837425231934,
"learning_rate": 1.3527044731424293e-05,
"loss": 3.2522,
"step": 7800
},
{
"epoch": 2.2178551375631668,
"grad_norm": 11.070699691772461,
"learning_rate": 1.305914280366835e-05,
"loss": 3.2073,
"step": 7900
},
{
"epoch": 2.2459292532285233,
"grad_norm": 9.919466018676758,
"learning_rate": 1.259124087591241e-05,
"loss": 3.2816,
"step": 8000
},
{
"epoch": 2.27400336889388,
"grad_norm": 11.001142501831055,
"learning_rate": 1.2123338948156467e-05,
"loss": 3.2669,
"step": 8100
},
{
"epoch": 2.3020774845592364,
"grad_norm": 10.917780876159668,
"learning_rate": 1.1655437020400524e-05,
"loss": 3.2488,
"step": 8200
},
{
"epoch": 2.330151600224593,
"grad_norm": 10.114619255065918,
"learning_rate": 1.1187535092644583e-05,
"loss": 3.2317,
"step": 8300
},
{
"epoch": 2.3582257158899496,
"grad_norm": 8.730778694152832,
"learning_rate": 1.071963316488864e-05,
"loss": 3.274,
"step": 8400
},
{
"epoch": 2.386299831555306,
"grad_norm": 7.530703544616699,
"learning_rate": 1.0251731237132698e-05,
"loss": 3.2506,
"step": 8500
},
{
"epoch": 2.4143739472206627,
"grad_norm": 13.161232948303223,
"learning_rate": 9.783829309376755e-06,
"loss": 3.3056,
"step": 8600
},
{
"epoch": 2.4424480628860192,
"grad_norm": 7.7642598152160645,
"learning_rate": 9.315927381620813e-06,
"loss": 3.2745,
"step": 8700
},
{
"epoch": 2.470522178551376,
"grad_norm": 8.774062156677246,
"learning_rate": 8.84802545386487e-06,
"loss": 3.263,
"step": 8800
},
{
"epoch": 2.4985962942167323,
"grad_norm": 13.077786445617676,
"learning_rate": 8.380123526108929e-06,
"loss": 3.2418,
"step": 8900
},
{
"epoch": 2.5266704098820885,
"grad_norm": 20.591571807861328,
"learning_rate": 7.912221598352986e-06,
"loss": 3.3027,
"step": 9000
},
{
"epoch": 2.554744525547445,
"grad_norm": 10.915307998657227,
"learning_rate": 7.444319670597043e-06,
"loss": 3.2292,
"step": 9100
},
{
"epoch": 2.5828186412128016,
"grad_norm": 10.937311172485352,
"learning_rate": 6.976417742841101e-06,
"loss": 3.3281,
"step": 9200
},
{
"epoch": 2.610892756878158,
"grad_norm": 8.748698234558105,
"learning_rate": 6.508515815085159e-06,
"loss": 3.2823,
"step": 9300
},
{
"epoch": 2.6389668725435147,
"grad_norm": 8.032021522521973,
"learning_rate": 6.040613887329216e-06,
"loss": 3.2265,
"step": 9400
},
{
"epoch": 2.6670409882088713,
"grad_norm": 14.668137550354004,
"learning_rate": 5.5727119595732735e-06,
"loss": 3.2673,
"step": 9500
},
{
"epoch": 2.695115103874228,
"grad_norm": 9.834213256835938,
"learning_rate": 5.104810031817331e-06,
"loss": 3.2213,
"step": 9600
},
{
"epoch": 2.7231892195395844,
"grad_norm": 14.294054985046387,
"learning_rate": 4.636908104061389e-06,
"loss": 3.2487,
"step": 9700
},
{
"epoch": 2.751263335204941,
"grad_norm": 8.172629356384277,
"learning_rate": 4.169006176305447e-06,
"loss": 3.2598,
"step": 9800
},
{
"epoch": 2.7793374508702975,
"grad_norm": 18.083383560180664,
"learning_rate": 3.7011042485495043e-06,
"loss": 3.2655,
"step": 9900
},
{
"epoch": 2.807411566535654,
"grad_norm": 13.841841697692871,
"learning_rate": 3.2332023207935616e-06,
"loss": 3.2676,
"step": 10000
},
{
"epoch": 2.8354856822010106,
"grad_norm": 16.98509407043457,
"learning_rate": 2.7653003930376193e-06,
"loss": 3.2499,
"step": 10100
},
{
"epoch": 2.863559797866367,
"grad_norm": 10.365632057189941,
"learning_rate": 2.297398465281677e-06,
"loss": 3.2457,
"step": 10200
},
{
"epoch": 2.8916339135317237,
"grad_norm": 10.886728286743164,
"learning_rate": 1.8294965375257348e-06,
"loss": 3.2445,
"step": 10300
},
{
"epoch": 2.9197080291970803,
"grad_norm": 10.890334129333496,
"learning_rate": 1.3615946097697923e-06,
"loss": 3.211,
"step": 10400
},
{
"epoch": 2.947782144862437,
"grad_norm": 11.197315216064453,
"learning_rate": 8.936926820138499e-07,
"loss": 3.2483,
"step": 10500
},
{
"epoch": 2.9758562605277934,
"grad_norm": 9.767260551452637,
"learning_rate": 4.2579075425790754e-07,
"loss": 3.2594,
"step": 10600
}
],
"logging_steps": 100,
"max_steps": 10686,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5583353462784000.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}