{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.1213406873143827,
  "eval_steps": 600,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00848536274925753,
      "grad_norm": 0.4898678891363344,
      "learning_rate": 8.488964346349746e-07,
      "loss": 1.8056,
      "step": 10
    },
    {
      "epoch": 0.01697072549851506,
      "grad_norm": 0.3537473179717183,
      "learning_rate": 1.6977928692699491e-06,
      "loss": 1.7621,
      "step": 20
    },
    {
      "epoch": 0.025456088247772592,
      "grad_norm": 0.28215953004159977,
      "learning_rate": 2.546689303904924e-06,
      "loss": 1.7571,
      "step": 30
    },
    {
      "epoch": 0.03394145099703012,
      "grad_norm": 0.27446565146764923,
      "learning_rate": 3.3955857385398982e-06,
      "loss": 1.7136,
      "step": 40
    },
    {
      "epoch": 0.04242681374628765,
      "grad_norm": 0.17051549768176558,
      "learning_rate": 4.244482173174873e-06,
      "loss": 1.6767,
      "step": 50
    },
    {
      "epoch": 0.050912176495545185,
      "grad_norm": 0.17763882467320422,
      "learning_rate": 5.093378607809848e-06,
      "loss": 1.6371,
      "step": 60
    },
    {
      "epoch": 0.05939753924480271,
      "grad_norm": 0.14311462596290048,
      "learning_rate": 5.942275042444822e-06,
      "loss": 1.6324,
      "step": 70
    },
    {
      "epoch": 0.06788290199406025,
      "grad_norm": 0.1659540846071645,
      "learning_rate": 6.7911714770797965e-06,
      "loss": 1.6062,
      "step": 80
    },
    {
      "epoch": 0.07636826474331777,
      "grad_norm": 0.20064072815620043,
      "learning_rate": 7.640067911714771e-06,
      "loss": 1.5832,
      "step": 90
    },
    {
      "epoch": 0.0848536274925753,
      "grad_norm": 0.2179045681711979,
      "learning_rate": 8.488964346349745e-06,
      "loss": 1.5898,
      "step": 100
    },
    {
      "epoch": 0.09333899024183284,
      "grad_norm": 0.23866012053128668,
      "learning_rate": 9.337860780984721e-06,
      "loss": 1.5924,
      "step": 110
    },
    {
      "epoch": 0.10182435299109037,
      "grad_norm": 0.18578051776430282,
      "learning_rate": 1.0186757215619695e-05,
      "loss": 1.5877,
      "step": 120
    },
    {
      "epoch": 0.1103097157403479,
      "grad_norm": 0.2216509707409362,
      "learning_rate": 1.103565365025467e-05,
      "loss": 1.5947,
      "step": 130
    },
    {
      "epoch": 0.11879507848960542,
      "grad_norm": 0.20427142255694086,
      "learning_rate": 1.1884550084889643e-05,
      "loss": 1.5841,
      "step": 140
    },
    {
      "epoch": 0.12728044123886295,
      "grad_norm": 0.1765851415675038,
      "learning_rate": 1.2733446519524619e-05,
      "loss": 1.5878,
      "step": 150
    },
    {
      "epoch": 0.1357658039881205,
      "grad_norm": 0.1769355117060811,
      "learning_rate": 1.3582342954159593e-05,
      "loss": 1.5795,
      "step": 160
    },
    {
      "epoch": 0.14425116673737803,
      "grad_norm": 0.1617675663096666,
      "learning_rate": 1.4431239388794569e-05,
      "loss": 1.5549,
      "step": 170
    },
    {
      "epoch": 0.15273652948663555,
      "grad_norm": 0.17302259072151574,
      "learning_rate": 1.5280135823429543e-05,
      "loss": 1.5808,
      "step": 180
    },
    {
      "epoch": 0.1612218922358931,
      "grad_norm": 0.16876039012432806,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 1.5676,
      "step": 190
    },
    {
      "epoch": 0.1697072549851506,
      "grad_norm": 0.19627360154037596,
      "learning_rate": 1.697792869269949e-05,
      "loss": 1.5598,
      "step": 200
    },
    {
      "epoch": 0.17819261773440814,
      "grad_norm": 0.16078510362361015,
      "learning_rate": 1.7826825127334465e-05,
      "loss": 1.5667,
      "step": 210
    },
    {
      "epoch": 0.18667798048366568,
      "grad_norm": 0.16044786518959703,
      "learning_rate": 1.8675721561969442e-05,
      "loss": 1.5815,
      "step": 220
    },
    {
      "epoch": 0.1951633432329232,
      "grad_norm": 0.15656958873834717,
      "learning_rate": 1.9524617996604416e-05,
      "loss": 1.5576,
      "step": 230
    },
    {
      "epoch": 0.20364870598218074,
      "grad_norm": 0.1687290471357602,
      "learning_rate": 2.037351443123939e-05,
      "loss": 1.5453,
      "step": 240
    },
    {
      "epoch": 0.21213406873143828,
      "grad_norm": 0.1519017348276184,
      "learning_rate": 2.1222410865874364e-05,
      "loss": 1.5554,
      "step": 250
    },
    {
      "epoch": 0.2206194314806958,
      "grad_norm": 0.15761892005160086,
      "learning_rate": 2.207130730050934e-05,
      "loss": 1.5494,
      "step": 260
    },
    {
      "epoch": 0.22910479422995333,
      "grad_norm": 0.16857088482977495,
      "learning_rate": 2.2920203735144312e-05,
      "loss": 1.5794,
      "step": 270
    },
    {
      "epoch": 0.23759015697921085,
      "grad_norm": 0.1678705209913503,
      "learning_rate": 2.3769100169779286e-05,
      "loss": 1.5373,
      "step": 280
    },
    {
      "epoch": 0.2460755197284684,
      "grad_norm": 0.14812649566587394,
      "learning_rate": 2.461799660441426e-05,
      "loss": 1.5504,
      "step": 290
    },
    {
      "epoch": 0.2545608824777259,
      "grad_norm": 0.17651916734325857,
      "learning_rate": 2.5466893039049238e-05,
      "loss": 1.5607,
      "step": 300
    },
    {
      "epoch": 0.26304624522698344,
      "grad_norm": 0.14883055338507856,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 1.5311,
      "step": 310
    },
    {
      "epoch": 0.271531607976241,
      "grad_norm": 0.15787522753231265,
      "learning_rate": 2.7164685908319186e-05,
      "loss": 1.5656,
      "step": 320
    },
    {
      "epoch": 0.2800169707254985,
      "grad_norm": 0.1625232940237689,
      "learning_rate": 2.801358234295416e-05,
      "loss": 1.5686,
      "step": 330
    },
    {
      "epoch": 0.28850233347475607,
      "grad_norm": 0.18505951289343867,
      "learning_rate": 2.8862478777589137e-05,
      "loss": 1.5474,
      "step": 340
    },
    {
      "epoch": 0.29698769622401355,
      "grad_norm": 0.13785772316349984,
      "learning_rate": 2.9711375212224108e-05,
      "loss": 1.5696,
      "step": 350
    },
    {
      "epoch": 0.3054730589732711,
      "grad_norm": 0.13531274658248552,
      "learning_rate": 3.0560271646859086e-05,
      "loss": 1.5551,
      "step": 360
    },
    {
      "epoch": 0.31395842172252864,
      "grad_norm": 0.1366381415368909,
      "learning_rate": 3.140916808149406e-05,
      "loss": 1.524,
      "step": 370
    },
    {
      "epoch": 0.3224437844717862,
      "grad_norm": 0.14587220569353926,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 1.5515,
      "step": 380
    },
    {
      "epoch": 0.3309291472210437,
      "grad_norm": 0.13336349383744864,
      "learning_rate": 3.310696095076401e-05,
      "loss": 1.5457,
      "step": 390
    },
    {
      "epoch": 0.3394145099703012,
      "grad_norm": 0.1772016947970983,
      "learning_rate": 3.395585738539898e-05,
      "loss": 1.5582,
      "step": 400
    },
    {
      "epoch": 0.34789987271955874,
      "grad_norm": 0.13819420575084573,
      "learning_rate": 3.4804753820033956e-05,
      "loss": 1.5326,
      "step": 410
    },
    {
      "epoch": 0.3563852354688163,
      "grad_norm": 0.12729862167862188,
      "learning_rate": 3.565365025466893e-05,
      "loss": 1.5387,
      "step": 420
    },
    {
      "epoch": 0.3648705982180738,
      "grad_norm": 0.11777082851399363,
      "learning_rate": 3.6502546689303904e-05,
      "loss": 1.5587,
      "step": 430
    },
    {
      "epoch": 0.37335596096733137,
      "grad_norm": 0.15372268131323022,
      "learning_rate": 3.7351443123938885e-05,
      "loss": 1.5362,
      "step": 440
    },
    {
      "epoch": 0.3818413237165889,
      "grad_norm": 0.12616185572252248,
      "learning_rate": 3.820033955857386e-05,
      "loss": 1.5548,
      "step": 450
    },
    {
      "epoch": 0.3903266864658464,
      "grad_norm": 0.1311200786303391,
      "learning_rate": 3.904923599320883e-05,
      "loss": 1.5409,
      "step": 460
    },
    {
      "epoch": 0.39881204921510394,
      "grad_norm": 0.1707919112561785,
      "learning_rate": 3.989813242784381e-05,
      "loss": 1.5509,
      "step": 470
    },
    {
      "epoch": 0.4072974119643615,
      "grad_norm": 0.14660149264284913,
      "learning_rate": 4.074702886247878e-05,
      "loss": 1.5433,
      "step": 480
    },
    {
      "epoch": 0.415782774713619,
      "grad_norm": 0.12478895483834351,
      "learning_rate": 4.1595925297113755e-05,
      "loss": 1.5382,
      "step": 490
    },
    {
      "epoch": 0.42426813746287656,
      "grad_norm": 0.12327957445795817,
      "learning_rate": 4.244482173174873e-05,
      "loss": 1.5515,
      "step": 500
    },
    {
      "epoch": 0.43275350021213405,
      "grad_norm": 0.12922777738650987,
      "learning_rate": 4.32937181663837e-05,
      "loss": 1.5688,
      "step": 510
    },
    {
      "epoch": 0.4412388629613916,
      "grad_norm": 0.12486802189783415,
      "learning_rate": 4.414261460101868e-05,
      "loss": 1.5452,
      "step": 520
    },
    {
      "epoch": 0.44972422571064913,
      "grad_norm": 0.1360610874577123,
      "learning_rate": 4.499151103565366e-05,
      "loss": 1.5493,
      "step": 530
    },
    {
      "epoch": 0.45820958845990667,
      "grad_norm": 0.1884897685356775,
      "learning_rate": 4.5840407470288625e-05,
      "loss": 1.5511,
      "step": 540
    },
    {
      "epoch": 0.4666949512091642,
      "grad_norm": 0.12446302384809525,
      "learning_rate": 4.6689303904923606e-05,
      "loss": 1.5458,
      "step": 550
    },
    {
      "epoch": 0.4751803139584217,
      "grad_norm": 0.13169591804768588,
      "learning_rate": 4.753820033955857e-05,
      "loss": 1.5569,
      "step": 560
    },
    {
      "epoch": 0.48366567670767924,
      "grad_norm": 0.1343809247449631,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 1.5408,
      "step": 570
    },
    {
      "epoch": 0.4921510394569368,
      "grad_norm": 0.14024589853602,
      "learning_rate": 4.923599320882852e-05,
      "loss": 1.5487,
      "step": 580
    },
    {
      "epoch": 0.5006364022061943,
      "grad_norm": 0.16240429253875313,
      "learning_rate": 4.999999560970061e-05,
      "loss": 1.5488,
      "step": 590
    },
    {
      "epoch": 0.5091217649554518,
      "grad_norm": 0.12575424857894482,
      "learning_rate": 4.999946877563971e-05,
      "loss": 1.532,
      "step": 600
    },
    {
      "epoch": 0.5091217649554518,
      "eval_loss": 1.519254446029663,
      "eval_runtime": 53.3242,
      "eval_samples_per_second": 7.145,
      "eval_steps_per_second": 0.9,
      "step": 600
    },
    {
      "epoch": 0.5176071277047094,
      "grad_norm": 0.18688482756329736,
      "learning_rate": 4.999806390290309e-05,
      "loss": 1.5544,
      "step": 610
    },
    {
      "epoch": 0.5260924904539669,
      "grad_norm": 0.12425469431830571,
      "learning_rate": 4.999578104083307e-05,
      "loss": 1.5443,
      "step": 620
    },
    {
      "epoch": 0.5345778532032245,
      "grad_norm": 0.1299027485420099,
      "learning_rate": 4.999262026960902e-05,
      "loss": 1.5569,
      "step": 630
    },
    {
      "epoch": 0.543063215952482,
      "grad_norm": 0.11441754852508934,
      "learning_rate": 4.998858170024449e-05,
      "loss": 1.5316,
      "step": 640
    },
    {
      "epoch": 0.5515485787017395,
      "grad_norm": 0.14888547248976478,
      "learning_rate": 4.998366547458326e-05,
      "loss": 1.5177,
      "step": 650
    },
    {
      "epoch": 0.560033941450997,
      "grad_norm": 0.14859292774768867,
      "learning_rate": 4.997787176529449e-05,
      "loss": 1.5394,
      "step": 660
    },
    {
      "epoch": 0.5685193042002545,
      "grad_norm": 0.12499154376539734,
      "learning_rate": 4.997120077586651e-05,
      "loss": 1.5554,
      "step": 670
    },
    {
      "epoch": 0.5770046669495121,
      "grad_norm": 0.1218974898058821,
      "learning_rate": 4.9963652740599774e-05,
      "loss": 1.5335,
      "step": 680
    },
    {
      "epoch": 0.5854900296987696,
      "grad_norm": 0.1273110498715124,
      "learning_rate": 4.995522792459859e-05,
      "loss": 1.5349,
      "step": 690
    },
    {
      "epoch": 0.5939753924480271,
      "grad_norm": 0.12115412881719101,
      "learning_rate": 4.994592662376183e-05,
      "loss": 1.5419,
      "step": 700
    },
    {
      "epoch": 0.6024607551972847,
      "grad_norm": 0.14855096330233286,
      "learning_rate": 4.99357491647725e-05,
      "loss": 1.513,
      "step": 710
    },
    {
      "epoch": 0.6109461179465422,
      "grad_norm": 0.11407988659327956,
      "learning_rate": 4.992469590508628e-05,
      "loss": 1.5243,
      "step": 720
    },
    {
      "epoch": 0.6194314806957998,
      "grad_norm": 0.1197712643781127,
      "learning_rate": 4.9912767232919035e-05,
      "loss": 1.5177,
      "step": 730
    },
    {
      "epoch": 0.6279168434450573,
      "grad_norm": 0.12400515877262065,
      "learning_rate": 4.9899963567233074e-05,
      "loss": 1.5619,
      "step": 740
    },
    {
      "epoch": 0.6364022061943148,
      "grad_norm": 0.12250385257708406,
      "learning_rate": 4.988628535772249e-05,
      "loss": 1.539,
      "step": 750
    },
    {
      "epoch": 0.6448875689435724,
      "grad_norm": 0.1262441090496857,
      "learning_rate": 4.987173308479738e-05,
      "loss": 1.5195,
      "step": 760
    },
    {
      "epoch": 0.6533729316928298,
      "grad_norm": 0.12459694416473029,
      "learning_rate": 4.985630725956694e-05,
      "loss": 1.5462,
      "step": 770
    },
    {
      "epoch": 0.6618582944420874,
      "grad_norm": 0.12985189006106762,
      "learning_rate": 4.9840008423821527e-05,
      "loss": 1.5113,
      "step": 780
    },
    {
      "epoch": 0.6703436571913449,
      "grad_norm": 0.12689306141471304,
      "learning_rate": 4.9822837150013636e-05,
      "loss": 1.5201,
      "step": 790
    },
    {
      "epoch": 0.6788290199406024,
      "grad_norm": 0.15393156370587963,
      "learning_rate": 4.980479404123778e-05,
      "loss": 1.5121,
      "step": 800
    },
    {
      "epoch": 0.68731438268986,
      "grad_norm": 0.13213701895207608,
      "learning_rate": 4.978587973120931e-05,
      "loss": 1.5307,
      "step": 810
    },
    {
      "epoch": 0.6957997454391175,
      "grad_norm": 0.11561354931316294,
      "learning_rate": 4.9766094884242184e-05,
      "loss": 1.5316,
      "step": 820
    },
    {
      "epoch": 0.7042851081883751,
      "grad_norm": 0.12414772399330044,
      "learning_rate": 4.974544019522559e-05,
      "loss": 1.5148,
      "step": 830
    },
    {
      "epoch": 0.7127704709376326,
      "grad_norm": 0.1171652849153521,
      "learning_rate": 4.972391638959959e-05,
      "loss": 1.5096,
      "step": 840
    },
    {
      "epoch": 0.7212558336868902,
      "grad_norm": 0.12868937349582316,
      "learning_rate": 4.9701524223329585e-05,
      "loss": 1.5282,
      "step": 850
    },
    {
      "epoch": 0.7297411964361477,
      "grad_norm": 0.1200015077117309,
      "learning_rate": 4.967826448287981e-05,
      "loss": 1.5512,
      "step": 860
    },
    {
      "epoch": 0.7382265591854051,
      "grad_norm": 0.12340885660045105,
      "learning_rate": 4.96541379851857e-05,
      "loss": 1.5394,
      "step": 870
    },
    {
      "epoch": 0.7467119219346627,
      "grad_norm": 0.12976937691467555,
      "learning_rate": 4.962914557762517e-05,
      "loss": 1.51,
      "step": 880
    },
    {
      "epoch": 0.7551972846839202,
      "grad_norm": 0.11912878476038466,
      "learning_rate": 4.9603288137988905e-05,
      "loss": 1.5294,
      "step": 890
    },
    {
      "epoch": 0.7636826474331778,
      "grad_norm": 0.1299625480337927,
      "learning_rate": 4.957656657444947e-05,
      "loss": 1.507,
      "step": 900
    },
    {
      "epoch": 0.7721680101824353,
      "grad_norm": 0.12380144459698468,
      "learning_rate": 4.954898182552946e-05,
      "loss": 1.5376,
      "step": 910
    },
    {
      "epoch": 0.7806533729316928,
      "grad_norm": 0.13139339643682763,
      "learning_rate": 4.9520534860068535e-05,
      "loss": 1.5291,
      "step": 920
    },
    {
      "epoch": 0.7891387356809504,
      "grad_norm": 0.13088956203983898,
      "learning_rate": 4.949122667718935e-05,
      "loss": 1.5239,
      "step": 930
    },
    {
      "epoch": 0.7976240984302079,
      "grad_norm": 0.12586052988453703,
      "learning_rate": 4.94610583062625e-05,
      "loss": 1.5525,
      "step": 940
    },
    {
      "epoch": 0.8061094611794655,
      "grad_norm": 0.12020996031652877,
      "learning_rate": 4.943003080687035e-05,
      "loss": 1.5525,
      "step": 950
    },
    {
      "epoch": 0.814594823928723,
      "grad_norm": 0.12866375954060869,
      "learning_rate": 4.9398145268769856e-05,
      "loss": 1.5266,
      "step": 960
    },
    {
      "epoch": 0.8230801866779804,
      "grad_norm": 0.13166136756817035,
      "learning_rate": 4.936540281185423e-05,
      "loss": 1.5041,
      "step": 970
    },
    {
      "epoch": 0.831565549427238,
      "grad_norm": 0.12481946698483787,
      "learning_rate": 4.933180458611364e-05,
      "loss": 1.5271,
      "step": 980
    },
    {
      "epoch": 0.8400509121764955,
      "grad_norm": 0.12264463761209114,
      "learning_rate": 4.9297351771594844e-05,
      "loss": 1.5354,
      "step": 990
    },
    {
      "epoch": 0.8485362749257531,
      "grad_norm": 0.11985452856537594,
      "learning_rate": 4.926204557835968e-05,
      "loss": 1.5167,
      "step": 1000
    },
    {
      "epoch": 0.8570216376750106,
      "grad_norm": 0.13125396521190327,
      "learning_rate": 4.9225887246442634e-05,
      "loss": 1.5282,
      "step": 1010
    },
    {
      "epoch": 0.8655070004242681,
      "grad_norm": 0.12730192328072554,
      "learning_rate": 4.918887804580725e-05,
      "loss": 1.5089,
      "step": 1020
    },
    {
      "epoch": 0.8739923631735257,
      "grad_norm": 0.12724644219344786,
      "learning_rate": 4.915101927630153e-05,
      "loss": 1.4964,
      "step": 1030
    },
    {
      "epoch": 0.8824777259227832,
      "grad_norm": 0.13578611501833232,
      "learning_rate": 4.911231226761227e-05,
      "loss": 1.5189,
      "step": 1040
    },
    {
      "epoch": 0.8909630886720408,
      "grad_norm": 0.13577513964986457,
      "learning_rate": 4.90727583792184e-05,
      "loss": 1.5149,
      "step": 1050
    },
    {
      "epoch": 0.8994484514212983,
      "grad_norm": 0.1269735011676505,
      "learning_rate": 4.903235900034317e-05,
      "loss": 1.5066,
      "step": 1060
    },
    {
      "epoch": 0.9079338141705557,
      "grad_norm": 0.13250058214235566,
      "learning_rate": 4.899111554990543e-05,
      "loss": 1.5129,
      "step": 1070
    },
    {
      "epoch": 0.9164191769198133,
      "grad_norm": 0.13130735246433495,
      "learning_rate": 4.894902947646975e-05,
      "loss": 1.5156,
      "step": 1080
    },
    {
      "epoch": 0.9249045396690708,
      "grad_norm": 0.1273580180253049,
      "learning_rate": 4.890610225819553e-05,
      "loss": 1.5324,
      "step": 1090
    },
    {
      "epoch": 0.9333899024183284,
      "grad_norm": 0.13155314243939242,
      "learning_rate": 4.8862335402785136e-05,
      "loss": 1.5106,
      "step": 1100
    },
    {
      "epoch": 0.9418752651675859,
      "grad_norm": 0.13564895211984299,
      "learning_rate": 4.88177304474309e-05,
      "loss": 1.5067,
      "step": 1110
    },
    {
      "epoch": 0.9503606279168434,
      "grad_norm": 0.12774735587114736,
      "learning_rate": 4.877228895876115e-05,
      "loss": 1.5182,
      "step": 1120
    },
    {
      "epoch": 0.958845990666101,
      "grad_norm": 0.1307997709537685,
      "learning_rate": 4.872601253278517e-05,
      "loss": 1.4969,
      "step": 1130
    },
    {
      "epoch": 0.9673313534153585,
      "grad_norm": 0.1304794845040634,
      "learning_rate": 4.867890279483717e-05,
      "loss": 1.5264,
      "step": 1140
    },
    {
      "epoch": 0.9758167161646161,
      "grad_norm": 0.13666141796489684,
      "learning_rate": 4.8630961399519206e-05,
      "loss": 1.5467,
      "step": 1150
    },
    {
      "epoch": 0.9843020789138736,
      "grad_norm": 0.1370278303190263,
      "learning_rate": 4.8582190030643e-05,
      "loss": 1.5127,
      "step": 1160
    },
    {
      "epoch": 0.9927874416631312,
      "grad_norm": 0.1390936629299565,
      "learning_rate": 4.8532590401170894e-05,
      "loss": 1.5058,
      "step": 1170
    },
    {
      "epoch": 1.0012728044123886,
      "grad_norm": 0.12934475548108287,
      "learning_rate": 4.848216425315561e-05,
      "loss": 1.5202,
      "step": 1180
    },
    {
      "epoch": 1.0097581671616462,
      "grad_norm": 0.13898591683370803,
      "learning_rate": 4.843091335767913e-05,
      "loss": 1.4563,
      "step": 1190
    },
    {
      "epoch": 1.0182435299109036,
      "grad_norm": 0.17488231535826249,
      "learning_rate": 4.837883951479043e-05,
      "loss": 1.4402,
      "step": 1200
    },
    {
      "epoch": 1.0182435299109036,
      "eval_loss": 1.4955657720565796,
      "eval_runtime": 52.424,
      "eval_samples_per_second": 7.268,
      "eval_steps_per_second": 0.916,
      "step": 1200
    },
    {
      "epoch": 1.0267288926601612,
      "grad_norm": 0.1536036344095855,
      "learning_rate": 4.832594455344229e-05,
      "loss": 1.4848,
      "step": 1210
    },
    {
      "epoch": 1.0352142554094188,
      "grad_norm": 0.15762414421336599,
      "learning_rate": 4.827223033142706e-05,
      "loss": 1.4567,
      "step": 1220
    },
    {
      "epoch": 1.0436996181586762,
      "grad_norm": 0.15058229398130366,
      "learning_rate": 4.8217698735311414e-05,
      "loss": 1.4672,
      "step": 1230
    },
    {
      "epoch": 1.0521849809079338,
      "grad_norm": 0.16010992835678386,
      "learning_rate": 4.8162351680370044e-05,
      "loss": 1.4458,
      "step": 1240
    },
    {
      "epoch": 1.0606703436571914,
      "grad_norm": 0.16758816000341356,
      "learning_rate": 4.810619111051847e-05,
      "loss": 1.4842,
      "step": 1250
    },
    {
      "epoch": 1.069155706406449,
      "grad_norm": 0.16559260972674986,
      "learning_rate": 4.8049218998244696e-05,
      "loss": 1.4556,
      "step": 1260
    },
    {
      "epoch": 1.0776410691557063,
      "grad_norm": 0.17237632034416966,
      "learning_rate": 4.7991437344539966e-05,
      "loss": 1.4813,
      "step": 1270
    },
    {
      "epoch": 1.086126431904964,
      "grad_norm": 0.17112756741722487,
      "learning_rate": 4.793284817882845e-05,
      "loss": 1.4535,
      "step": 1280
    },
    {
      "epoch": 1.0946117946542215,
      "grad_norm": 0.16828572707718548,
      "learning_rate": 4.787345355889604e-05,
      "loss": 1.4344,
      "step": 1290
    },
    {
      "epoch": 1.103097157403479,
      "grad_norm": 0.15709986047041227,
      "learning_rate": 4.7813255570817985e-05,
      "loss": 1.4744,
      "step": 1300
    },
    {
      "epoch": 1.1115825201527365,
      "grad_norm": 0.16651547128146313,
      "learning_rate": 4.775225632888568e-05,
      "loss": 1.4561,
      "step": 1310
    },
    {
      "epoch": 1.120067882901994,
      "grad_norm": 0.16750176017515714,
      "learning_rate": 4.76904579755324e-05,
      "loss": 1.4616,
      "step": 1320
    },
    {
      "epoch": 1.1285532456512515,
      "grad_norm": 0.1608016567554825,
      "learning_rate": 4.7627862681258037e-05,
      "loss": 1.4593,
      "step": 1330
    },
    {
      "epoch": 1.137038608400509,
      "grad_norm": 0.21390766919038295,
      "learning_rate": 4.756447264455287e-05,
      "loss": 1.4484,
      "step": 1340
    },
    {
      "epoch": 1.1455239711497667,
      "grad_norm": 0.16826883293172662,
      "learning_rate": 4.750029009182038e-05,
      "loss": 1.4703,
      "step": 1350
    },
    {
      "epoch": 1.1540093338990243,
      "grad_norm": 0.17431508867079595,
      "learning_rate": 4.7435317277299e-05,
      "loss": 1.4701,
      "step": 1360
    },
    {
      "epoch": 1.1624946966482816,
      "grad_norm": 0.15973851467570443,
      "learning_rate": 4.736955648298299e-05,
      "loss": 1.4503,
      "step": 1370
    },
    {
      "epoch": 1.1709800593975392,
      "grad_norm": 0.1887713767970947,
      "learning_rate": 4.730301001854225e-05,
      "loss": 1.4624,
      "step": 1380
    },
    {
      "epoch": 1.1794654221467968,
      "grad_norm": 0.16898695344997974,
      "learning_rate": 4.7235680221241216e-05,
      "loss": 1.4452,
      "step": 1390
    },
    {
      "epoch": 1.1879507848960542,
      "grad_norm": 0.20014553287073528,
      "learning_rate": 4.716756945585681e-05,
      "loss": 1.4717,
      "step": 1400
    },
    {
      "epoch": 1.1964361476453118,
      "grad_norm": 0.17137954325200072,
      "learning_rate": 4.709868011459528e-05,
      "loss": 1.4403,
      "step": 1410
    },
    {
      "epoch": 1.2049215103945694,
      "grad_norm": 0.17801721751888322,
      "learning_rate": 4.7029014617008294e-05,
      "loss": 1.4339,
      "step": 1420
    },
    {
      "epoch": 1.213406873143827,
      "grad_norm": 0.17139613676642362,
      "learning_rate": 4.695857540990789e-05,
      "loss": 1.4573,
      "step": 1430
    },
    {
      "epoch": 1.2218922358930844,
      "grad_norm": 0.16971403514498054,
      "learning_rate": 4.688736496728058e-05,
      "loss": 1.4282,
      "step": 1440
    },
    {
      "epoch": 1.230377598642342,
      "grad_norm": 0.17200272420880428,
      "learning_rate": 4.681538579020038e-05,
      "loss": 1.4434,
      "step": 1450
    },
    {
      "epoch": 1.2388629613915996,
      "grad_norm": 0.17208160407432616,
      "learning_rate": 4.6742640406741106e-05,
      "loss": 1.45,
      "step": 1460
    },
    {
      "epoch": 1.247348324140857,
      "grad_norm": 0.1939626212901777,
      "learning_rate": 4.666913137188743e-05,
      "loss": 1.4608,
      "step": 1470
    },
    {
      "epoch": 1.2558336868901145,
      "grad_norm": 0.17291794493304186,
      "learning_rate": 4.6594861267445236e-05,
      "loss": 1.4671,
      "step": 1480
    },
    {
      "epoch": 1.2643190496393721,
      "grad_norm": 0.18219792041638924,
      "learning_rate": 4.651983270195093e-05,
      "loss": 1.4262,
      "step": 1490
    },
    {
      "epoch": 1.2728044123886297,
      "grad_norm": 0.18086437830489926,
      "learning_rate": 4.644404831057979e-05,
      "loss": 1.4455,
      "step": 1500
    },
    {
      "epoch": 1.281289775137887,
      "grad_norm": 0.17417619624549402,
      "learning_rate": 4.636751075505344e-05,
      "loss": 1.4873,
      "step": 1510
    },
    {
      "epoch": 1.2897751378871447,
      "grad_norm": 0.18354282411845188,
      "learning_rate": 4.629022272354637e-05,
      "loss": 1.4525,
      "step": 1520
    },
    {
      "epoch": 1.298260500636402,
      "grad_norm": 0.17985617345325455,
      "learning_rate": 4.621218693059149e-05,
      "loss": 1.4303,
      "step": 1530
    },
    {
      "epoch": 1.3067458633856597,
      "grad_norm": 0.1809708317849863,
      "learning_rate": 4.6133406116984795e-05,
      "loss": 1.4631,
      "step": 1540
    },
    {
      "epoch": 1.3152312261349173,
      "grad_norm": 0.17487374671212322,
      "learning_rate": 4.6053883049689145e-05,
      "loss": 1.4482,
      "step": 1550
    },
    {
      "epoch": 1.3237165888841749,
      "grad_norm": 0.19912807671077193,
      "learning_rate": 4.5973620521737036e-05,
      "loss": 1.4497,
      "step": 1560
    },
    {
      "epoch": 1.3322019516334322,
      "grad_norm": 0.17853627546912074,
      "learning_rate": 4.5892621352132514e-05,
      "loss": 1.4456,
      "step": 1570
    },
    {
      "epoch": 1.3406873143826898,
      "grad_norm": 0.18252596927754394,
      "learning_rate": 4.581088838575218e-05,
      "loss": 1.4328,
      "step": 1580
    },
    {
      "epoch": 1.3491726771319474,
      "grad_norm": 0.17604951053556211,
      "learning_rate": 4.572842449324525e-05,
      "loss": 1.4442,
      "step": 1590
    },
    {
      "epoch": 1.3576580398812048,
      "grad_norm": 0.18358942463311748,
      "learning_rate": 4.564523257093275e-05,
      "loss": 1.4338,
      "step": 1600
    },
    {
      "epoch": 1.3661434026304624,
      "grad_norm": 0.20508703236267142,
      "learning_rate": 4.5561315540705774e-05,
      "loss": 1.4445,
      "step": 1610
    },
    {
      "epoch": 1.37462876537972,
      "grad_norm": 0.18486352550747187,
      "learning_rate": 4.547667634992288e-05,
      "loss": 1.4261,
      "step": 1620
    },
    {
      "epoch": 1.3831141281289776,
      "grad_norm": 0.17492766465456316,
      "learning_rate": 4.539131797130656e-05,
      "loss": 1.4258,
      "step": 1630
    },
    {
      "epoch": 1.391599490878235,
      "grad_norm": 0.19692876587833674,
      "learning_rate": 4.530524340283881e-05,
      "loss": 1.4349,
      "step": 1640
    },
    {
      "epoch": 1.4000848536274926,
      "grad_norm": 0.19155373430892478,
      "learning_rate": 4.521845566765589e-05,
      "loss": 1.4536,
      "step": 1650
    },
    {
      "epoch": 1.4085702163767502,
      "grad_norm": 0.18544325977459192,
      "learning_rate": 4.513095781394208e-05,
      "loss": 1.4363,
      "step": 1660
    },
    {
      "epoch": 1.4170555791260075,
      "grad_norm": 0.177828004720666,
      "learning_rate": 4.504275291482267e-05,
      "loss": 1.4595,
      "step": 1670
    },
    {
      "epoch": 1.4255409418752651,
      "grad_norm": 0.17855432230356816,
      "learning_rate": 4.495384406825601e-05,
      "loss": 1.4211,
      "step": 1680
    },
    {
      "epoch": 1.4340263046245227,
      "grad_norm": 0.20232492538380317,
      "learning_rate": 4.486423439692469e-05,
      "loss": 1.4189,
      "step": 1690
    },
    {
      "epoch": 1.4425116673737803,
      "grad_norm": 0.1975109303350431,
      "learning_rate": 4.477392704812585e-05,
      "loss": 1.4565,
      "step": 1700
    },
    {
      "epoch": 1.4509970301230377,
      "grad_norm": 0.19619010830399825,
      "learning_rate": 4.468292519366071e-05,
      "loss": 1.4382,
      "step": 1710
    },
    {
      "epoch": 1.4594823928722953,
      "grad_norm": 0.18168826428246143,
      "learning_rate": 4.459123202972308e-05,
      "loss": 1.4471,
      "step": 1720
    },
    {
      "epoch": 1.4679677556215527,
      "grad_norm": 0.1923264062362399,
      "learning_rate": 4.449885077678717e-05,
      "loss": 1.4153,
      "step": 1730
    },
    {
      "epoch": 1.4764531183708103,
      "grad_norm": 0.1907937313040222,
      "learning_rate": 4.440578467949445e-05,
      "loss": 1.4432,
      "step": 1740
    },
    {
      "epoch": 1.4849384811200679,
      "grad_norm": 0.19107457667767244,
      "learning_rate": 4.431203700653968e-05,
      "loss": 1.4285,
      "step": 1750
    },
    {
      "epoch": 1.4934238438693255,
      "grad_norm": 0.19847350429107552,
      "learning_rate": 4.421761105055613e-05,
      "loss": 1.4383,
      "step": 1760
    },
    {
      "epoch": 1.501909206618583,
      "grad_norm": 0.18536475556610216,
      "learning_rate": 4.4122510127999937e-05,
      "loss": 1.42,
      "step": 1770
    },
    {
      "epoch": 1.5103945693678404,
      "grad_norm": 0.18481023473586697,
      "learning_rate": 4.4026737579033584e-05,
      "loss": 1.4384,
      "step": 1780
    },
    {
      "epoch": 1.518879932117098,
      "grad_norm": 0.20863867505874642,
      "learning_rate": 4.393029676740864e-05,
      "loss": 1.4543,
      "step": 1790
    },
    {
      "epoch": 1.5273652948663554,
      "grad_norm": 0.1816036870853105,
      "learning_rate": 4.3833191080347575e-05,
      "loss": 1.434,
      "step": 1800
    },
    {
      "epoch": 1.5273652948663554,
      "eval_loss": 1.4622184038162231,
      "eval_runtime": 52.4041,
      "eval_samples_per_second": 7.27,
      "eval_steps_per_second": 0.916,
      "step": 1800
    },
    {
      "epoch": 1.535850657615613,
      "grad_norm": 0.19378252368958881,
      "learning_rate": 4.3735423928424815e-05,
      "loss": 1.4275,
      "step": 1810
    },
    {
      "epoch": 1.5443360203648706,
      "grad_norm": 0.20453331251433848,
      "learning_rate": 4.363699874544697e-05,
      "loss": 1.4203,
      "step": 1820
    },
    {
      "epoch": 1.5528213831141282,
      "grad_norm": 0.26684319417219377,
      "learning_rate": 4.3537918988332156e-05,
      "loss": 1.4372,
      "step": 1830
    },
    {
      "epoch": 1.5613067458633858,
      "grad_norm": 0.25745160303419773,
      "learning_rate": 4.343818813698868e-05,
      "loss": 1.4082,
      "step": 1840
    },
    {
      "epoch": 1.5697921086126432,
      "grad_norm": 0.19969727996700776,
      "learning_rate": 4.3337809694192765e-05,
      "loss": 1.4314,
      "step": 1850
    },
    {
      "epoch": 1.5782774713619008,
      "grad_norm": 0.20117210832277968,
      "learning_rate": 4.3236787185465525e-05,
      "loss": 1.4293,
      "step": 1860
    },
    {
      "epoch": 1.5867628341111581,
      "grad_norm": 0.20173003641028897,
      "learning_rate": 4.313512415894913e-05,
      "loss": 1.4406,
      "step": 1870
    },
    {
      "epoch": 1.5952481968604157,
      "grad_norm": 0.20304770794371527,
      "learning_rate": 4.303282418528224e-05,
      "loss": 1.4286,
      "step": 1880
    },
    {
      "epoch": 1.6037335596096733,
      "grad_norm": 0.19126658907738198,
      "learning_rate": 4.292989085747452e-05,
      "loss": 1.4184,
      "step": 1890
    },
    {
      "epoch": 1.612218922358931,
      "grad_norm": 0.20069554966453027,
      "learning_rate": 4.282632779078051e-05,
      "loss": 1.4133,
      "step": 1900
    },
    {
      "epoch": 1.6207042851081885,
      "grad_norm": 0.1952881519566686,
      "learning_rate": 4.2722138622572624e-05,
      "loss": 1.4432,
      "step": 1910
    },
    {
      "epoch": 1.629189647857446,
      "grad_norm": 0.19763704668680288,
      "learning_rate": 4.261732701221339e-05,
      "loss": 1.3921,
      "step": 1920
    },
    {
      "epoch": 1.6376750106067033,
      "grad_norm": 0.19821464294464497,
      "learning_rate": 4.2511896640926925e-05,
      "loss": 1.4454,
      "step": 1930
    },
    {
      "epoch": 1.6461603733559609,
      "grad_norm": 0.20456545626297834,
      "learning_rate": 4.240585121166966e-05,
      "loss": 1.4147,
      "step": 1940
    },
    {
      "epoch": 1.6546457361052185,
      "grad_norm": 0.2119092529186395,
      "learning_rate": 4.229919444900027e-05,
      "loss": 1.3969,
      "step": 1950
    },
    {
      "epoch": 1.663131098854476,
      "grad_norm": 0.20330157582122357,
      "learning_rate": 4.2191930098948865e-05,
      "loss": 1.426,
      "step": 1960
    },
    {
      "epoch": 1.6716164616037337,
      "grad_norm": 0.21761164739298738,
      "learning_rate": 4.2084061928885406e-05,
      "loss": 1.4246,
      "step": 1970
    },
    {
      "epoch": 1.680101824352991,
      "grad_norm": 0.19331588142071401,
      "learning_rate": 4.197559372738741e-05,
      "loss": 1.4305,
      "step": 1980
    },
    {
      "epoch": 1.6885871871022486,
      "grad_norm": 0.20188460724329996,
      "learning_rate": 4.186652930410685e-05,
      "loss": 1.4153,
      "step": 1990
    },
    {
      "epoch": 1.697072549851506,
      "grad_norm": 0.20988950033571588,
      "learning_rate": 4.1756872489636425e-05,
      "loss": 1.3894,
      "step": 2000
    },
    {
      "epoch": 1.7055579126007636,
      "grad_norm": 0.1966475893123187,
      "learning_rate": 4.1646627135374916e-05,
      "loss": 1.3962,
      "step": 2010
    },
    {
      "epoch": 1.7140432753500212,
      "grad_norm": 0.20785207367991768,
      "learning_rate": 4.1535797113392004e-05,
      "loss": 1.4037,
      "step": 2020
    },
    {
      "epoch": 1.7225286380992788,
      "grad_norm": 0.2029940281663133,
      "learning_rate": 4.1424386316292224e-05,
      "loss": 1.4011,
      "step": 2030
    },
    {
      "epoch": 1.7310140008485364,
      "grad_norm": 0.2247844551379277,
      "learning_rate": 4.131239865707829e-05,
      "loss": 1.4084,
      "step": 2040
    },
    {
      "epoch": 1.7394993635977938,
      "grad_norm": 0.20900441746105022,
      "learning_rate": 4.11998380690136e-05,
      "loss": 1.4235,
      "step": 2050
    },
    {
      "epoch": 1.7479847263470514,
      "grad_norm": 0.20362408546889926,
      "learning_rate": 4.108670850548416e-05,
      "loss": 1.4204,
      "step": 2060
    },
    {
      "epoch": 1.7564700890963088,
      "grad_norm": 0.22281567946240438,
      "learning_rate": 4.097301393985968e-05,
      "loss": 1.4023,
      "step": 2070
    },
    {
      "epoch": 1.7649554518455663,
      "grad_norm": 0.20867113178797225,
      "learning_rate": 4.085875836535404e-05,
      "loss": 1.3895,
      "step": 2080
    },
    {
      "epoch": 1.773440814594824,
      "grad_norm": 0.22113231886160947,
      "learning_rate": 4.0743945794885063e-05,
      "loss": 1.3963,
      "step": 2090
    },
    {
      "epoch": 1.7819261773440815,
      "grad_norm": 0.22334563577844263,
      "learning_rate": 4.062858026093351e-05,
      "loss": 1.3988,
      "step": 2100
    },
    {
      "epoch": 1.7904115400933391,
      "grad_norm": 0.23218581668265403,
      "learning_rate": 4.051266581540152e-05,
      "loss": 1.4068,
      "step": 2110
    },
    {
      "epoch": 1.7988969028425965,
      "grad_norm": 0.20295589384571033,
      "learning_rate": 4.0396206529470234e-05,
      "loss": 1.3883,
      "step": 2120
    },
    {
      "epoch": 1.8073822655918539,
      "grad_norm": 0.22861611442392848,
      "learning_rate": 4.027920649345687e-05,
      "loss": 1.4043,
      "step": 2130
    },
    {
      "epoch": 1.8158676283411115,
      "grad_norm": 0.2083012771089638,
      "learning_rate": 4.0161669816671e-05,
      "loss": 1.398,
      "step": 2140
    },
    {
      "epoch": 1.824352991090369,
      "grad_norm": 0.21936173231840464,
      "learning_rate": 4.004360062727028e-05,
      "loss": 1.4142,
      "step": 2150
    },
    {
      "epoch": 1.8328383538396267,
      "grad_norm": 0.21383435796328337,
      "learning_rate": 3.9925003072115406e-05,
      "loss": 1.4138,
      "step": 2160
    },
    {
      "epoch": 1.8413237165888843,
      "grad_norm": 0.23301608248270392,
      "learning_rate": 3.9805881316624506e-05,
      "loss": 1.4195,
      "step": 2170
    },
    {
      "epoch": 1.8498090793381419,
      "grad_norm": 0.22424766656883474,
      "learning_rate": 3.968623954462681e-05,
      "loss": 1.4011,
      "step": 2180
    },
    {
      "epoch": 1.8582944420873992,
      "grad_norm": 0.21286417342881453,
      "learning_rate": 3.9566081958215734e-05,
      "loss": 1.409,
      "step": 2190
    },
    {
      "epoch": 1.8667798048366566,
      "grad_norm": 0.21944800687444807,
      "learning_rate": 3.9445412777601284e-05,
      "loss": 1.3877,
      "step": 2200
    },
    {
      "epoch": 1.8752651675859142,
      "grad_norm": 0.23113173625974803,
      "learning_rate": 3.932423624096181e-05,
      "loss": 1.4089,
      "step": 2210
    },
    {
      "epoch": 1.8837505303351718,
      "grad_norm": 0.2081941699587778,
      "learning_rate": 3.920255660429517e-05,
      "loss": 1.4024,
      "step": 2220
    },
    {
      "epoch": 1.8922358930844294,
      "grad_norm": 0.2188685806654701,
      "learning_rate": 3.908037814126927e-05,
      "loss": 1.3878,
      "step": 2230
    },
    {
      "epoch": 1.900721255833687,
      "grad_norm": 0.22761843244757962,
      "learning_rate": 3.895770514307193e-05,
      "loss": 1.4004,
      "step": 2240
    },
    {
      "epoch": 1.9092066185829444,
      "grad_norm": 0.23309183623120422,
      "learning_rate": 3.883454191826017e-05,
      "loss": 1.4188,
      "step": 2250
    },
    {
      "epoch": 1.917691981332202,
      "grad_norm": 0.20329785843911802,
      "learning_rate": 3.871089279260891e-05,
      "loss": 1.3893,
      "step": 2260
    },
    {
      "epoch": 1.9261773440814594,
      "grad_norm": 0.23470973193726366,
      "learning_rate": 3.8586762108958995e-05,
      "loss": 1.3974,
      "step": 2270
    },
    {
      "epoch": 1.934662706830717,
      "grad_norm": 0.22779136837044714,
      "learning_rate": 3.8462154227064725e-05,
      "loss": 1.4115,
      "step": 2280
    },
    {
      "epoch": 1.9431480695799745,
      "grad_norm": 0.22338952315651892,
      "learning_rate": 3.833707352344068e-05,
      "loss": 1.3873,
      "step": 2290
    },
    {
      "epoch": 1.9516334323292321,
      "grad_norm": 0.23069304025882129,
      "learning_rate": 3.821152439120801e-05,
      "loss": 1.3944,
      "step": 2300
    },
    {
      "epoch": 1.9601187950784897,
      "grad_norm": 0.23590596270163203,
      "learning_rate": 3.808551123994018e-05,
      "loss": 1.3857,
      "step": 2310
    },
    {
      "epoch": 1.9686041578277471,
      "grad_norm": 0.22545661808214923,
      "learning_rate": 3.795903849550805e-05,
      "loss": 1.3628,
      "step": 2320
    },
    {
      "epoch": 1.9770895205770047,
      "grad_norm": 0.2450769875954842,
      "learning_rate": 3.7832110599924455e-05,
      "loss": 1.4079,
      "step": 2330
    },
    {
      "epoch": 1.985574883326262,
      "grad_norm": 0.22931499326784313,
      "learning_rate": 3.7704732011188166e-05,
      "loss": 1.379,
      "step": 2340
    },
    {
      "epoch": 1.9940602460755197,
      "grad_norm": 0.22417244507397657,
      "learning_rate": 3.7576907203127346e-05,
      "loss": 1.4035,
      "step": 2350
    },
    {
      "epoch": 2.0025456088247773,
      "grad_norm": 0.24496197221575314,
      "learning_rate": 3.7448640665242406e-05,
      "loss": 1.442,
      "step": 2360
    },
    {
      "epoch": 2.011030971574035,
      "grad_norm": 0.2532740296990078,
      "learning_rate": 3.73199369025483e-05,
      "loss": 1.2672,
      "step": 2370
    },
    {
      "epoch": 2.0195163343232925,
      "grad_norm": 0.2890155987968593,
      "learning_rate": 3.7190800435416355e-05,
      "loss": 1.246,
      "step": 2380
    },
    {
      "epoch": 2.02800169707255,
      "grad_norm": 0.2541972565696406,
      "learning_rate": 3.706123579941545e-05,
      "loss": 1.2603,
      "step": 2390
    },
    {
      "epoch": 2.036487059821807,
      "grad_norm": 0.2530140862527023,
      "learning_rate": 3.693124754515272e-05,
      "loss": 1.2638,
      "step": 2400
    },
    {
      "epoch": 2.036487059821807,
      "eval_loss": 1.435962438583374,
      "eval_runtime": 52.582,
      "eval_samples_per_second": 7.246,
      "eval_steps_per_second": 0.913,
      "step": 2400
    },
    {
      "epoch": 2.044972422571065,
      "grad_norm": 0.25100458343337734,
      "learning_rate": 3.680084023811377e-05,
      "loss": 1.2711,
      "step": 2410
    },
    {
      "epoch": 2.0534577853203224,
      "grad_norm": 0.2695727673292618,
      "learning_rate": 3.66700184585023e-05,
      "loss": 1.2578,
      "step": 2420
    },
    {
      "epoch": 2.06194314806958,
      "grad_norm": 0.2605068415443213,
      "learning_rate": 3.6538786801079226e-05,
      "loss": 1.2506,
      "step": 2430
    },
    {
      "epoch": 2.0704285108188376,
      "grad_norm": 0.27415607207865045,
      "learning_rate": 3.64071498750013e-05,
      "loss": 1.2852,
      "step": 2440
    },
    {
      "epoch": 2.078913873568095,
      "grad_norm": 0.2688900338206285,
      "learning_rate": 3.627511230365928e-05,
      "loss": 1.2695,
      "step": 2450
    },
    {
      "epoch": 2.0873992363173524,
      "grad_norm": 0.2750825805336503,
      "learning_rate": 3.614267872451546e-05,
      "loss": 1.2643,
      "step": 2460
    },
    {
      "epoch": 2.09588459906661,
      "grad_norm": 0.2659269066581903,
      "learning_rate": 3.600985378894086e-05,
      "loss": 1.2868,
      "step": 2470
    },
    {
      "epoch": 2.1043699618158676,
      "grad_norm": 0.24411151291321526,
      "learning_rate": 3.587664216205183e-05,
      "loss": 1.2571,
      "step": 2480
    },
    {
      "epoch": 2.112855324565125,
      "grad_norm": 0.2574194755634052,
      "learning_rate": 3.574304852254621e-05,
      "loss": 1.2769,
      "step": 2490
    },
    {
      "epoch": 2.1213406873143827,
      "grad_norm": 0.2894545074998905,
      "learning_rate": 3.5609077562538997e-05,
      "loss": 1.2469,
      "step": 2500
    }
  ],
  "logging_steps": 10,
  "max_steps": 5890,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2101429250555904.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}