{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.2426813746287655,
  "eval_steps": 600,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00848536274925753,
      "grad_norm": 0.4898678891363344,
      "learning_rate": 8.488964346349746e-07,
      "loss": 1.8056,
      "step": 10
    },
    {
      "epoch": 0.01697072549851506,
      "grad_norm": 0.3537473179717183,
      "learning_rate": 1.6977928692699491e-06,
      "loss": 1.7621,
      "step": 20
    },
    {
      "epoch": 0.025456088247772592,
      "grad_norm": 0.28215953004159977,
      "learning_rate": 2.546689303904924e-06,
      "loss": 1.7571,
      "step": 30
    },
    {
      "epoch": 0.03394145099703012,
      "grad_norm": 0.27446565146764923,
      "learning_rate": 3.3955857385398982e-06,
      "loss": 1.7136,
      "step": 40
    },
    {
      "epoch": 0.04242681374628765,
      "grad_norm": 0.17051549768176558,
      "learning_rate": 4.244482173174873e-06,
      "loss": 1.6767,
      "step": 50
    },
    {
      "epoch": 0.050912176495545185,
      "grad_norm": 0.17763882467320422,
      "learning_rate": 5.093378607809848e-06,
      "loss": 1.6371,
      "step": 60
    },
    {
      "epoch": 0.05939753924480271,
      "grad_norm": 0.14311462596290048,
      "learning_rate": 5.942275042444822e-06,
      "loss": 1.6324,
      "step": 70
    },
    {
      "epoch": 0.06788290199406025,
      "grad_norm": 0.1659540846071645,
      "learning_rate": 6.7911714770797965e-06,
      "loss": 1.6062,
      "step": 80
    },
    {
      "epoch": 0.07636826474331777,
      "grad_norm": 0.20064072815620043,
      "learning_rate": 7.640067911714771e-06,
      "loss": 1.5832,
      "step": 90
    },
    {
      "epoch": 0.0848536274925753,
      "grad_norm": 0.2179045681711979,
      "learning_rate": 8.488964346349745e-06,
      "loss": 1.5898,
      "step": 100
    },
    {
      "epoch": 0.09333899024183284,
      "grad_norm": 0.23866012053128668,
      "learning_rate": 9.337860780984721e-06,
      "loss": 1.5924,
      "step": 110
    },
    {
      "epoch": 0.10182435299109037,
      "grad_norm": 0.18578051776430282,
      "learning_rate": 1.0186757215619695e-05,
      "loss": 1.5877,
      "step": 120
    },
    {
      "epoch": 0.1103097157403479,
      "grad_norm": 0.2216509707409362,
      "learning_rate": 1.103565365025467e-05,
      "loss": 1.5947,
      "step": 130
    },
    {
      "epoch": 0.11879507848960542,
      "grad_norm": 0.20427142255694086,
      "learning_rate": 1.1884550084889643e-05,
      "loss": 1.5841,
      "step": 140
    },
    {
      "epoch": 0.12728044123886295,
      "grad_norm": 0.1765851415675038,
      "learning_rate": 1.2733446519524619e-05,
      "loss": 1.5878,
      "step": 150
    },
    {
      "epoch": 0.1357658039881205,
      "grad_norm": 0.1769355117060811,
      "learning_rate": 1.3582342954159593e-05,
      "loss": 1.5795,
      "step": 160
    },
    {
      "epoch": 0.14425116673737803,
      "grad_norm": 0.1617675663096666,
      "learning_rate": 1.4431239388794569e-05,
      "loss": 1.5549,
      "step": 170
    },
    {
      "epoch": 0.15273652948663555,
      "grad_norm": 0.17302259072151574,
      "learning_rate": 1.5280135823429543e-05,
      "loss": 1.5808,
      "step": 180
    },
    {
      "epoch": 0.1612218922358931,
      "grad_norm": 0.16876039012432806,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 1.5676,
      "step": 190
    },
    {
      "epoch": 0.1697072549851506,
      "grad_norm": 0.19627360154037596,
      "learning_rate": 1.697792869269949e-05,
      "loss": 1.5598,
      "step": 200
    },
    {
      "epoch": 0.17819261773440814,
      "grad_norm": 0.16078510362361015,
      "learning_rate": 1.7826825127334465e-05,
      "loss": 1.5667,
      "step": 210
    },
    {
      "epoch": 0.18667798048366568,
      "grad_norm": 0.16044786518959703,
      "learning_rate": 1.8675721561969442e-05,
      "loss": 1.5815,
      "step": 220
    },
    {
      "epoch": 0.1951633432329232,
      "grad_norm": 0.15656958873834717,
      "learning_rate": 1.9524617996604416e-05,
      "loss": 1.5576,
      "step": 230
    },
    {
      "epoch": 0.20364870598218074,
      "grad_norm": 0.1687290471357602,
      "learning_rate": 2.037351443123939e-05,
      "loss": 1.5453,
      "step": 240
    },
    {
      "epoch": 0.21213406873143828,
      "grad_norm": 0.1519017348276184,
      "learning_rate": 2.1222410865874364e-05,
      "loss": 1.5554,
      "step": 250
    },
    {
      "epoch": 0.2206194314806958,
      "grad_norm": 0.15761892005160086,
      "learning_rate": 2.207130730050934e-05,
      "loss": 1.5494,
      "step": 260
    },
    {
      "epoch": 0.22910479422995333,
      "grad_norm": 0.16857088482977495,
      "learning_rate": 2.2920203735144312e-05,
      "loss": 1.5794,
      "step": 270
    },
    {
      "epoch": 0.23759015697921085,
      "grad_norm": 0.1678705209913503,
      "learning_rate": 2.3769100169779286e-05,
      "loss": 1.5373,
      "step": 280
    },
    {
      "epoch": 0.2460755197284684,
      "grad_norm": 0.14812649566587394,
      "learning_rate": 2.461799660441426e-05,
      "loss": 1.5504,
      "step": 290
    },
    {
      "epoch": 0.2545608824777259,
      "grad_norm": 0.17651916734325857,
      "learning_rate": 2.5466893039049238e-05,
      "loss": 1.5607,
      "step": 300
    },
    {
      "epoch": 0.26304624522698344,
      "grad_norm": 0.14883055338507856,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 1.5311,
      "step": 310
    },
    {
      "epoch": 0.271531607976241,
      "grad_norm": 0.15787522753231265,
      "learning_rate": 2.7164685908319186e-05,
      "loss": 1.5656,
      "step": 320
    },
    {
      "epoch": 0.2800169707254985,
      "grad_norm": 0.1625232940237689,
      "learning_rate": 2.801358234295416e-05,
      "loss": 1.5686,
      "step": 330
    },
    {
      "epoch": 0.28850233347475607,
      "grad_norm": 0.18505951289343867,
      "learning_rate": 2.8862478777589137e-05,
      "loss": 1.5474,
      "step": 340
    },
    {
      "epoch": 0.29698769622401355,
      "grad_norm": 0.13785772316349984,
      "learning_rate": 2.9711375212224108e-05,
      "loss": 1.5696,
      "step": 350
    },
    {
      "epoch": 0.3054730589732711,
      "grad_norm": 0.13531274658248552,
      "learning_rate": 3.0560271646859086e-05,
      "loss": 1.5551,
      "step": 360
    },
    {
      "epoch": 0.31395842172252864,
      "grad_norm": 0.1366381415368909,
      "learning_rate": 3.140916808149406e-05,
      "loss": 1.524,
      "step": 370
    },
    {
      "epoch": 0.3224437844717862,
      "grad_norm": 0.14587220569353926,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 1.5515,
      "step": 380
    },
    {
      "epoch": 0.3309291472210437,
      "grad_norm": 0.13336349383744864,
      "learning_rate": 3.310696095076401e-05,
      "loss": 1.5457,
      "step": 390
    },
    {
      "epoch": 0.3394145099703012,
      "grad_norm": 0.1772016947970983,
      "learning_rate": 3.395585738539898e-05,
      "loss": 1.5582,
      "step": 400
    },
    {
      "epoch": 0.34789987271955874,
      "grad_norm": 0.13819420575084573,
      "learning_rate": 3.4804753820033956e-05,
      "loss": 1.5326,
      "step": 410
    },
    {
      "epoch": 0.3563852354688163,
      "grad_norm": 0.12729862167862188,
      "learning_rate": 3.565365025466893e-05,
      "loss": 1.5387,
      "step": 420
    },
    {
      "epoch": 0.3648705982180738,
      "grad_norm": 0.11777082851399363,
      "learning_rate": 3.6502546689303904e-05,
      "loss": 1.5587,
      "step": 430
    },
    {
      "epoch": 0.37335596096733137,
      "grad_norm": 0.15372268131323022,
      "learning_rate": 3.7351443123938885e-05,
      "loss": 1.5362,
      "step": 440
    },
    {
      "epoch": 0.3818413237165889,
      "grad_norm": 0.12616185572252248,
      "learning_rate": 3.820033955857386e-05,
      "loss": 1.5548,
      "step": 450
    },
    {
      "epoch": 0.3903266864658464,
      "grad_norm": 0.1311200786303391,
      "learning_rate": 3.904923599320883e-05,
      "loss": 1.5409,
      "step": 460
    },
    {
      "epoch": 0.39881204921510394,
      "grad_norm": 0.1707919112561785,
      "learning_rate": 3.989813242784381e-05,
      "loss": 1.5509,
      "step": 470
    },
    {
      "epoch": 0.4072974119643615,
      "grad_norm": 0.14660149264284913,
      "learning_rate": 4.074702886247878e-05,
      "loss": 1.5433,
      "step": 480
    },
    {
      "epoch": 0.415782774713619,
      "grad_norm": 0.12478895483834351,
      "learning_rate": 4.1595925297113755e-05,
      "loss": 1.5382,
      "step": 490
    },
    {
      "epoch": 0.42426813746287656,
      "grad_norm": 0.12327957445795817,
      "learning_rate": 4.244482173174873e-05,
      "loss": 1.5515,
      "step": 500
    },
    {
      "epoch": 0.43275350021213405,
      "grad_norm": 0.12922777738650987,
      "learning_rate": 4.32937181663837e-05,
      "loss": 1.5688,
      "step": 510
    },
    {
      "epoch": 0.4412388629613916,
      "grad_norm": 0.12486802189783415,
      "learning_rate": 4.414261460101868e-05,
      "loss": 1.5452,
      "step": 520
    },
    {
      "epoch": 0.44972422571064913,
      "grad_norm": 0.1360610874577123,
      "learning_rate": 4.499151103565366e-05,
      "loss": 1.5493,
      "step": 530
    },
    {
      "epoch": 0.45820958845990667,
      "grad_norm": 0.1884897685356775,
      "learning_rate": 4.5840407470288625e-05,
      "loss": 1.5511,
      "step": 540
    },
    {
      "epoch": 0.4666949512091642,
      "grad_norm": 0.12446302384809525,
      "learning_rate": 4.6689303904923606e-05,
      "loss": 1.5458,
      "step": 550
    },
    {
      "epoch": 0.4751803139584217,
      "grad_norm": 0.13169591804768588,
      "learning_rate": 4.753820033955857e-05,
      "loss": 1.5569,
      "step": 560
    },
    {
      "epoch": 0.48366567670767924,
      "grad_norm": 0.1343809247449631,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 1.5408,
      "step": 570
    },
    {
      "epoch": 0.4921510394569368,
      "grad_norm": 0.14024589853602,
      "learning_rate": 4.923599320882852e-05,
      "loss": 1.5487,
      "step": 580
    },
    {
      "epoch": 0.5006364022061943,
      "grad_norm": 0.16240429253875313,
      "learning_rate": 4.999999560970061e-05,
      "loss": 1.5488,
      "step": 590
    },
    {
      "epoch": 0.5091217649554518,
      "grad_norm": 0.12575424857894482,
      "learning_rate": 4.999946877563971e-05,
      "loss": 1.532,
      "step": 600
    },
    {
      "epoch": 0.5091217649554518,
      "eval_loss": 1.519254446029663,
      "eval_runtime": 53.3242,
      "eval_samples_per_second": 7.145,
      "eval_steps_per_second": 0.9,
      "step": 600
    },
    {
      "epoch": 0.5176071277047094,
      "grad_norm": 0.18688482756329736,
      "learning_rate": 4.999806390290309e-05,
      "loss": 1.5544,
      "step": 610
    },
    {
      "epoch": 0.5260924904539669,
      "grad_norm": 0.12425469431830571,
      "learning_rate": 4.999578104083307e-05,
      "loss": 1.5443,
      "step": 620
    },
    {
      "epoch": 0.5345778532032245,
      "grad_norm": 0.1299027485420099,
      "learning_rate": 4.999262026960902e-05,
      "loss": 1.5569,
      "step": 630
    },
    {
      "epoch": 0.543063215952482,
      "grad_norm": 0.11441754852508934,
      "learning_rate": 4.998858170024449e-05,
      "loss": 1.5316,
      "step": 640
    },
    {
      "epoch": 0.5515485787017395,
      "grad_norm": 0.14888547248976478,
      "learning_rate": 4.998366547458326e-05,
      "loss": 1.5177,
      "step": 650
    },
    {
      "epoch": 0.560033941450997,
      "grad_norm": 0.14859292774768867,
      "learning_rate": 4.997787176529449e-05,
      "loss": 1.5394,
      "step": 660
    },
    {
      "epoch": 0.5685193042002545,
      "grad_norm": 0.12499154376539734,
      "learning_rate": 4.997120077586651e-05,
      "loss": 1.5554,
      "step": 670
    },
    {
      "epoch": 0.5770046669495121,
      "grad_norm": 0.1218974898058821,
      "learning_rate": 4.9963652740599774e-05,
      "loss": 1.5335,
      "step": 680
    },
    {
      "epoch": 0.5854900296987696,
      "grad_norm": 0.1273110498715124,
      "learning_rate": 4.995522792459859e-05,
      "loss": 1.5349,
      "step": 690
    },
    {
      "epoch": 0.5939753924480271,
      "grad_norm": 0.12115412881719101,
      "learning_rate": 4.994592662376183e-05,
      "loss": 1.5419,
      "step": 700
    },
    {
      "epoch": 0.6024607551972847,
      "grad_norm": 0.14855096330233286,
      "learning_rate": 4.99357491647725e-05,
      "loss": 1.513,
      "step": 710
    },
    {
      "epoch": 0.6109461179465422,
      "grad_norm": 0.11407988659327956,
      "learning_rate": 4.992469590508628e-05,
      "loss": 1.5243,
      "step": 720
    },
    {
      "epoch": 0.6194314806957998,
      "grad_norm": 0.1197712643781127,
      "learning_rate": 4.9912767232919035e-05,
      "loss": 1.5177,
      "step": 730
    },
    {
      "epoch": 0.6279168434450573,
      "grad_norm": 0.12400515877262065,
      "learning_rate": 4.9899963567233074e-05,
      "loss": 1.5619,
      "step": 740
    },
    {
      "epoch": 0.6364022061943148,
      "grad_norm": 0.12250385257708406,
      "learning_rate": 4.988628535772249e-05,
      "loss": 1.539,
      "step": 750
    },
    {
      "epoch": 0.6448875689435724,
      "grad_norm": 0.1262441090496857,
      "learning_rate": 4.987173308479738e-05,
      "loss": 1.5195,
      "step": 760
    },
    {
      "epoch": 0.6533729316928298,
      "grad_norm": 0.12459694416473029,
      "learning_rate": 4.985630725956694e-05,
      "loss": 1.5462,
      "step": 770
    },
    {
      "epoch": 0.6618582944420874,
      "grad_norm": 0.12985189006106762,
      "learning_rate": 4.9840008423821527e-05,
      "loss": 1.5113,
      "step": 780
    },
    {
      "epoch": 0.6703436571913449,
      "grad_norm": 0.12689306141471304,
      "learning_rate": 4.9822837150013636e-05,
      "loss": 1.5201,
      "step": 790
    },
    {
      "epoch": 0.6788290199406024,
      "grad_norm": 0.15393156370587963,
      "learning_rate": 4.980479404123778e-05,
      "loss": 1.5121,
      "step": 800
    },
    {
      "epoch": 0.68731438268986,
      "grad_norm": 0.13213701895207608,
      "learning_rate": 4.978587973120931e-05,
      "loss": 1.5307,
      "step": 810
    },
    {
      "epoch": 0.6957997454391175,
      "grad_norm": 0.11561354931316294,
      "learning_rate": 4.9766094884242184e-05,
      "loss": 1.5316,
      "step": 820
    },
    {
      "epoch": 0.7042851081883751,
      "grad_norm": 0.12414772399330044,
      "learning_rate": 4.974544019522559e-05,
      "loss": 1.5148,
      "step": 830
    },
    {
      "epoch": 0.7127704709376326,
      "grad_norm": 0.1171652849153521,
      "learning_rate": 4.972391638959959e-05,
      "loss": 1.5096,
      "step": 840
    },
    {
      "epoch": 0.7212558336868902,
      "grad_norm": 0.12868937349582316,
      "learning_rate": 4.9701524223329585e-05,
      "loss": 1.5282,
      "step": 850
    },
    {
      "epoch": 0.7297411964361477,
      "grad_norm": 0.1200015077117309,
      "learning_rate": 4.967826448287981e-05,
      "loss": 1.5512,
      "step": 860
    },
    {
      "epoch": 0.7382265591854051,
      "grad_norm": 0.12340885660045105,
      "learning_rate": 4.96541379851857e-05,
      "loss": 1.5394,
      "step": 870
    },
    {
      "epoch": 0.7467119219346627,
      "grad_norm": 0.12976937691467555,
      "learning_rate": 4.962914557762517e-05,
      "loss": 1.51,
      "step": 880
    },
    {
      "epoch": 0.7551972846839202,
      "grad_norm": 0.11912878476038466,
      "learning_rate": 4.9603288137988905e-05,
      "loss": 1.5294,
      "step": 890
    },
    {
      "epoch": 0.7636826474331778,
      "grad_norm": 0.1299625480337927,
      "learning_rate": 4.957656657444947e-05,
      "loss": 1.507,
      "step": 900
    },
    {
      "epoch": 0.7721680101824353,
      "grad_norm": 0.12380144459698468,
      "learning_rate": 4.954898182552946e-05,
      "loss": 1.5376,
      "step": 910
    },
    {
      "epoch": 0.7806533729316928,
      "grad_norm": 0.13139339643682763,
      "learning_rate": 4.9520534860068535e-05,
      "loss": 1.5291,
      "step": 920
    },
    {
      "epoch": 0.7891387356809504,
      "grad_norm": 0.13088956203983898,
      "learning_rate": 4.949122667718935e-05,
      "loss": 1.5239,
      "step": 930
    },
    {
      "epoch": 0.7976240984302079,
      "grad_norm": 0.12586052988453703,
      "learning_rate": 4.94610583062625e-05,
      "loss": 1.5525,
      "step": 940
    },
    {
      "epoch": 0.8061094611794655,
      "grad_norm": 0.12020996031652877,
      "learning_rate": 4.943003080687035e-05,
      "loss": 1.5525,
      "step": 950
    },
    {
      "epoch": 0.814594823928723,
      "grad_norm": 0.12866375954060869,
      "learning_rate": 4.9398145268769856e-05,
      "loss": 1.5266,
      "step": 960
    },
    {
      "epoch": 0.8230801866779804,
      "grad_norm": 0.13166136756817035,
      "learning_rate": 4.936540281185423e-05,
      "loss": 1.5041,
      "step": 970
    },
    {
      "epoch": 0.831565549427238,
      "grad_norm": 0.12481946698483787,
      "learning_rate": 4.933180458611364e-05,
      "loss": 1.5271,
      "step": 980
    },
    {
      "epoch": 0.8400509121764955,
      "grad_norm": 0.12264463761209114,
      "learning_rate": 4.9297351771594844e-05,
      "loss": 1.5354,
      "step": 990
    },
    {
      "epoch": 0.8485362749257531,
      "grad_norm": 0.11985452856537594,
      "learning_rate": 4.926204557835968e-05,
      "loss": 1.5167,
      "step": 1000
    },
    {
      "epoch": 0.8570216376750106,
      "grad_norm": 0.13125396521190327,
      "learning_rate": 4.9225887246442634e-05,
      "loss": 1.5282,
      "step": 1010
    },
    {
      "epoch": 0.8655070004242681,
      "grad_norm": 0.12730192328072554,
      "learning_rate": 4.918887804580725e-05,
      "loss": 1.5089,
      "step": 1020
    },
    {
      "epoch": 0.8739923631735257,
      "grad_norm": 0.12724644219344786,
      "learning_rate": 4.915101927630153e-05,
      "loss": 1.4964,
      "step": 1030
    },
    {
      "epoch": 0.8824777259227832,
      "grad_norm": 0.13578611501833232,
      "learning_rate": 4.911231226761227e-05,
      "loss": 1.5189,
      "step": 1040
    },
    {
      "epoch": 0.8909630886720408,
      "grad_norm": 0.13577513964986457,
      "learning_rate": 4.90727583792184e-05,
      "loss": 1.5149,
      "step": 1050
    },
    {
      "epoch": 0.8994484514212983,
      "grad_norm": 0.1269735011676505,
      "learning_rate": 4.903235900034317e-05,
      "loss": 1.5066,
      "step": 1060
    },
    {
      "epoch": 0.9079338141705557,
      "grad_norm": 0.13250058214235566,
      "learning_rate": 4.899111554990543e-05,
      "loss": 1.5129,
      "step": 1070
    },
    {
      "epoch": 0.9164191769198133,
      "grad_norm": 0.13130735246433495,
      "learning_rate": 4.894902947646975e-05,
      "loss": 1.5156,
      "step": 1080
    },
    {
      "epoch": 0.9249045396690708,
      "grad_norm": 0.1273580180253049,
      "learning_rate": 4.890610225819553e-05,
      "loss": 1.5324,
      "step": 1090
    },
    {
      "epoch": 0.9333899024183284,
      "grad_norm": 0.13155314243939242,
      "learning_rate": 4.8862335402785136e-05,
      "loss": 1.5106,
      "step": 1100
    },
    {
      "epoch": 0.9418752651675859,
      "grad_norm": 0.13564895211984299,
      "learning_rate": 4.88177304474309e-05,
      "loss": 1.5067,
      "step": 1110
    },
    {
      "epoch": 0.9503606279168434,
      "grad_norm": 0.12774735587114736,
      "learning_rate": 4.877228895876115e-05,
      "loss": 1.5182,
      "step": 1120
    },
    {
      "epoch": 0.958845990666101,
      "grad_norm": 0.1307997709537685,
      "learning_rate": 4.872601253278517e-05,
      "loss": 1.4969,
      "step": 1130
    },
    {
      "epoch": 0.9673313534153585,
      "grad_norm": 0.1304794845040634,
      "learning_rate": 4.867890279483717e-05,
      "loss": 1.5264,
      "step": 1140
    },
    {
      "epoch": 0.9758167161646161,
      "grad_norm": 0.13666141796489684,
      "learning_rate": 4.8630961399519206e-05,
      "loss": 1.5467,
      "step": 1150
    },
    {
      "epoch": 0.9843020789138736,
      "grad_norm": 0.1370278303190263,
      "learning_rate": 4.8582190030643e-05,
      "loss": 1.5127,
      "step": 1160
    },
    {
      "epoch": 0.9927874416631312,
      "grad_norm": 0.1390936629299565,
      "learning_rate": 4.8532590401170894e-05,
      "loss": 1.5058,
      "step": 1170
    },
    {
      "epoch": 1.0012728044123886,
      "grad_norm": 0.12934475548108287,
      "learning_rate": 4.848216425315561e-05,
      "loss": 1.5202,
      "step": 1180
    },
    {
      "epoch": 1.0097581671616462,
      "grad_norm": 0.13898591683370803,
      "learning_rate": 4.843091335767913e-05,
      "loss": 1.4563,
      "step": 1190
    },
    {
      "epoch": 1.0182435299109036,
      "grad_norm": 0.17488231535826249,
      "learning_rate": 4.837883951479043e-05,
      "loss": 1.4402,
      "step": 1200
    },
    {
      "epoch": 1.0182435299109036,
      "eval_loss": 1.4955657720565796,
      "eval_runtime": 52.424,
      "eval_samples_per_second": 7.268,
      "eval_steps_per_second": 0.916,
      "step": 1200
    },
    {
      "epoch": 1.0267288926601612,
      "grad_norm": 0.1536036344095855,
      "learning_rate": 4.832594455344229e-05,
      "loss": 1.4848,
      "step": 1210
    },
    {
      "epoch": 1.0352142554094188,
      "grad_norm": 0.15762414421336599,
      "learning_rate": 4.827223033142706e-05,
      "loss": 1.4567,
      "step": 1220
    },
    {
      "epoch": 1.0436996181586762,
      "grad_norm": 0.15058229398130366,
      "learning_rate": 4.8217698735311414e-05,
      "loss": 1.4672,
      "step": 1230
    },
    {
      "epoch": 1.0521849809079338,
      "grad_norm": 0.16010992835678386,
      "learning_rate": 4.8162351680370044e-05,
      "loss": 1.4458,
      "step": 1240
    },
    {
      "epoch": 1.0606703436571914,
      "grad_norm": 0.16758816000341356,
      "learning_rate": 4.810619111051847e-05,
      "loss": 1.4842,
      "step": 1250
    },
    {
      "epoch": 1.069155706406449,
      "grad_norm": 0.16559260972674986,
      "learning_rate": 4.8049218998244696e-05,
      "loss": 1.4556,
      "step": 1260
    },
    {
      "epoch": 1.0776410691557063,
      "grad_norm": 0.17237632034416966,
      "learning_rate": 4.7991437344539966e-05,
      "loss": 1.4813,
      "step": 1270
    },
    {
      "epoch": 1.086126431904964,
      "grad_norm": 0.17112756741722487,
      "learning_rate": 4.793284817882845e-05,
      "loss": 1.4535,
      "step": 1280
    },
    {
      "epoch": 1.0946117946542215,
      "grad_norm": 0.16828572707718548,
      "learning_rate": 4.787345355889604e-05,
      "loss": 1.4344,
      "step": 1290
    },
    {
      "epoch": 1.103097157403479,
      "grad_norm": 0.15709986047041227,
      "learning_rate": 4.7813255570817985e-05,
      "loss": 1.4744,
      "step": 1300
    },
    {
      "epoch": 1.1115825201527365,
      "grad_norm": 0.16651547128146313,
      "learning_rate": 4.775225632888568e-05,
      "loss": 1.4561,
      "step": 1310
    },
    {
      "epoch": 1.120067882901994,
      "grad_norm": 0.16750176017515714,
      "learning_rate": 4.76904579755324e-05,
      "loss": 1.4616,
      "step": 1320
    },
    {
      "epoch": 1.1285532456512515,
      "grad_norm": 0.1608016567554825,
      "learning_rate": 4.7627862681258037e-05,
      "loss": 1.4593,
      "step": 1330
    },
    {
      "epoch": 1.137038608400509,
      "grad_norm": 0.21390766919038295,
      "learning_rate": 4.756447264455287e-05,
      "loss": 1.4484,
      "step": 1340
    },
    {
      "epoch": 1.1455239711497667,
      "grad_norm": 0.16826883293172662,
      "learning_rate": 4.750029009182038e-05,
      "loss": 1.4703,
      "step": 1350
    },
    {
      "epoch": 1.1540093338990243,
      "grad_norm": 0.17431508867079595,
      "learning_rate": 4.7435317277299e-05,
      "loss": 1.4701,
      "step": 1360
    },
    {
      "epoch": 1.1624946966482816,
      "grad_norm": 0.15973851467570443,
      "learning_rate": 4.736955648298299e-05,
      "loss": 1.4503,
      "step": 1370
    },
    {
      "epoch": 1.1709800593975392,
      "grad_norm": 0.1887713767970947,
      "learning_rate": 4.730301001854225e-05,
      "loss": 1.4624,
      "step": 1380
    },
    {
      "epoch": 1.1794654221467968,
      "grad_norm": 0.16898695344997974,
      "learning_rate": 4.7235680221241216e-05,
      "loss": 1.4452,
      "step": 1390
    },
    {
      "epoch": 1.1879507848960542,
      "grad_norm": 0.20014553287073528,
      "learning_rate": 4.716756945585681e-05,
      "loss": 1.4717,
      "step": 1400
    },
    {
      "epoch": 1.1964361476453118,
      "grad_norm": 0.17137954325200072,
      "learning_rate": 4.709868011459528e-05,
      "loss": 1.4403,
      "step": 1410
    },
    {
      "epoch": 1.2049215103945694,
      "grad_norm": 0.17801721751888322,
      "learning_rate": 4.7029014617008294e-05,
      "loss": 1.4339,
      "step": 1420
    },
    {
      "epoch": 1.213406873143827,
      "grad_norm": 0.17139613676642362,
      "learning_rate": 4.695857540990789e-05,
      "loss": 1.4573,
      "step": 1430
    },
    {
      "epoch": 1.2218922358930844,
      "grad_norm": 0.16971403514498054,
      "learning_rate": 4.688736496728058e-05,
      "loss": 1.4282,
      "step": 1440
    },
    {
      "epoch": 1.230377598642342,
      "grad_norm": 0.17200272420880428,
      "learning_rate": 4.681538579020038e-05,
      "loss": 1.4434,
      "step": 1450
    },
    {
      "epoch": 1.2388629613915996,
      "grad_norm": 0.17208160407432616,
      "learning_rate": 4.6742640406741106e-05,
      "loss": 1.45,
      "step": 1460
    },
    {
      "epoch": 1.247348324140857,
      "grad_norm": 0.1939626212901777,
      "learning_rate": 4.666913137188743e-05,
      "loss": 1.4608,
      "step": 1470
    },
    {
      "epoch": 1.2558336868901145,
      "grad_norm": 0.17291794493304186,
      "learning_rate": 4.6594861267445236e-05,
      "loss": 1.4671,
      "step": 1480
    },
    {
      "epoch": 1.2643190496393721,
      "grad_norm": 0.18219792041638924,
      "learning_rate": 4.651983270195093e-05,
      "loss": 1.4262,
      "step": 1490
    },
    {
      "epoch": 1.2728044123886297,
      "grad_norm": 0.18086437830489926,
      "learning_rate": 4.644404831057979e-05,
      "loss": 1.4455,
      "step": 1500
    },
    {
      "epoch": 1.281289775137887,
      "grad_norm": 0.17417619624549402,
      "learning_rate": 4.636751075505344e-05,
      "loss": 1.4873,
      "step": 1510
    },
    {
      "epoch": 1.2897751378871447,
      "grad_norm": 0.18354282411845188,
      "learning_rate": 4.629022272354637e-05,
      "loss": 1.4525,
      "step": 1520
    },
    {
      "epoch": 1.298260500636402,
      "grad_norm": 0.17985617345325455,
      "learning_rate": 4.621218693059149e-05,
      "loss": 1.4303,
      "step": 1530
    },
    {
      "epoch": 1.3067458633856597,
      "grad_norm": 0.1809708317849863,
      "learning_rate": 4.6133406116984795e-05,
      "loss": 1.4631,
      "step": 1540
    },
    {
      "epoch": 1.3152312261349173,
      "grad_norm": 0.17487374671212322,
      "learning_rate": 4.6053883049689145e-05,
      "loss": 1.4482,
      "step": 1550
    },
    {
      "epoch": 1.3237165888841749,
      "grad_norm": 0.19912807671077193,
      "learning_rate": 4.5973620521737036e-05,
      "loss": 1.4497,
      "step": 1560
    },
    {
      "epoch": 1.3322019516334322,
      "grad_norm": 0.17853627546912074,
      "learning_rate": 4.5892621352132514e-05,
      "loss": 1.4456,
      "step": 1570
    },
    {
      "epoch": 1.3406873143826898,
      "grad_norm": 0.18252596927754394,
      "learning_rate": 4.581088838575218e-05,
      "loss": 1.4328,
      "step": 1580
    },
    {
      "epoch": 1.3491726771319474,
      "grad_norm": 0.17604951053556211,
      "learning_rate": 4.572842449324525e-05,
      "loss": 1.4442,
      "step": 1590
    },
    {
      "epoch": 1.3576580398812048,
      "grad_norm": 0.18358942463311748,
      "learning_rate": 4.564523257093275e-05,
      "loss": 1.4338,
      "step": 1600
    },
    {
      "epoch": 1.3661434026304624,
      "grad_norm": 0.20508703236267142,
      "learning_rate": 4.5561315540705774e-05,
      "loss": 1.4445,
      "step": 1610
    },
    {
      "epoch": 1.37462876537972,
      "grad_norm": 0.18486352550747187,
      "learning_rate": 4.547667634992288e-05,
      "loss": 1.4261,
      "step": 1620
    },
    {
      "epoch": 1.3831141281289776,
      "grad_norm": 0.17492766465456316,
      "learning_rate": 4.539131797130656e-05,
      "loss": 1.4258,
      "step": 1630
    },
    {
      "epoch": 1.391599490878235,
      "grad_norm": 0.19692876587833674,
      "learning_rate": 4.530524340283881e-05,
      "loss": 1.4349,
      "step": 1640
    },
    {
      "epoch": 1.4000848536274926,
      "grad_norm": 0.19155373430892478,
      "learning_rate": 4.521845566765589e-05,
      "loss": 1.4536,
      "step": 1650
    },
    {
      "epoch": 1.4085702163767502,
      "grad_norm": 0.18544325977459192,
      "learning_rate": 4.513095781394208e-05,
      "loss": 1.4363,
      "step": 1660
    },
    {
      "epoch": 1.4170555791260075,
      "grad_norm": 0.177828004720666,
      "learning_rate": 4.504275291482267e-05,
      "loss": 1.4595,
      "step": 1670
    },
    {
      "epoch": 1.4255409418752651,
      "grad_norm": 0.17855432230356816,
      "learning_rate": 4.495384406825601e-05,
      "loss": 1.4211,
      "step": 1680
    },
    {
      "epoch": 1.4340263046245227,
      "grad_norm": 0.20232492538380317,
      "learning_rate": 4.486423439692469e-05,
      "loss": 1.4189,
      "step": 1690
    },
    {
      "epoch": 1.4425116673737803,
      "grad_norm": 0.1975109303350431,
      "learning_rate": 4.477392704812585e-05,
      "loss": 1.4565,
      "step": 1700
    },
    {
      "epoch": 1.4509970301230377,
      "grad_norm": 0.19619010830399825,
      "learning_rate": 4.468292519366071e-05,
      "loss": 1.4382,
      "step": 1710
    },
    {
      "epoch": 1.4594823928722953,
      "grad_norm": 0.18168826428246143,
      "learning_rate": 4.459123202972308e-05,
      "loss": 1.4471,
      "step": 1720
    },
    {
      "epoch": 1.4679677556215527,
      "grad_norm": 0.1923264062362399,
      "learning_rate": 4.449885077678717e-05,
      "loss": 1.4153,
      "step": 1730
    },
    {
      "epoch": 1.4764531183708103,
      "grad_norm": 0.1907937313040222,
      "learning_rate": 4.440578467949445e-05,
      "loss": 1.4432,
      "step": 1740
    },
    {
      "epoch": 1.4849384811200679,
      "grad_norm": 0.19107457667767244,
      "learning_rate": 4.431203700653968e-05,
      "loss": 1.4285,
      "step": 1750
    },
    {
      "epoch": 1.4934238438693255,
      "grad_norm": 0.19847350429107552,
      "learning_rate": 4.421761105055613e-05,
      "loss": 1.4383,
      "step": 1760
    },
    {
      "epoch": 1.501909206618583,
      "grad_norm": 0.18536475556610216,
      "learning_rate": 4.4122510127999937e-05,
      "loss": 1.42,
      "step": 1770
    },
    {
      "epoch": 1.5103945693678404,
      "grad_norm": 0.18481023473586697,
      "learning_rate": 4.4026737579033584e-05,
      "loss": 1.4384,
      "step": 1780
    },
    {
      "epoch": 1.518879932117098,
      "grad_norm": 0.20863867505874642,
      "learning_rate": 4.393029676740864e-05,
      "loss": 1.4543,
      "step": 1790
    },
    {
      "epoch": 1.5273652948663554,
      "grad_norm": 0.1816036870853105,
      "learning_rate": 4.3833191080347575e-05,
      "loss": 1.434,
      "step": 1800
    },
    {
      "epoch": 1.5273652948663554,
      "eval_loss": 1.4622184038162231,
      "eval_runtime": 52.4041,
      "eval_samples_per_second": 7.27,
      "eval_steps_per_second": 0.916,
      "step": 1800
    },
    {
      "epoch": 1.535850657615613,
      "grad_norm": 0.19378252368958881,
      "learning_rate": 4.3735423928424815e-05,
      "loss": 1.4275,
      "step": 1810
    },
    {
      "epoch": 1.5443360203648706,
      "grad_norm": 0.20453331251433848,
      "learning_rate": 4.363699874544697e-05,
      "loss": 1.4203,
      "step": 1820
    },
    {
      "epoch": 1.5528213831141282,
      "grad_norm": 0.26684319417219377,
      "learning_rate": 4.3537918988332156e-05,
      "loss": 1.4372,
      "step": 1830
    },
    {
      "epoch": 1.5613067458633858,
      "grad_norm": 0.25745160303419773,
      "learning_rate": 4.343818813698868e-05,
      "loss": 1.4082,
      "step": 1840
    },
    {
      "epoch": 1.5697921086126432,
      "grad_norm": 0.19969727996700776,
      "learning_rate": 4.3337809694192765e-05,
      "loss": 1.4314,
      "step": 1850
    },
    {
      "epoch": 1.5782774713619008,
      "grad_norm": 0.20117210832277968,
      "learning_rate": 4.3236787185465525e-05,
      "loss": 1.4293,
      "step": 1860
    },
    {
      "epoch": 1.5867628341111581,
      "grad_norm": 0.20173003641028897,
      "learning_rate": 4.313512415894913e-05,
      "loss": 1.4406,
      "step": 1870
    },
    {
      "epoch": 1.5952481968604157,
      "grad_norm": 0.20304770794371527,
      "learning_rate": 4.303282418528224e-05,
      "loss": 1.4286,
      "step": 1880
    },
    {
      "epoch": 1.6037335596096733,
      "grad_norm": 0.19126658907738198,
      "learning_rate": 4.292989085747452e-05,
      "loss": 1.4184,
      "step": 1890
    },
    {
      "epoch": 1.612218922358931,
      "grad_norm": 0.20069554966453027,
      "learning_rate": 4.282632779078051e-05,
      "loss": 1.4133,
      "step": 1900
    },
    {
      "epoch": 1.6207042851081885,
      "grad_norm": 0.1952881519566686,
      "learning_rate": 4.2722138622572624e-05,
      "loss": 1.4432,
      "step": 1910
    },
    {
      "epoch": 1.629189647857446,
      "grad_norm": 0.19763704668680288,
      "learning_rate": 4.261732701221339e-05,
      "loss": 1.3921,
      "step": 1920
    },
    {
      "epoch": 1.6376750106067033,
      "grad_norm": 0.19821464294464497,
      "learning_rate": 4.2511896640926925e-05,
      "loss": 1.4454,
      "step": 1930
    },
    {
      "epoch": 1.6461603733559609,
      "grad_norm": 0.20456545626297834,
      "learning_rate": 4.240585121166966e-05,
      "loss": 1.4147,
      "step": 1940
    },
    {
      "epoch": 1.6546457361052185,
      "grad_norm": 0.2119092529186395,
      "learning_rate": 4.229919444900027e-05,
      "loss": 1.3969,
      "step": 1950
    },
    {
      "epoch": 1.663131098854476,
      "grad_norm": 0.20330157582122357,
      "learning_rate": 4.2191930098948865e-05,
      "loss": 1.426,
      "step": 1960
    },
    {
      "epoch": 1.6716164616037337,
      "grad_norm": 0.21761164739298738,
      "learning_rate": 4.2084061928885406e-05,
      "loss": 1.4246,
      "step": 1970
    },
    {
      "epoch": 1.680101824352991,
      "grad_norm": 0.19331588142071401,
      "learning_rate": 4.197559372738741e-05,
      "loss": 1.4305,
      "step": 1980
    },
    {
      "epoch": 1.6885871871022486,
      "grad_norm": 0.20188460724329996,
      "learning_rate": 4.186652930410685e-05,
      "loss": 1.4153,
      "step": 1990
    },
    {
      "epoch": 1.697072549851506,
      "grad_norm": 0.20988950033571588,
      "learning_rate": 4.1756872489636425e-05,
      "loss": 1.3894,
      "step": 2000
    },
    {
      "epoch": 1.7055579126007636,
      "grad_norm": 0.1966475893123187,
      "learning_rate": 4.1646627135374916e-05,
      "loss": 1.3962,
      "step": 2010
    },
    {
      "epoch": 1.7140432753500212,
      "grad_norm": 0.20785207367991768,
      "learning_rate": 4.1535797113392004e-05,
      "loss": 1.4037,
      "step": 2020
    },
    {
      "epoch": 1.7225286380992788,
      "grad_norm": 0.2029940281663133,
      "learning_rate": 4.1424386316292224e-05,
      "loss": 1.4011,
      "step": 2030
    },
    {
      "epoch": 1.7310140008485364,
      "grad_norm": 0.2247844551379277,
      "learning_rate": 4.131239865707829e-05,
      "loss": 1.4084,
      "step": 2040
    },
    {
      "epoch": 1.7394993635977938,
      "grad_norm": 0.20900441746105022,
      "learning_rate": 4.11998380690136e-05,
      "loss": 1.4235,
      "step": 2050
    },
    {
      "epoch": 1.7479847263470514,
      "grad_norm": 0.20362408546889926,
      "learning_rate": 4.108670850548416e-05,
      "loss": 1.4204,
      "step": 2060
    },
    {
      "epoch": 1.7564700890963088,
      "grad_norm": 0.22281567946240438,
      "learning_rate": 4.097301393985968e-05,
      "loss": 1.4023,
      "step": 2070
    },
    {
      "epoch": 1.7649554518455663,
      "grad_norm": 0.20867113178797225,
      "learning_rate": 4.085875836535404e-05,
      "loss": 1.3895,
      "step": 2080
    },
    {
      "epoch": 1.773440814594824,
      "grad_norm": 0.22113231886160947,
      "learning_rate": 4.0743945794885063e-05,
      "loss": 1.3963,
      "step": 2090
    },
    {
      "epoch": 1.7819261773440815,
      "grad_norm": 0.22334563577844263,
      "learning_rate": 4.062858026093351e-05,
      "loss": 1.3988,
      "step": 2100
    },
    {
      "epoch": 1.7904115400933391,
      "grad_norm": 0.23218581668265403,
      "learning_rate": 4.051266581540152e-05,
      "loss": 1.4068,
      "step": 2110
    },
    {
      "epoch": 1.7988969028425965,
      "grad_norm": 0.20295589384571033,
      "learning_rate": 4.0396206529470234e-05,
      "loss": 1.3883,
      "step": 2120
    },
    {
      "epoch": 1.8073822655918539,
      "grad_norm": 0.22861611442392848,
      "learning_rate": 4.027920649345687e-05,
      "loss": 1.4043,
      "step": 2130
    },
    {
      "epoch": 1.8158676283411115,
      "grad_norm": 0.2083012771089638,
      "learning_rate": 4.0161669816671e-05,
      "loss": 1.398,
      "step": 2140
    },
    {
      "epoch": 1.824352991090369,
      "grad_norm": 0.21936173231840464,
      "learning_rate": 4.004360062727028e-05,
      "loss": 1.4142,
      "step": 2150
    },
    {
      "epoch": 1.8328383538396267,
      "grad_norm": 0.21383435796328337,
      "learning_rate": 3.9925003072115406e-05,
      "loss": 1.4138,
      "step": 2160
    },
    {
      "epoch": 1.8413237165888843,
      "grad_norm": 0.23301608248270392,
      "learning_rate": 3.9805881316624506e-05,
      "loss": 1.4195,
      "step": 2170
    },
    {
      "epoch": 1.8498090793381419,
      "grad_norm": 0.22424766656883474,
      "learning_rate": 3.968623954462681e-05,
      "loss": 1.4011,
      "step": 2180
    },
    {
      "epoch": 1.8582944420873992,
      "grad_norm": 0.21286417342881453,
      "learning_rate": 3.9566081958215734e-05,
      "loss": 1.409,
      "step": 2190
    },
    {
      "epoch": 1.8667798048366566,
      "grad_norm": 0.21944800687444807,
      "learning_rate": 3.9445412777601284e-05,
      "loss": 1.3877,
      "step": 2200
    },
    {
      "epoch": 1.8752651675859142,
      "grad_norm": 0.23113173625974803,
      "learning_rate": 3.932423624096181e-05,
      "loss": 1.4089,
      "step": 2210
    },
    {
      "epoch": 1.8837505303351718,
      "grad_norm": 0.2081941699587778,
      "learning_rate": 3.920255660429517e-05,
      "loss": 1.4024,
      "step": 2220
    },
    {
      "epoch": 1.8922358930844294,
      "grad_norm": 0.2188685806654701,
      "learning_rate": 3.908037814126927e-05,
      "loss": 1.3878,
      "step": 2230
    },
    {
      "epoch": 1.900721255833687,
      "grad_norm": 0.22761843244757962,
      "learning_rate": 3.895770514307193e-05,
      "loss": 1.4004,
      "step": 2240
    },
    {
      "epoch": 1.9092066185829444,
      "grad_norm": 0.23309183623120422,
      "learning_rate": 3.883454191826017e-05,
      "loss": 1.4188,
      "step": 2250
    },
    {
      "epoch": 1.917691981332202,
      "grad_norm": 0.20329785843911802,
      "learning_rate": 3.871089279260891e-05,
      "loss": 1.3893,
      "step": 2260
    },
    {
      "epoch": 1.9261773440814594,
      "grad_norm": 0.23470973193726366,
      "learning_rate": 3.8586762108958995e-05,
      "loss": 1.3974,
      "step": 2270
    },
    {
      "epoch": 1.934662706830717,
      "grad_norm": 0.22779136837044714,
      "learning_rate": 3.8462154227064725e-05,
      "loss": 1.4115,
      "step": 2280
    },
    {
      "epoch": 1.9431480695799745,
      "grad_norm": 0.22338952315651892,
      "learning_rate": 3.833707352344068e-05,
      "loss": 1.3873,
      "step": 2290
    },
    {
      "epoch": 1.9516334323292321,
      "grad_norm": 0.23069304025882129,
      "learning_rate": 3.821152439120801e-05,
      "loss": 1.3944,
      "step": 2300
    },
    {
      "epoch": 1.9601187950784897,
      "grad_norm": 0.23590596270163203,
      "learning_rate": 3.808551123994018e-05,
      "loss": 1.3857,
      "step": 2310
    },
    {
      "epoch": 1.9686041578277471,
      "grad_norm": 0.22545661808214923,
      "learning_rate": 3.795903849550805e-05,
      "loss": 1.3628,
      "step": 2320
    },
    {
      "epoch": 1.9770895205770047,
      "grad_norm": 0.2450769875954842,
      "learning_rate": 3.7832110599924455e-05,
      "loss": 1.4079,
      "step": 2330
    },
    {
      "epoch": 1.985574883326262,
      "grad_norm": 0.22931499326784313,
      "learning_rate": 3.7704732011188166e-05,
      "loss": 1.379,
      "step": 2340
    },
    {
      "epoch": 1.9940602460755197,
      "grad_norm": 0.22417244507397657,
      "learning_rate": 3.7576907203127346e-05,
      "loss": 1.4035,
      "step": 2350
    },
    {
      "epoch": 2.0025456088247773,
      "grad_norm": 0.24496197221575314,
      "learning_rate": 3.7448640665242406e-05,
      "loss": 1.442,
      "step": 2360
    },
    {
      "epoch": 2.011030971574035,
      "grad_norm": 0.2532740296990078,
      "learning_rate": 3.73199369025483e-05,
      "loss": 1.2672,
      "step": 2370
    },
    {
      "epoch": 2.0195163343232925,
      "grad_norm": 0.2890155987968593,
      "learning_rate": 3.7190800435416355e-05,
      "loss": 1.246,
      "step": 2380
    },
    {
      "epoch": 2.02800169707255,
      "grad_norm": 0.2541972565696406,
      "learning_rate": 3.706123579941545e-05,
      "loss": 1.2603,
      "step": 2390
    },
    {
      "epoch": 2.036487059821807,
      "grad_norm": 0.2530140862527023,
      "learning_rate": 3.693124754515272e-05,
      "loss": 1.2638,
      "step": 2400
    },
    {
      "epoch": 2.036487059821807,
      "eval_loss": 1.435962438583374,
      "eval_runtime": 52.582,
      "eval_samples_per_second": 7.246,
      "eval_steps_per_second": 0.913,
      "step": 2400
    },
    {
      "epoch": 2.044972422571065,
      "grad_norm": 0.25100458343337734,
      "learning_rate": 3.680084023811377e-05,
      "loss": 1.2711,
      "step": 2410
    },
    {
      "epoch": 2.0534577853203224,
      "grad_norm": 0.2695727673292618,
      "learning_rate": 3.66700184585023e-05,
      "loss": 1.2578,
      "step": 2420
    },
    {
      "epoch": 2.06194314806958,
      "grad_norm": 0.2605068415443213,
      "learning_rate": 3.6538786801079226e-05,
      "loss": 1.2506,
      "step": 2430
    },
    {
      "epoch": 2.0704285108188376,
      "grad_norm": 0.27415607207865045,
      "learning_rate": 3.64071498750013e-05,
      "loss": 1.2852,
      "step": 2440
    },
    {
      "epoch": 2.078913873568095,
      "grad_norm": 0.2688900338206285,
      "learning_rate": 3.627511230365928e-05,
      "loss": 1.2695,
      "step": 2450
    },
    {
      "epoch": 2.0873992363173524,
      "grad_norm": 0.2750825805336503,
      "learning_rate": 3.614267872451546e-05,
      "loss": 1.2643,
      "step": 2460
    },
    {
      "epoch": 2.09588459906661,
      "grad_norm": 0.2659269066581903,
      "learning_rate": 3.600985378894086e-05,
      "loss": 1.2868,
      "step": 2470
    },
    {
      "epoch": 2.1043699618158676,
      "grad_norm": 0.24411151291321526,
      "learning_rate": 3.587664216205183e-05,
      "loss": 1.2571,
      "step": 2480
    },
    {
      "epoch": 2.112855324565125,
      "grad_norm": 0.2574194755634052,
      "learning_rate": 3.574304852254621e-05,
      "loss": 1.2769,
      "step": 2490
    },
    {
      "epoch": 2.1213406873143827,
      "grad_norm": 0.2894545074998905,
      "learning_rate": 3.5609077562538997e-05,
      "loss": 1.2469,
      "step": 2500
    },
    {
      "epoch": 2.1298260500636403,
      "grad_norm": 0.2828176429904294,
      "learning_rate": 3.547473398739754e-05,
      "loss": 1.2527,
      "step": 2510
    },
    {
      "epoch": 2.138311412812898,
      "grad_norm": 0.25886029771650565,
      "learning_rate": 3.5340022515576294e-05,
      "loss": 1.2578,
      "step": 2520
    },
    {
      "epoch": 2.146796775562155,
      "grad_norm": 0.2783799371621383,
      "learning_rate": 3.52049478784511e-05,
      "loss": 1.2489,
      "step": 2530
    },
    {
      "epoch": 2.1552821383114127,
      "grad_norm": 0.2753116113218978,
      "learning_rate": 3.506951482015297e-05,
      "loss": 1.275,
      "step": 2540
    },
    {
      "epoch": 2.1637675010606703,
      "grad_norm": 0.28115792079727675,
      "learning_rate": 3.493372809740152e-05,
      "loss": 1.2554,
      "step": 2550
    },
    {
      "epoch": 2.172252863809928,
      "grad_norm": 0.27954425325951715,
      "learning_rate": 3.479759247933785e-05,
      "loss": 1.2618,
      "step": 2560
    },
    {
      "epoch": 2.1807382265591855,
      "grad_norm": 0.27555174232347995,
      "learning_rate": 3.466111274735707e-05,
      "loss": 1.2598,
      "step": 2570
    },
    {
      "epoch": 2.189223589308443,
      "grad_norm": 0.27280827991301104,
      "learning_rate": 3.452429369494037e-05,
      "loss": 1.262,
      "step": 2580
    },
    {
      "epoch": 2.1977089520577007,
      "grad_norm": 0.2749685805551003,
      "learning_rate": 3.438714012748664e-05,
      "loss": 1.2683,
      "step": 2590
    },
    {
      "epoch": 2.206194314806958,
      "grad_norm": 0.2780594302788235,
      "learning_rate": 3.424965686214371e-05,
      "loss": 1.2462,
      "step": 2600
    },
    {
      "epoch": 2.2146796775562154,
      "grad_norm": 0.2942257416636676,
      "learning_rate": 3.411184872763915e-05,
      "loss": 1.2581,
      "step": 2610
    },
    {
      "epoch": 2.223165040305473,
      "grad_norm": 0.27000377333423803,
      "learning_rate": 3.39737205641107e-05,
      "loss": 1.2412,
      "step": 2620
    },
    {
      "epoch": 2.2316504030547306,
      "grad_norm": 0.28187507810449336,
      "learning_rate": 3.383527722293622e-05,
      "loss": 1.2659,
      "step": 2630
    },
    {
      "epoch": 2.240135765803988,
      "grad_norm": 0.2736213940552268,
      "learning_rate": 3.369652356656336e-05,
      "loss": 1.2553,
      "step": 2640
    },
    {
      "epoch": 2.248621128553246,
      "grad_norm": 0.29698834543438446,
      "learning_rate": 3.355746446833873e-05,
      "loss": 1.2714,
      "step": 2650
    },
    {
      "epoch": 2.257106491302503,
      "grad_norm": 0.2875128112484735,
      "learning_rate": 3.3418104812336786e-05,
      "loss": 1.2508,
      "step": 2660
    },
    {
      "epoch": 2.2655918540517606,
      "grad_norm": 0.3016647299373059,
      "learning_rate": 3.327844949318824e-05,
      "loss": 1.2451,
      "step": 2670
    },
    {
      "epoch": 2.274077216801018,
      "grad_norm": 0.27371321581702696,
      "learning_rate": 3.3138503415908176e-05,
      "loss": 1.2467,
      "step": 2680
    },
    {
      "epoch": 2.2825625795502757,
      "grad_norm": 0.28374547760120017,
      "learning_rate": 3.299827149572376e-05,
      "loss": 1.2452,
      "step": 2690
    },
    {
      "epoch": 2.2910479422995333,
      "grad_norm": 0.2805999278165284,
      "learning_rate": 3.285775865790166e-05,
      "loss": 1.2595,
      "step": 2700
    },
    {
      "epoch": 2.299533305048791,
      "grad_norm": 0.2758019804125597,
      "learning_rate": 3.271696983757496e-05,
      "loss": 1.2583,
      "step": 2710
    },
    {
      "epoch": 2.3080186677980485,
      "grad_norm": 0.27211127699988974,
      "learning_rate": 3.2575909979569906e-05,
      "loss": 1.2255,
      "step": 2720
    },
    {
      "epoch": 2.316504030547306,
      "grad_norm": 0.2741831859110416,
      "learning_rate": 3.243458403823223e-05,
      "loss": 1.2335,
      "step": 2730
    },
    {
      "epoch": 2.3249893932965633,
      "grad_norm": 0.287074507507,
      "learning_rate": 3.2292996977253075e-05,
      "loss": 1.2555,
      "step": 2740
    },
    {
      "epoch": 2.333474756045821,
      "grad_norm": 0.2760197579958247,
      "learning_rate": 3.215115376949474e-05,
      "loss": 1.2574,
      "step": 2750
    },
    {
      "epoch": 2.3419601187950785,
      "grad_norm": 0.29917391348714156,
      "learning_rate": 3.200905939681599e-05,
      "loss": 1.2232,
      "step": 2760
    },
    {
      "epoch": 2.350445481544336,
      "grad_norm": 0.2863180346672473,
      "learning_rate": 3.1866718849897044e-05,
      "loss": 1.2341,
      "step": 2770
    },
    {
      "epoch": 2.3589308442935937,
      "grad_norm": 0.2760526831444543,
      "learning_rate": 3.172413712806435e-05,
      "loss": 1.253,
      "step": 2780
    },
    {
      "epoch": 2.3674162070428513,
      "grad_norm": 0.29286413736773825,
      "learning_rate": 3.158131923911498e-05,
      "loss": 1.2617,
      "step": 2790
    },
    {
      "epoch": 2.3759015697921084,
      "grad_norm": 0.27643034174892955,
      "learning_rate": 3.143827019914072e-05,
      "loss": 1.2152,
      "step": 2800
    },
    {
      "epoch": 2.384386932541366,
      "grad_norm": 0.2939949433037669,
      "learning_rate": 3.12949950323519e-05,
      "loss": 1.2354,
      "step": 2810
    },
    {
      "epoch": 2.3928722952906236,
      "grad_norm": 0.2864245267570891,
      "learning_rate": 3.115149877090097e-05,
      "loss": 1.2447,
      "step": 2820
    },
    {
      "epoch": 2.401357658039881,
      "grad_norm": 0.2952829920235313,
      "learning_rate": 3.1007786454705724e-05,
      "loss": 1.2462,
      "step": 2830
    },
    {
      "epoch": 2.409843020789139,
      "grad_norm": 0.3032080033620836,
      "learning_rate": 3.0863863131272265e-05,
      "loss": 1.2317,
      "step": 2840
    },
    {
      "epoch": 2.4183283835383964,
      "grad_norm": 0.2678380639415362,
      "learning_rate": 3.07197338555178e-05,
      "loss": 1.2466,
      "step": 2850
    },
    {
      "epoch": 2.426813746287654,
      "grad_norm": 0.3000338098809928,
      "learning_rate": 3.0575403689593016e-05,
      "loss": 1.2469,
      "step": 2860
    },
    {
      "epoch": 2.435299109036911,
      "grad_norm": 0.2885428511714088,
      "learning_rate": 3.043087770270435e-05,
      "loss": 1.241,
      "step": 2870
    },
    {
      "epoch": 2.4437844717861688,
      "grad_norm": 0.2902606566366597,
      "learning_rate": 3.0286160970935906e-05,
      "loss": 1.2498,
      "step": 2880
    },
    {
      "epoch": 2.4522698345354264,
      "grad_norm": 0.2930924599960876,
      "learning_rate": 3.0141258577071184e-05,
      "loss": 1.2508,
      "step": 2890
    },
    {
      "epoch": 2.460755197284684,
      "grad_norm": 0.28761403953538467,
      "learning_rate": 2.9996175610414572e-05,
      "loss": 1.2379,
      "step": 2900
    },
    {
      "epoch": 2.4692405600339415,
      "grad_norm": 0.28888693356528744,
      "learning_rate": 2.9850917166612586e-05,
      "loss": 1.2383,
      "step": 2910
    },
    {
      "epoch": 2.477725922783199,
      "grad_norm": 0.29714323219094924,
      "learning_rate": 2.9705488347474896e-05,
      "loss": 1.2221,
      "step": 2920
    },
    {
      "epoch": 2.4862112855324563,
      "grad_norm": 0.3024332099011336,
      "learning_rate": 2.9559894260795144e-05,
      "loss": 1.2417,
      "step": 2930
    },
    {
      "epoch": 2.494696648281714,
      "grad_norm": 0.2900123354730048,
      "learning_rate": 2.9414140020171554e-05,
      "loss": 1.2543,
      "step": 2940
    },
    {
      "epoch": 2.5031820110309715,
      "grad_norm": 0.30122390943433014,
      "learning_rate": 2.926823074482733e-05,
      "loss": 1.2542,
      "step": 2950
    },
    {
      "epoch": 2.511667373780229,
      "grad_norm": 0.2860208265471049,
      "learning_rate": 2.912217155943083e-05,
      "loss": 1.2335,
      "step": 2960
    },
    {
      "epoch": 2.5201527365294867,
      "grad_norm": 0.28980498979259595,
      "learning_rate": 2.897596759391561e-05,
      "loss": 1.2458,
      "step": 2970
    },
    {
      "epoch": 2.5286380992787443,
      "grad_norm": 0.30074882444504475,
      "learning_rate": 2.8829623983300242e-05,
      "loss": 1.2498,
      "step": 2980
    },
    {
      "epoch": 2.537123462028002,
      "grad_norm": 0.2929721105596463,
      "learning_rate": 2.868314586750794e-05,
      "loss": 1.2686,
      "step": 2990
    },
    {
      "epoch": 2.5456088247772595,
      "grad_norm": 0.291755235343187,
      "learning_rate": 2.853653839118605e-05,
      "loss": 1.2456,
      "step": 3000
    },
    {
      "epoch": 2.5456088247772595,
      "eval_loss": 1.4051239490509033,
      "eval_runtime": 52.7875,
      "eval_samples_per_second": 7.218,
      "eval_steps_per_second": 0.909,
      "step": 3000
    },
    {
      "epoch": 2.5540941875265166,
      "grad_norm": 0.3056527705148328,
      "learning_rate": 2.8389806703525383e-05,
      "loss": 1.2321,
      "step": 3010
    },
    {
      "epoch": 2.562579550275774,
      "grad_norm": 0.29756401069688737,
      "learning_rate": 2.8242955958079303e-05,
      "loss": 1.2341,
      "step": 3020
    },
    {
      "epoch": 2.571064913025032,
      "grad_norm": 0.3077048874608071,
      "learning_rate": 2.809599131258276e-05,
      "loss": 1.2475,
      "step": 3030
    },
    {
      "epoch": 2.5795502757742894,
      "grad_norm": 0.31006088313098146,
      "learning_rate": 2.7948917928771158e-05,
      "loss": 1.2381,
      "step": 3040
    },
    {
      "epoch": 2.588035638523547,
      "grad_norm": 0.3086227102652305,
      "learning_rate": 2.7801740972199014e-05,
      "loss": 1.2386,
      "step": 3050
    },
    {
      "epoch": 2.596521001272804,
      "grad_norm": 0.2909420805400902,
      "learning_rate": 2.7654465612058573e-05,
      "loss": 1.2071,
      "step": 3060
    },
    {
      "epoch": 2.6050063640220618,
      "grad_norm": 0.30310956499188235,
      "learning_rate": 2.7507097020998246e-05,
      "loss": 1.2206,
      "step": 3070
    },
    {
      "epoch": 2.6134917267713194,
      "grad_norm": 0.2873915382033808,
      "learning_rate": 2.7359640374940904e-05,
      "loss": 1.2346,
      "step": 3080
    },
    {
      "epoch": 2.621977089520577,
      "grad_norm": 0.29404028686651285,
      "learning_rate": 2.7212100852902133e-05,
      "loss": 1.2209,
      "step": 3090
    },
    {
      "epoch": 2.6304624522698346,
      "grad_norm": 0.2967558623710032,
      "learning_rate": 2.7064483636808313e-05,
      "loss": 1.2471,
      "step": 3100
    },
    {
      "epoch": 2.638947815019092,
      "grad_norm": 0.28348844201193973,
      "learning_rate": 2.6916793911314593e-05,
      "loss": 1.2271,
      "step": 3110
    },
    {
      "epoch": 2.6474331777683497,
      "grad_norm": 0.30908939180701456,
      "learning_rate": 2.6769036863622842e-05,
      "loss": 1.2348,
      "step": 3120
    },
    {
      "epoch": 2.6559185405176073,
      "grad_norm": 0.296064586506253,
      "learning_rate": 2.6621217683299437e-05,
      "loss": 1.2118,
      "step": 3130
    },
    {
      "epoch": 2.6644039032668645,
      "grad_norm": 0.29223118946191284,
      "learning_rate": 2.647334156209299e-05,
      "loss": 1.2368,
      "step": 3140
    },
    {
      "epoch": 2.672889266016122,
      "grad_norm": 0.2974562276968823,
      "learning_rate": 2.6325413693752004e-05,
      "loss": 1.2392,
      "step": 3150
    },
    {
      "epoch": 2.6813746287653797,
      "grad_norm": 0.30862646184519243,
      "learning_rate": 2.6177439273842463e-05,
      "loss": 1.244,
      "step": 3160
    },
    {
      "epoch": 2.6898599915146373,
      "grad_norm": 0.2958164221091078,
      "learning_rate": 2.602942349956536e-05,
      "loss": 1.2377,
      "step": 3170
    },
    {
      "epoch": 2.698345354263895,
      "grad_norm": 0.2941370782364945,
      "learning_rate": 2.5881371569574125e-05,
      "loss": 1.2296,
      "step": 3180
    },
    {
      "epoch": 2.7068307170131525,
      "grad_norm": 0.2949825785995608,
      "learning_rate": 2.5733288683792084e-05,
      "loss": 1.2292,
      "step": 3190
    },
    {
      "epoch": 2.7153160797624096,
      "grad_norm": 0.3020705479686205,
      "learning_rate": 2.558518004322979e-05,
      "loss": 1.2371,
      "step": 3200
    },
    {
      "epoch": 2.7238014425116672,
      "grad_norm": 0.3134012668403533,
      "learning_rate": 2.5437050849802356e-05,
      "loss": 1.2257,
      "step": 3210
    },
    {
      "epoch": 2.732286805260925,
      "grad_norm": 0.320012175903156,
      "learning_rate": 2.528890630614677e-05,
      "loss": 1.215,
      "step": 3220
    },
    {
      "epoch": 2.7407721680101824,
      "grad_norm": 0.2936053513063533,
      "learning_rate": 2.514075161543915e-05,
      "loss": 1.2364,
      "step": 3230
    },
    {
      "epoch": 2.74925753075944,
      "grad_norm": 0.30515854484741317,
      "learning_rate": 2.499259198121201e-05,
      "loss": 1.2117,
      "step": 3240
    },
    {
      "epoch": 2.7577428935086976,
      "grad_norm": 0.29763910785937486,
      "learning_rate": 2.484443260717147e-05,
      "loss": 1.2583,
      "step": 3250
    },
    {
      "epoch": 2.766228256257955,
      "grad_norm": 0.2975895109826329,
      "learning_rate": 2.4696278697014538e-05,
      "loss": 1.2153,
      "step": 3260
    },
    {
      "epoch": 2.774713619007213,
      "grad_norm": 0.2778934357454163,
      "learning_rate": 2.4548135454246306e-05,
      "loss": 1.2291,
      "step": 3270
    },
    {
      "epoch": 2.78319898175647,
      "grad_norm": 0.32172302391314234,
      "learning_rate": 2.4400008081997196e-05,
      "loss": 1.214,
      "step": 3280
    },
    {
      "epoch": 2.7916843445057276,
      "grad_norm": 0.3008060855751439,
      "learning_rate": 2.425190178284024e-05,
      "loss": 1.234,
      "step": 3290
    },
    {
      "epoch": 2.800169707254985,
      "grad_norm": 0.3093107834664431,
      "learning_rate": 2.4103821758608307e-05,
      "loss": 1.2492,
      "step": 3300
    },
    {
      "epoch": 2.8086550700042427,
      "grad_norm": 0.30554454329766617,
      "learning_rate": 2.3955773210211465e-05,
      "loss": 1.2401,
      "step": 3310
    },
    {
      "epoch": 2.8171404327535003,
      "grad_norm": 0.2997812898388053,
      "learning_rate": 2.380776133745425e-05,
      "loss": 1.2089,
      "step": 3320
    },
    {
      "epoch": 2.8256257955027575,
      "grad_norm": 0.2962123179493644,
      "learning_rate": 2.3659791338853066e-05,
      "loss": 1.2324,
      "step": 3330
    },
    {
      "epoch": 2.834111158252015,
      "grad_norm": 0.31328021490753843,
      "learning_rate": 2.3511868411453623e-05,
      "loss": 1.214,
      "step": 3340
    },
    {
      "epoch": 2.8425965210012727,
      "grad_norm": 0.30414013736166967,
      "learning_rate": 2.3363997750648357e-05,
      "loss": 1.2142,
      "step": 3350
    },
    {
      "epoch": 2.8510818837505303,
      "grad_norm": 0.3079683108015467,
      "learning_rate": 2.3216184549994006e-05,
      "loss": 1.2137,
      "step": 3360
    },
    {
      "epoch": 2.859567246499788,
      "grad_norm": 0.31165537422962203,
      "learning_rate": 2.3068434001029173e-05,
      "loss": 1.1915,
      "step": 3370
    },
    {
      "epoch": 2.8680526092490455,
      "grad_norm": 0.3190451157121095,
      "learning_rate": 2.2920751293091948e-05,
      "loss": 1.2193,
      "step": 3380
    },
    {
      "epoch": 2.876537971998303,
      "grad_norm": 0.30247955079343214,
      "learning_rate": 2.277314161313774e-05,
      "loss": 1.2253,
      "step": 3390
    },
    {
      "epoch": 2.8850233347475607,
      "grad_norm": 0.2936629891547958,
      "learning_rate": 2.262561014555703e-05,
      "loss": 1.2136,
      "step": 3400
    },
    {
      "epoch": 2.893508697496818,
      "grad_norm": 0.3001872886250926,
      "learning_rate": 2.2478162071993298e-05,
      "loss": 1.2061,
      "step": 3410
    },
    {
      "epoch": 2.9019940602460754,
      "grad_norm": 0.3111993397471538,
      "learning_rate": 2.233080257116103e-05,
      "loss": 1.2193,
      "step": 3420
    },
    {
      "epoch": 2.910479422995333,
      "grad_norm": 0.3119972602479391,
      "learning_rate": 2.2183536818663856e-05,
      "loss": 1.2125,
      "step": 3430
    },
    {
      "epoch": 2.9189647857445906,
      "grad_norm": 0.30787460159489605,
      "learning_rate": 2.2036369986812713e-05,
      "loss": 1.215,
      "step": 3440
    },
    {
      "epoch": 2.927450148493848,
      "grad_norm": 0.3175794611301324,
      "learning_rate": 2.1889307244444252e-05,
      "loss": 1.2202,
      "step": 3450
    },
    {
      "epoch": 2.9359355112431054,
      "grad_norm": 0.2956361362699715,
      "learning_rate": 2.1742353756739247e-05,
      "loss": 1.2125,
      "step": 3460
    },
    {
      "epoch": 2.944420873992363,
      "grad_norm": 0.33942331815482624,
      "learning_rate": 2.1595514685041205e-05,
      "loss": 1.2173,
      "step": 3470
    },
    {
      "epoch": 2.9529062367416206,
      "grad_norm": 0.332180923100718,
      "learning_rate": 2.144879518667507e-05,
      "loss": 1.2266,
      "step": 3480
    },
    {
      "epoch": 2.961391599490878,
      "grad_norm": 0.3157083099981729,
      "learning_rate": 2.1302200414766123e-05,
      "loss": 1.2154,
      "step": 3490
    },
    {
      "epoch": 2.9698769622401358,
      "grad_norm": 0.3149298205272042,
      "learning_rate": 2.1155735518058914e-05,
      "loss": 1.2232,
      "step": 3500
    },
    {
      "epoch": 2.9783623249893934,
      "grad_norm": 0.3132779789709915,
      "learning_rate": 2.100940564073653e-05,
      "loss": 1.2299,
      "step": 3510
    },
    {
      "epoch": 2.986847687738651,
      "grad_norm": 0.29387121758746726,
      "learning_rate": 2.086321592223984e-05,
      "loss": 1.2219,
      "step": 3520
    },
    {
      "epoch": 2.9953330504879085,
      "grad_norm": 0.2954282506485496,
      "learning_rate": 2.0717171497087014e-05,
      "loss": 1.2321,
      "step": 3530
    },
    {
      "epoch": 3.0038184132371657,
      "grad_norm": 0.2991107469221935,
      "learning_rate": 2.057127749469321e-05,
      "loss": 1.2387,
      "step": 3540
    },
    {
      "epoch": 3.0123037759864233,
      "grad_norm": 0.3135732189536929,
      "learning_rate": 2.042553903919036e-05,
      "loss": 1.1058,
      "step": 3550
    },
    {
      "epoch": 3.020789138735681,
      "grad_norm": 0.3179199807851635,
      "learning_rate": 2.0279961249247274e-05,
      "loss": 1.0677,
      "step": 3560
    },
    {
      "epoch": 3.0292745014849385,
      "grad_norm": 0.3217398418422315,
      "learning_rate": 2.0134549237889765e-05,
      "loss": 1.0978,
      "step": 3570
    },
    {
      "epoch": 3.037759864234196,
      "grad_norm": 0.32343352041544976,
      "learning_rate": 1.9989308112321164e-05,
      "loss": 1.0791,
      "step": 3580
    },
    {
      "epoch": 3.0462452269834537,
      "grad_norm": 0.32321267638520695,
      "learning_rate": 1.9844242973742886e-05,
      "loss": 1.0991,
      "step": 3590
    },
    {
      "epoch": 3.0547305897327113,
      "grad_norm": 0.321242777420917,
      "learning_rate": 1.9699358917175297e-05,
      "loss": 1.104,
      "step": 3600
|
}, |
|
{ |
|
"epoch": 3.0547305897327113, |
|
"eval_loss": 1.3918192386627197, |
|
"eval_runtime": 52.2671, |
|
"eval_samples_per_second": 7.289, |
|
"eval_steps_per_second": 0.918, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 3.0632159524819684, |
|
"grad_norm": 0.3313441880623986, |
|
"learning_rate": 1.9554661031278712e-05, |
|
"loss": 1.081, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 3.071701315231226, |
|
"grad_norm": 0.3866269219140372, |
|
"learning_rate": 1.9410154398174742e-05, |
|
"loss": 1.0826, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 3.0801866779804836, |
|
"grad_norm": 0.32719602353062216, |
|
"learning_rate": 1.9265844093267728e-05, |
|
"loss": 1.0934, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 3.088672040729741, |
|
"grad_norm": 0.3301678171750988, |
|
"learning_rate": 1.9121735185066537e-05, |
|
"loss": 1.1047, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 3.097157403478999, |
|
"grad_norm": 0.35644796539197005, |
|
"learning_rate": 1.8977832735006522e-05, |
|
"loss": 1.0994, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 3.1056427662282564, |
|
"grad_norm": 0.33733028940251475, |
|
"learning_rate": 1.8834141797271742e-05, |
|
"loss": 1.0972, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 3.114128128977514, |
|
"grad_norm": 0.32548774712269085, |
|
"learning_rate": 1.8690667418617462e-05, |
|
"loss": 1.1046, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 3.122613491726771, |
|
"grad_norm": 0.3117109384467469, |
|
"learning_rate": 1.854741463819291e-05, |
|
"loss": 1.0791, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 3.1310988544760288, |
|
"grad_norm": 0.3240987091142989, |
|
"learning_rate": 1.8404388487364242e-05, |
|
"loss": 1.0824, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 3.1395842172252864, |
|
"grad_norm": 0.3346401099182515, |
|
"learning_rate": 1.8261593989537895e-05, |
|
"loss": 1.0753, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 3.148069579974544, |
|
"grad_norm": 0.3270030472928521, |
|
"learning_rate": 1.81190361599841e-05, |
|
"loss": 1.0934, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 3.1565549427238015, |
|
"grad_norm": 0.34129438803355183, |
|
"learning_rate": 1.797672000566077e-05, |
|
"loss": 1.0766, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 3.165040305473059, |
|
"grad_norm": 0.37057591586243926, |
|
"learning_rate": 1.783465052503762e-05, |
|
"loss": 1.1049, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 3.1735256682223163, |
|
"grad_norm": 0.3331237337555744, |
|
"learning_rate": 1.769283270792065e-05, |
|
"loss": 1.0876, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 3.182011030971574, |
|
"grad_norm": 0.31904083541369294, |
|
"learning_rate": 1.7551271535276792e-05, |
|
"loss": 1.1206, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 3.1904963937208315, |
|
"grad_norm": 0.35953020953263576, |
|
"learning_rate": 1.74099719790591e-05, |
|
"loss": 1.0736, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 3.198981756470089, |
|
"grad_norm": 0.33595544857573634, |
|
"learning_rate": 1.7268939002032035e-05, |
|
"loss": 1.0969, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 3.2074671192193467, |
|
"grad_norm": 0.34180259956288195, |
|
"learning_rate": 1.7128177557597185e-05, |
|
"loss": 1.0972, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 3.2159524819686043, |
|
"grad_norm": 0.3268247890892541, |
|
"learning_rate": 1.6987692589619304e-05, |
|
"loss": 1.0737, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 3.224437844717862, |
|
"grad_norm": 0.3207792660130559, |
|
"learning_rate": 1.6847489032252627e-05, |
|
"loss": 1.0797, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 3.232923207467119, |
|
"grad_norm": 0.3467270107463577, |
|
"learning_rate": 1.6707571809767644e-05, |
|
"loss": 1.1024, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 3.2414085702163766, |
|
"grad_norm": 0.35579083917156773, |
|
"learning_rate": 1.656794583637807e-05, |
|
"loss": 1.1026, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 3.2498939329656342, |
|
"grad_norm": 0.3339161504484877, |
|
"learning_rate": 1.6428616016068304e-05, |
|
"loss": 1.0866, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 3.258379295714892, |
|
"grad_norm": 0.34920082505086103, |
|
"learning_rate": 1.628958724242117e-05, |
|
"loss": 1.0789, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 3.2668646584641494, |
|
"grad_norm": 0.34847114740056345, |
|
"learning_rate": 1.615086439844604e-05, |
|
"loss": 1.0849, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 3.275350021213407, |
|
"grad_norm": 0.3164317420955498, |
|
"learning_rate": 1.601245235640733e-05, |
|
"loss": 1.0984, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 3.283835383962664, |
|
"grad_norm": 0.32036510223283066, |
|
"learning_rate": 1.5874355977653392e-05, |
|
"loss": 1.1122, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 3.2923207467119218, |
|
"grad_norm": 0.3618051784441363, |
|
"learning_rate": 1.5736580112445738e-05, |
|
"loss": 1.0942, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 3.3008061094611794, |
|
"grad_norm": 0.3376776049321294, |
|
"learning_rate": 1.559912959978872e-05, |
|
"loss": 1.0898, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 3.309291472210437, |
|
"grad_norm": 0.3402374603685398, |
|
"learning_rate": 1.546200926725958e-05, |
|
"loss": 1.1061, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 3.3177768349596946, |
|
"grad_norm": 0.32072793842909514, |
|
"learning_rate": 1.5325223930838838e-05, |
|
"loss": 1.0995, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 3.326262197708952, |
|
"grad_norm": 0.336633795191635, |
|
"learning_rate": 1.518877839474122e-05, |
|
"loss": 1.0971, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 3.3347475604582097, |
|
"grad_norm": 0.36554031780441654, |
|
"learning_rate": 1.5052677451246877e-05, |
|
"loss": 1.0867, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 3.3432329232074673, |
|
"grad_norm": 0.3569408014521597, |
|
"learning_rate": 1.491692588053305e-05, |
|
"loss": 1.0992, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 3.3517182859567245, |
|
"grad_norm": 0.33844743386504, |
|
"learning_rate": 1.4781528450506232e-05, |
|
"loss": 1.103, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 3.360203648705982, |
|
"grad_norm": 0.31581742738052115, |
|
"learning_rate": 1.4646489916634687e-05, |
|
"loss": 1.0843, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 3.3686890114552397, |
|
"grad_norm": 0.3882340052077705, |
|
"learning_rate": 1.4511815021781411e-05, |
|
"loss": 1.1146, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 3.3771743742044973, |
|
"grad_norm": 0.3309843437274728, |
|
"learning_rate": 1.4377508496037567e-05, |
|
"loss": 1.0751, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 3.385659736953755, |
|
"grad_norm": 0.34050270685357675, |
|
"learning_rate": 1.4243575056556355e-05, |
|
"loss": 1.0995, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 3.3941450997030125, |
|
"grad_norm": 0.3459308872000444, |
|
"learning_rate": 1.4110019407387315e-05, |
|
"loss": 1.0817, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.4026304624522696, |
|
"grad_norm": 0.337212090487825, |
|
"learning_rate": 1.3976846239311128e-05, |
|
"loss": 1.1086, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 3.4111158252015272, |
|
"grad_norm": 0.3351805063766743, |
|
"learning_rate": 1.384406022967489e-05, |
|
"loss": 1.0919, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 3.419601187950785, |
|
"grad_norm": 0.3398996417599571, |
|
"learning_rate": 1.3711666042227772e-05, |
|
"loss": 1.0808, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 3.4280865507000424, |
|
"grad_norm": 0.3585847037493432, |
|
"learning_rate": 1.357966832695725e-05, |
|
"loss": 1.1169, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 3.4365719134493, |
|
"grad_norm": 0.3229408856119979, |
|
"learning_rate": 1.3448071719925826e-05, |
|
"loss": 1.1041, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 3.4450572761985576, |
|
"grad_norm": 0.34474446572567957, |
|
"learning_rate": 1.331688084310812e-05, |
|
"loss": 1.0772, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 3.453542638947815, |
|
"grad_norm": 0.3189660506507834, |
|
"learning_rate": 1.3186100304228594e-05, |
|
"loss": 1.1082, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 3.4620280016970724, |
|
"grad_norm": 0.3609045764903581, |
|
"learning_rate": 1.3055734696599686e-05, |
|
"loss": 1.0815, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 3.47051336444633, |
|
"grad_norm": 0.35237020522576973, |
|
"learning_rate": 1.292578859896053e-05, |
|
"loss": 1.1176, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 3.4789987271955876, |
|
"grad_norm": 0.3189367280387807, |
|
"learning_rate": 1.2796266575316069e-05, |
|
"loss": 1.0826, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 3.487484089944845, |
|
"grad_norm": 0.34730662545931273, |
|
"learning_rate": 1.2667173174776823e-05, |
|
"loss": 1.0908, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 3.4959694526941028, |
|
"grad_norm": 0.34138804654797594, |
|
"learning_rate": 1.2538512931399072e-05, |
|
"loss": 1.0769, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 3.5044548154433603, |
|
"grad_norm": 0.33424304735568794, |
|
"learning_rate": 1.2410290364025623e-05, |
|
"loss": 1.0795, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 3.5129401781926175, |
|
"grad_norm": 0.3313504435273678, |
|
"learning_rate": 1.2282509976127098e-05, |
|
"loss": 1.0878, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 3.521425540941875, |
|
"grad_norm": 0.3233457240542579, |
|
"learning_rate": 1.215517625564376e-05, |
|
"loss": 1.0892, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 3.5299109036911327, |
|
"grad_norm": 0.36342717886744974, |
|
"learning_rate": 1.2028293674827909e-05, |
|
"loss": 1.1121, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 3.5383962664403903, |
|
"grad_norm": 0.3384633320066937, |
|
"learning_rate": 1.1901866690086747e-05, |
|
"loss": 1.0873, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 3.546881629189648, |
|
"grad_norm": 0.3478508212091021, |
|
"learning_rate": 1.1775899741825947e-05, |
|
"loss": 1.1074, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 3.5553669919389055, |
|
"grad_norm": 0.33728300112135334, |
|
"learning_rate": 1.1650397254293583e-05, |
|
"loss": 1.0962, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 3.563852354688163, |
|
"grad_norm": 0.3237234384231759, |
|
"learning_rate": 1.1525363635424863e-05, |
|
"loss": 1.0918, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 3.563852354688163, |
|
"eval_loss": 1.3741682767868042, |
|
"eval_runtime": 52.4151, |
|
"eval_samples_per_second": 7.269, |
|
"eval_steps_per_second": 0.916, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 3.5723377174374207, |
|
"grad_norm": 0.35478171271989195, |
|
"learning_rate": 1.1400803276687208e-05, |
|
"loss": 1.0845, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 3.580823080186678, |
|
"grad_norm": 0.37888228352789066, |
|
"learning_rate": 1.1276720552926096e-05, |
|
"loss": 1.0622, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 3.5893084429359354, |
|
"grad_norm": 0.3279788866194937, |
|
"learning_rate": 1.1153119822211338e-05, |
|
"loss": 1.1021, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 3.597793805685193, |
|
"grad_norm": 0.3240797704044235, |
|
"learning_rate": 1.103000542568406e-05, |
|
"loss": 1.0931, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 3.6062791684344506, |
|
"grad_norm": 0.3326855671061254, |
|
"learning_rate": 1.0907381687404206e-05, |
|
"loss": 1.0856, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 3.614764531183708, |
|
"grad_norm": 0.3667803858944268, |
|
"learning_rate": 1.0785252914198676e-05, |
|
"loss": 1.0987, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 3.6232498939329654, |
|
"grad_norm": 0.36473787559668763, |
|
"learning_rate": 1.0663623395510087e-05, |
|
"loss": 1.0817, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 3.631735256682223, |
|
"grad_norm": 0.3409471312744712, |
|
"learning_rate": 1.0542497403246055e-05, |
|
"loss": 1.0817, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 3.6402206194314806, |
|
"grad_norm": 0.3432913496959211, |
|
"learning_rate": 1.0421879191629227e-05, |
|
"loss": 1.1028, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 3.648705982180738, |
|
"grad_norm": 0.3432540649401779, |
|
"learning_rate": 1.0301772997047809e-05, |
|
"loss": 1.1015, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 3.6571913449299958, |
|
"grad_norm": 0.33346746301829316, |
|
"learning_rate": 1.0182183037906799e-05, |
|
"loss": 1.0731, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 3.6656767076792534, |
|
"grad_norm": 0.3413502676678206, |
|
"learning_rate": 1.0063113514479809e-05, |
|
"loss": 1.0712, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 3.674162070428511, |
|
"grad_norm": 0.3285225243235751, |
|
"learning_rate": 9.94456860876159e-06, |
|
"loss": 1.0848, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 3.6826474331777685, |
|
"grad_norm": 0.3388803496844761, |
|
"learning_rate": 9.826552484321087e-06, |
|
"loss": 1.0819, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 3.691132795927026, |
|
"grad_norm": 0.33054778123683576, |
|
"learning_rate": 9.709069286155231e-06, |
|
"loss": 1.0865, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 3.6996181586762833, |
|
"grad_norm": 0.33512423378595196, |
|
"learning_rate": 9.592123140543388e-06, |
|
"loss": 1.0799, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 3.708103521425541, |
|
"grad_norm": 0.33282736103466287, |
|
"learning_rate": 9.475718154902382e-06, |
|
"loss": 1.089, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 3.7165888841747985, |
|
"grad_norm": 0.33746488116219714, |
|
"learning_rate": 9.359858417642266e-06, |
|
"loss": 1.1047, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 3.725074246924056, |
|
"grad_norm": 0.35251902658665213, |
|
"learning_rate": 9.244547998022709e-06, |
|
"loss": 1.0897, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 3.7335596096733137, |
|
"grad_norm": 0.32061079556576844, |
|
"learning_rate": 9.12979094601011e-06, |
|
"loss": 1.0873, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 3.742044972422571, |
|
"grad_norm": 0.3339779862802376, |
|
"learning_rate": 9.0155912921353e-06, |
|
"loss": 1.0976, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 3.7505303351718284, |
|
"grad_norm": 0.33137105564248603, |
|
"learning_rate": 8.901953047352032e-06, |
|
"loss": 1.0815, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 3.759015697921086, |
|
"grad_norm": 0.3421991932531284, |
|
"learning_rate": 8.788880202896072e-06, |
|
"loss": 1.0962, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 3.7675010606703436, |
|
"grad_norm": 0.34520719229693314, |
|
"learning_rate": 8.676376730145031e-06, |
|
"loss": 1.0862, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 3.7759864234196012, |
|
"grad_norm": 0.3472744469623473, |
|
"learning_rate": 8.564446580478877e-06, |
|
"loss": 1.0906, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 3.784471786168859, |
|
"grad_norm": 0.3247372551943734, |
|
"learning_rate": 8.453093685141156e-06, |
|
"loss": 1.0892, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 3.7929571489181164, |
|
"grad_norm": 0.32380813984188905, |
|
"learning_rate": 8.342321955100935e-06, |
|
"loss": 1.1084, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 3.801442511667374, |
|
"grad_norm": 0.3379339847312466, |
|
"learning_rate": 8.232135280915398e-06, |
|
"loss": 1.0751, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 3.809927874416631, |
|
"grad_norm": 0.34446270018278696, |
|
"learning_rate": 8.122537532593264e-06, |
|
"loss": 1.0934, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 3.8184132371658888, |
|
"grad_norm": 0.34800470636637415, |
|
"learning_rate": 8.013532559458761e-06, |
|
"loss": 1.0896, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 3.8268985999151464, |
|
"grad_norm": 0.34720788545028275, |
|
"learning_rate": 7.90512419001656e-06, |
|
"loss": 1.0755, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 3.835383962664404, |
|
"grad_norm": 0.32618575043616216, |
|
"learning_rate": 7.797316231817198e-06, |
|
"loss": 1.0897, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 3.8438693254136616, |
|
"grad_norm": 0.3301952338609731, |
|
"learning_rate": 7.690112471323419e-06, |
|
"loss": 1.0737, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 3.8523546881629187, |
|
"grad_norm": 0.36691506455432976, |
|
"learning_rate": 7.583516673777141e-06, |
|
"loss": 1.0749, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 3.8608400509121763, |
|
"grad_norm": 0.32038136896218583, |
|
"learning_rate": 7.477532583067234e-06, |
|
"loss": 1.0945, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 3.869325413661434, |
|
"grad_norm": 0.3612002616743919, |
|
"learning_rate": 7.3721639215980216e-06, |
|
"loss": 1.0869, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 3.8778107764106915, |
|
"grad_norm": 0.32001031069324903, |
|
"learning_rate": 7.267414390158533e-06, |
|
"loss": 1.0587, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 3.886296139159949, |
|
"grad_norm": 0.3397263736848682, |
|
"learning_rate": 7.163287667792557e-06, |
|
"loss": 1.0815, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 3.8947815019092067, |
|
"grad_norm": 0.3376479038706788, |
|
"learning_rate": 7.059787411669361e-06, |
|
"loss": 1.0966, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 3.9032668646584643, |
|
"grad_norm": 0.3361477045154485, |
|
"learning_rate": 6.9569172569553195e-06, |
|
"loss": 1.095, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 3.911752227407722, |
|
"grad_norm": 0.3200411096233463, |
|
"learning_rate": 6.854680816686177e-06, |
|
"loss": 1.0547, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 3.9202375901569795, |
|
"grad_norm": 0.31629554430499285, |
|
"learning_rate": 6.7530816816401745e-06, |
|
"loss": 1.0722, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 3.9287229529062366, |
|
"grad_norm": 0.41055357604639015, |
|
"learning_rate": 6.6521234202119396e-06, |
|
"loss": 1.0598, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 3.9372083156554942, |
|
"grad_norm": 0.3066099811651359, |
|
"learning_rate": 6.551809578287138e-06, |
|
"loss": 1.0796, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 3.945693678404752, |
|
"grad_norm": 0.34352050628293235, |
|
"learning_rate": 6.452143679117964e-06, |
|
"loss": 1.0635, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.9541790411540094, |
|
"grad_norm": 0.33858419417369884, |
|
"learning_rate": 6.353129223199353e-06, |
|
"loss": 1.0939, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 3.962664403903267, |
|
"grad_norm": 0.3421642104660851, |
|
"learning_rate": 6.2547696881460835e-06, |
|
"loss": 1.0852, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 3.971149766652524, |
|
"grad_norm": 0.3146569348417928, |
|
"learning_rate": 6.157068528570592e-06, |
|
"loss": 1.0858, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 3.9796351294017818, |
|
"grad_norm": 0.33303947321847055, |
|
"learning_rate": 6.060029175961665e-06, |
|
"loss": 1.0894, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 3.9881204921510394, |
|
"grad_norm": 0.3246201166003683, |
|
"learning_rate": 5.963655038563904e-06, |
|
"loss": 1.0713, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.996605854900297, |
|
"grad_norm": 0.33668268336604734, |
|
"learning_rate": 5.867949501258047e-06, |
|
"loss": 1.0714, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 4.005091217649555, |
|
"grad_norm": 0.340739931299029, |
|
"learning_rate": 5.7729159254420405e-06, |
|
"loss": 1.1111, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 4.013576580398812, |
|
"grad_norm": 0.4123963053921562, |
|
"learning_rate": 5.678557648913007e-06, |
|
"loss": 1.0235, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 4.02206194314807, |
|
"grad_norm": 0.3503972385770886, |
|
"learning_rate": 5.584877985750036e-06, |
|
"loss": 1.0207, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 4.030547305897327, |
|
"grad_norm": 0.36492718284781545, |
|
"learning_rate": 5.491880226197707e-06, |
|
"loss": 1.0065, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 4.039032668646585, |
|
"grad_norm": 0.3558493920675881, |
|
"learning_rate": 5.399567636550634e-06, |
|
"loss": 1.012, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 4.0475180313958425, |
|
"grad_norm": 0.35265155864184533, |
|
"learning_rate": 5.307943459038656e-06, |
|
"loss": 1.0042, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 4.0560033941451, |
|
"grad_norm": 0.3628265724559059, |
|
"learning_rate": 5.217010911713022e-06, |
|
"loss": 1.0138, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 4.064488756894357, |
|
"grad_norm": 0.366016038213362, |
|
"learning_rate": 5.126773188333326e-06, |
|
"loss": 1.0055, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 4.072974119643614, |
|
"grad_norm": 0.3663550294337188, |
|
"learning_rate": 5.037233458255375e-06, |
|
"loss": 1.0143, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 4.072974119643614, |
|
"eval_loss": 1.3839582204818726, |
|
"eval_runtime": 52.5375, |
|
"eval_samples_per_second": 7.252, |
|
"eval_steps_per_second": 0.914, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 4.081459482392872, |
|
"grad_norm": 0.3734159050536221, |
|
"learning_rate": 4.9483948663198106e-06, |
|
"loss": 1.0256, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 4.08994484514213, |
|
"grad_norm": 0.3756787343497328, |
|
"learning_rate": 4.860260532741739e-06, |
|
"loss": 0.9939, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 4.098430207891387, |
|
"grad_norm": 0.3641062929794199, |
|
"learning_rate": 4.7728335530010684e-06, |
|
"loss": 1.0016, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 4.106915570640645, |
|
"grad_norm": 0.3720412035298521, |
|
"learning_rate": 4.686116997733822e-06, |
|
"loss": 1.0099, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 4.115400933389902, |
|
"grad_norm": 0.3494502732717846, |
|
"learning_rate": 4.600113912624305e-06, |
|
"loss": 0.9903, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 4.12388629613916, |
|
"grad_norm": 0.3847064809020709, |
|
"learning_rate": 4.514827318298087e-06, |
|
"loss": 1.0109, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 4.132371658888418, |
|
"grad_norm": 0.3724702679939693, |
|
"learning_rate": 4.430260210215972e-06, |
|
"loss": 1.0172, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 4.140857021637675, |
|
"grad_norm": 0.3854607242705416, |
|
"learning_rate": 4.346415558568712e-06, |
|
"loss": 1.014, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 4.149342384386933, |
|
"grad_norm": 0.4119725224707018, |
|
"learning_rate": 4.263296308172774e-06, |
|
"loss": 1.0037, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 4.15782774713619, |
|
"grad_norm": 0.38075029944179556, |
|
"learning_rate": 4.180905378366845e-06, |
|
"loss": 1.0004, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 4.166313109885448, |
|
"grad_norm": 0.3788138960181575, |
|
"learning_rate": 4.099245662909338e-06, |
|
"loss": 0.9996, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 4.174798472634705, |
|
"grad_norm": 0.5528304971596814, |
|
"learning_rate": 4.018320029876729e-06, |
|
"loss": 0.9861, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 4.183283835383962, |
|
"grad_norm": 0.38085596177818104, |
|
"learning_rate": 3.938131321562841e-06, |
|
"loss": 1.0123, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 4.19176919813322, |
|
"grad_norm": 0.4065521249170979, |
|
"learning_rate": 3.858682354379012e-06, |
|
"loss": 1.0124, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 4.2002545608824775, |
|
"grad_norm": 0.37820891516953525, |
|
"learning_rate": 3.7799759187551603e-06, |
|
"loss": 0.9985, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 4.208739923631735, |
|
"grad_norm": 0.38792497529901426, |
|
"learning_rate": 3.7020147790418263e-06, |
|
"loss": 1.0166, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 4.217225286380993, |
|
"grad_norm": 0.37981175823185026, |
|
"learning_rate": 3.624801673413011e-06, |
|
"loss": 1.0012, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 4.22571064913025, |
|
"grad_norm": 0.3752506606207182, |
|
"learning_rate": 3.5483393137700717e-06, |
|
"loss": 0.9926, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 4.234196011879508, |
|
"grad_norm": 0.36701501672766035, |
|
"learning_rate": 3.4726303856464306e-06, |
|
"loss": 0.9998, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 4.2426813746287655, |
|
"grad_norm": 0.37096066532390476, |
|
"learning_rate": 3.3976775481132678e-06, |
|
"loss": 1.004, |
|
"step": 5000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5890, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4203278871035904.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|