|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 9.977220956719817, |
|
"eval_steps": 500, |
|
"global_step": 2190, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004555808656036446, |
|
"grad_norm": 362.0, |
|
"learning_rate": 9.132420091324201e-07, |
|
"loss": 46.6393, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.022779043280182234, |
|
"grad_norm": 372.0, |
|
"learning_rate": 4.566210045662101e-06, |
|
"loss": 46.947, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.04555808656036447, |
|
"grad_norm": 190.0, |
|
"learning_rate": 9.132420091324201e-06, |
|
"loss": 43.7966, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0683371298405467, |
|
"grad_norm": 128.0, |
|
"learning_rate": 1.3698630136986302e-05, |
|
"loss": 35.4793, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.09111617312072894, |
|
"grad_norm": 40.75, |
|
"learning_rate": 1.8264840182648402e-05, |
|
"loss": 28.4066, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.11389521640091116, |
|
"grad_norm": 19.5, |
|
"learning_rate": 2.2831050228310503e-05, |
|
"loss": 24.6114, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.1366742596810934, |
|
"grad_norm": 15.8125, |
|
"learning_rate": 2.7397260273972603e-05, |
|
"loss": 22.96, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.15945330296127563, |
|
"grad_norm": 9.375, |
|
"learning_rate": 3.1963470319634704e-05, |
|
"loss": 21.0704, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.18223234624145787, |
|
"grad_norm": 4.75, |
|
"learning_rate": 3.6529680365296805e-05, |
|
"loss": 19.7729, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.20501138952164008, |
|
"grad_norm": 4.59375, |
|
"learning_rate": 4.1095890410958905e-05, |
|
"loss": 19.4123, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.22779043280182232, |
|
"grad_norm": 5.125, |
|
"learning_rate": 4.5662100456621006e-05, |
|
"loss": 18.5253, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2505694760820046, |
|
"grad_norm": 7.25, |
|
"learning_rate": 5.0228310502283106e-05, |
|
"loss": 17.9153, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.2733485193621868, |
|
"grad_norm": 10.9375, |
|
"learning_rate": 5.479452054794521e-05, |
|
"loss": 16.7799, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.296127562642369, |
|
"grad_norm": 18.125, |
|
"learning_rate": 5.936073059360731e-05, |
|
"loss": 15.261, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.31890660592255127, |
|
"grad_norm": 28.875, |
|
"learning_rate": 6.392694063926941e-05, |
|
"loss": 11.9226, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.3416856492027335, |
|
"grad_norm": 27.25, |
|
"learning_rate": 6.84931506849315e-05, |
|
"loss": 7.2453, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.36446469248291574, |
|
"grad_norm": 6.375, |
|
"learning_rate": 7.305936073059361e-05, |
|
"loss": 3.4058, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.38724373576309795, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 7.76255707762557e-05, |
|
"loss": 2.3455, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.41002277904328016, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 8.219178082191781e-05, |
|
"loss": 2.0168, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.4328018223234624, |
|
"grad_norm": 4.5, |
|
"learning_rate": 8.67579908675799e-05, |
|
"loss": 1.7945, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.45558086560364464, |
|
"grad_norm": 0.79296875, |
|
"learning_rate": 9.132420091324201e-05, |
|
"loss": 1.6782, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.4783599088838269, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 9.58904109589041e-05, |
|
"loss": 1.575, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.5011389521640092, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 0.00010045662100456621, |
|
"loss": 1.506, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.5239179954441914, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 0.00010502283105022832, |
|
"loss": 1.47, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.5466970387243736, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 0.00010958904109589041, |
|
"loss": 1.433, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.5694760820045558, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 0.00011415525114155252, |
|
"loss": 1.392, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.592255125284738, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 0.00011872146118721462, |
|
"loss": 1.354, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.6150341685649203, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 0.0001232876712328767, |
|
"loss": 1.336, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.6378132118451025, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.00012785388127853882, |
|
"loss": 1.3193, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.6605922551252847, |
|
"grad_norm": 2.125, |
|
"learning_rate": 0.00013242009132420092, |
|
"loss": 1.3012, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.683371298405467, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 0.000136986301369863, |
|
"loss": 1.2792, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.7061503416856492, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 0.0001415525114155251, |
|
"loss": 1.2621, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.7289293849658315, |
|
"grad_norm": 1.5, |
|
"learning_rate": 0.00014611872146118722, |
|
"loss": 1.2575, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.7517084282460137, |
|
"grad_norm": 15.1875, |
|
"learning_rate": 0.00015068493150684933, |
|
"loss": 1.2592, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.7744874715261959, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 0.0001552511415525114, |
|
"loss": 1.2579, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.7972665148063781, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 0.00015981735159817351, |
|
"loss": 1.234, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.8200455580865603, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 0.00016438356164383562, |
|
"loss": 1.2226, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.8428246013667426, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 0.00016894977168949773, |
|
"loss": 1.2259, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.8656036446469249, |
|
"grad_norm": 4.25, |
|
"learning_rate": 0.0001735159817351598, |
|
"loss": 1.227, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.8883826879271071, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 0.00017808219178082192, |
|
"loss": 1.2009, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.9111617312072893, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 0.00018264840182648402, |
|
"loss": 1.1779, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.9339407744874715, |
|
"grad_norm": 2.75, |
|
"learning_rate": 0.00018721461187214613, |
|
"loss": 1.185, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.9567198177676538, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 0.0001917808219178082, |
|
"loss": 1.168, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.979498861047836, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 0.00019634703196347032, |
|
"loss": 1.1594, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.9977220956719818, |
|
"eval_loss": 2.6195228099823, |
|
"eval_runtime": 0.2786, |
|
"eval_samples_per_second": 35.888, |
|
"eval_steps_per_second": 3.589, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.0022779043280183, |
|
"grad_norm": 15.5625, |
|
"learning_rate": 0.00019999987297289245, |
|
"loss": 1.165, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.0250569476082005, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 0.00019999542705801296, |
|
"loss": 1.1666, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.0478359908883828, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 0.00019998463011046926, |
|
"loss": 1.168, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.070615034168565, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 0.00019996748281601038, |
|
"loss": 1.1708, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.0933940774487472, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 0.00019994398626371643, |
|
"loss": 1.1528, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.1161731207289294, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 0.0001999141419459293, |
|
"loss": 1.1295, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.1389521640091116, |
|
"grad_norm": 1.25, |
|
"learning_rate": 0.00019987795175815807, |
|
"loss": 1.1235, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.1617312072892938, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 0.0001998354179989585, |
|
"loss": 1.1176, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.184510250569476, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 0.0001997865433697871, |
|
"loss": 1.1074, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.2072892938496582, |
|
"grad_norm": 7.53125, |
|
"learning_rate": 0.00019973133097482947, |
|
"loss": 1.1124, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.2300683371298406, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 0.00019966978432080316, |
|
"loss": 1.1298, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.2528473804100229, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 0.00019960190731673505, |
|
"loss": 1.1047, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.275626423690205, |
|
"grad_norm": 1.75, |
|
"learning_rate": 0.00019952770427371304, |
|
"loss": 1.1036, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.2984054669703873, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 0.00019944717990461207, |
|
"loss": 1.0868, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.3211845102505695, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 0.00019936033932379504, |
|
"loss": 1.0943, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.3439635535307517, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 0.00019926718804678785, |
|
"loss": 1.098, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.366742596810934, |
|
"grad_norm": 2.375, |
|
"learning_rate": 0.000199167731989929, |
|
"loss": 1.1416, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.3895216400911161, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 0.00019906197746999408, |
|
"loss": 1.0961, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.4123006833712983, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 0.00019894993120379435, |
|
"loss": 1.0799, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.4350797266514808, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 0.00019883160030775016, |
|
"loss": 1.0671, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.4578587699316627, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 0.00019870699229743911, |
|
"loss": 1.0786, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.4806378132118452, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 0.0001985761150871185, |
|
"loss": 1.0807, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.5034168564920274, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 0.00019843897698922284, |
|
"loss": 1.0898, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.5261958997722096, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 0.00019829558671383585, |
|
"loss": 1.0734, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.5489749430523918, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 0.00019814595336813725, |
|
"loss": 1.0776, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.571753986332574, |
|
"grad_norm": 2.25, |
|
"learning_rate": 0.0001979900864558242, |
|
"loss": 1.0672, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.5945330296127562, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 0.00019782799587650805, |
|
"loss": 1.0553, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.6173120728929384, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 0.00019765969192508508, |
|
"loss": 1.0515, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.6400911161731209, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 0.00019748518529108316, |
|
"loss": 1.0499, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.6628701594533029, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 0.00019730448705798239, |
|
"loss": 1.0539, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.6856492027334853, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 0.00019711760870251143, |
|
"loss": 1.0605, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.7084282460136673, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 0.00019692456209391846, |
|
"loss": 1.0732, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.7312072892938497, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 0.0001967253594932173, |
|
"loss": 1.0485, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.753986332574032, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 0.00019652001355240878, |
|
"loss": 1.0665, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.7767653758542141, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 0.00019630853731367713, |
|
"loss": 1.0505, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.7995444191343963, |
|
"grad_norm": 8.125, |
|
"learning_rate": 0.0001960909442085615, |
|
"loss": 1.0415, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.8223234624145785, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 0.00019586724805710306, |
|
"loss": 1.0477, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.845102505694761, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 0.0001956374630669672, |
|
"loss": 1.0509, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.867881548974943, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 0.00019540160383254107, |
|
"loss": 1.0417, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.8906605922551254, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 0.00019515968533400673, |
|
"loss": 1.0382, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.9134396355353074, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 0.00019491172293638968, |
|
"loss": 1.0424, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.9362186788154898, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 0.00019465773238858298, |
|
"loss": 1.0425, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.958997722095672, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 0.00019439772982234697, |
|
"loss": 1.041, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.9817767653758542, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 0.00019413173175128473, |
|
"loss": 1.0276, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 2.4670326709747314, |
|
"eval_runtime": 0.2346, |
|
"eval_samples_per_second": 42.634, |
|
"eval_steps_per_second": 4.263, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.0045558086560367, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 0.0001938597550697932, |
|
"loss": 1.0312, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.0273348519362187, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 0.00019358181705199015, |
|
"loss": 1.0035, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.050113895216401, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 0.00019329793535061723, |
|
"loss": 1.0027, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.072892938496583, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 0.00019300812799591846, |
|
"loss": 0.9951, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.0956719817767655, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 0.00019271241339449536, |
|
"loss": 1.0143, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.1184510250569475, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 0.00019241081032813772, |
|
"loss": 0.9942, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.14123006833713, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 0.00019210333795263075, |
|
"loss": 1.0064, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.164009111617312, |
|
"grad_norm": 3.96875, |
|
"learning_rate": 0.00019179001579653853, |
|
"loss": 0.9909, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.1867881548974943, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 0.0001914708637599636, |
|
"loss": 1.006, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.2095671981776768, |
|
"grad_norm": 1.375, |
|
"learning_rate": 0.00019114590211328288, |
|
"loss": 1.0048, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.2323462414578588, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 0.0001908151514958606, |
|
"loss": 1.0088, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.255125284738041, |
|
"grad_norm": 4.0, |
|
"learning_rate": 0.00019047863291473717, |
|
"loss": 1.0095, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.277904328018223, |
|
"grad_norm": 10.875, |
|
"learning_rate": 0.00019013636774329495, |
|
"loss": 1.0044, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.3006833712984056, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 0.00018978837771990085, |
|
"loss": 1.0028, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.3234624145785876, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 0.0001894346849465257, |
|
"loss": 0.9945, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.34624145785877, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 0.00018907531188734026, |
|
"loss": 0.9848, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.369020501138952, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 0.00018871028136728874, |
|
"loss": 0.9888, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.3917995444191344, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 0.00018833961657063885, |
|
"loss": 1.0026, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.4145785876993164, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 0.0001879633410395095, |
|
"loss": 1.0341, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.437357630979499, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 0.00018758147867237548, |
|
"loss": 1.0097, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.4601366742596813, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 0.00018719405372254948, |
|
"loss": 1.0025, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.4829157175398633, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 0.00018680109079664188, |
|
"loss": 1.0056, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.5056947608200457, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 0.0001864026148529978, |
|
"loss": 1.0143, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.5284738041002277, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.00018599865120011192, |
|
"loss": 1.0024, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.55125284738041, |
|
"grad_norm": 17.875, |
|
"learning_rate": 0.00018558922549502107, |
|
"loss": 1.0595, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.574031890660592, |
|
"grad_norm": 1.0, |
|
"learning_rate": 0.0001851743637416747, |
|
"loss": 1.0061, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.5968109339407746, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 0.00018475409228928312, |
|
"loss": 0.9936, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.619589977220957, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 0.00018432843783064429, |
|
"loss": 1.0232, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.642369020501139, |
|
"grad_norm": 5.375, |
|
"learning_rate": 0.00018389742740044813, |
|
"loss": 0.989, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.665148063781321, |
|
"grad_norm": 3.671875, |
|
"learning_rate": 0.00018346108837355972, |
|
"loss": 0.9927, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.6879271070615034, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 0.00018301944846328049, |
|
"loss": 0.9807, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.710706150341686, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 0.0001825725357195881, |
|
"loss": 1.0005, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.733485193621868, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 0.00018212037852735486, |
|
"loss": 0.9641, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.7562642369020502, |
|
"grad_norm": 1.875, |
|
"learning_rate": 0.0001816630056045451, |
|
"loss": 0.9897, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.7790432801822322, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 0.0001812004460003909, |
|
"loss": 0.9767, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.8018223234624147, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 0.00018073272909354727, |
|
"loss": 0.9739, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.8246013667425967, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 0.0001802598845902262, |
|
"loss": 0.9698, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.847380410022779, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 0.00017978194252230985, |
|
"loss": 0.9707, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.8701594533029615, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 0.00017929893324544332, |
|
"loss": 0.9717, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.8929384965831435, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.0001788108874371063, |
|
"loss": 0.9655, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.9157175398633255, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 0.00017831783609466504, |
|
"loss": 0.998, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.938496583143508, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 0.00017781981053340337, |
|
"loss": 0.9503, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.9612756264236904, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 0.00017731684238453385, |
|
"loss": 0.9531, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.9840546697038723, |
|
"grad_norm": 0.875, |
|
"learning_rate": 0.0001768089635931887, |
|
"loss": 0.9492, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.997722095671982, |
|
"eval_loss": 2.445070266723633, |
|
"eval_runtime": 0.2582, |
|
"eval_samples_per_second": 38.725, |
|
"eval_steps_per_second": 3.872, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 3.0068337129840548, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 0.00017629620641639103, |
|
"loss": 0.9628, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.0296127562642368, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 0.00017577860342100579, |
|
"loss": 0.9402, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.052391799544419, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 0.0001752561874816717, |
|
"loss": 0.9384, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.075170842824601, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 0.00017472899177871297, |
|
"loss": 0.9494, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.0979498861047836, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 0.00017419704979603214, |
|
"loss": 0.9598, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.120728929384966, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 0.00017366039531898326, |
|
"loss": 0.9588, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.143507972665148, |
|
"grad_norm": 11.5625, |
|
"learning_rate": 0.00017311906243222614, |
|
"loss": 0.9401, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.1662870159453305, |
|
"grad_norm": 7.125, |
|
"learning_rate": 0.0001725730855175615, |
|
"loss": 0.9367, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.1890660592255125, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 0.00017202249925174723, |
|
"loss": 0.932, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.211845102505695, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 0.00017146733860429612, |
|
"loss": 0.9413, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.234624145785877, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 0.0001709076388352546, |
|
"loss": 0.95, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.2574031890660593, |
|
"grad_norm": 18.0, |
|
"learning_rate": 0.00017034343549296346, |
|
"loss": 0.9519, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.2801822323462413, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 0.00016977476441179992, |
|
"loss": 0.932, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.3029612756264237, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 0.0001692016617099018, |
|
"loss": 0.9213, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.3257403189066057, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 0.0001686241637868734, |
|
"loss": 0.9203, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.348519362186788, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 0.0001680423073214737, |
|
"loss": 0.9136, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.3712984054669706, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 0.00016745612926928694, |
|
"loss": 0.9299, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.3940774487471526, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 0.0001668656668603751, |
|
"loss": 0.9088, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.416856492027335, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 0.00016627095759691362, |
|
"loss": 0.9212, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.439635535307517, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 0.0001656720392508094, |
|
"loss": 0.9136, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.4624145785876994, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 0.00016506894986130171, |
|
"loss": 0.9163, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.4851936218678814, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 0.00016446172773254629, |
|
"loss": 0.917, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.507972665148064, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 0.00016385041143118255, |
|
"loss": 0.9071, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.5307517084282463, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 0.000163235039783884, |
|
"loss": 0.897, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.5535307517084282, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.0001626156518748922, |
|
"loss": 0.8978, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.5763097949886102, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00016199228704353455, |
|
"loss": 0.896, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.5990888382687927, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 0.00016136498488172568, |
|
"loss": 0.8931, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.621867881548975, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.0001607337852314527, |
|
"loss": 0.8912, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 3.644646924829157, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 0.00016009872818224485, |
|
"loss": 0.892, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.6674259681093395, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 0.00015945985406862721, |
|
"loss": 0.9028, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 3.6902050113895215, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 0.00015881720346755905, |
|
"loss": 0.8992, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.712984054669704, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 0.00015817081719585643, |
|
"loss": 0.9005, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 3.735763097949886, |
|
"grad_norm": 16.625, |
|
"learning_rate": 0.00015752073630759998, |
|
"loss": 0.9018, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.7585421412300684, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 0.00015686700209152738, |
|
"loss": 0.9036, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 3.781321184510251, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 0.00015620965606841098, |
|
"loss": 0.8872, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 3.8041002277904328, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 0.0001555487399884206, |
|
"loss": 0.8847, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 3.8268792710706148, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 0.00015488429582847192, |
|
"loss": 0.8814, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 3.849658314350797, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 0.0001542163657895605, |
|
"loss": 0.8898, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 3.8724373576309796, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 0.00015354499229408114, |
|
"loss": 0.8841, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 3.8952164009111616, |
|
"grad_norm": 0.75, |
|
"learning_rate": 0.0001528702179831338, |
|
"loss": 0.879, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 3.917995444191344, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 0.00015219208571381525, |
|
"loss": 0.8911, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 3.940774487471526, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 0.00015151063855649698, |
|
"loss": 0.8844, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 3.9635535307517085, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00015082591979208976, |
|
"loss": 0.8787, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 3.9863325740318905, |
|
"grad_norm": 0.7890625, |
|
"learning_rate": 0.00015013797290929466, |
|
"loss": 0.8751, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 2.435899496078491, |
|
"eval_runtime": 0.2346, |
|
"eval_samples_per_second": 42.632, |
|
"eval_steps_per_second": 4.263, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 4.009111617312073, |
|
"grad_norm": 0.83203125, |
|
"learning_rate": 0.00014944684160184108, |
|
"loss": 0.8723, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 4.031890660592255, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00014875256976571135, |
|
"loss": 0.8563, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 4.054669703872437, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00014805520149635307, |
|
"loss": 0.8632, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.077448747152619, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00014735478108587828, |
|
"loss": 0.8585, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.100227790432802, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 0.00014665135302025035, |
|
"loss": 0.8662, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.123006833712984, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.00014594496197645852, |
|
"loss": 0.8569, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.145785876993166, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 0.0001452356528196804, |
|
"loss": 0.8647, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.168564920273348, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.00014452347060043237, |
|
"loss": 0.8513, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.191343963553531, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00014380846055170828, |
|
"loss": 0.8568, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.214123006833713, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.00014309066808610655, |
|
"loss": 0.8645, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.236902050113895, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 0.0001423701387929459, |
|
"loss": 0.8526, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.259681093394078, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00014164691843536982, |
|
"loss": 0.8656, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.28246013667426, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.00014092105294744, |
|
"loss": 0.8579, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.305239179954442, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 0.00014019258843121893, |
|
"loss": 0.854, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 4.328018223234624, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.0001394615711538417, |
|
"loss": 0.8517, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.350797266514807, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00013872804754457759, |
|
"loss": 0.8575, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 4.373576309794989, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.00013799206419188103, |
|
"loss": 0.8528, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.396355353075171, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 0.00013725366784043288, |
|
"loss": 0.8551, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 4.4191343963553535, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.00013651290538817113, |
|
"loss": 0.8553, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.4419134396355355, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 0.0001357698238833126, |
|
"loss": 0.8585, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 4.4646924829157175, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00013502447052136455, |
|
"loss": 0.8511, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.4874715261958995, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00013427689264212738, |
|
"loss": 0.8612, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.510250569476082, |
|
"grad_norm": 0.83203125, |
|
"learning_rate": 0.00013352713772668765, |
|
"loss": 0.8502, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.533029612756264, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.0001327752533944025, |
|
"loss": 0.858, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 4.555808656036446, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00013202128739987532, |
|
"loss": 0.8462, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.578587699316628, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00013126528762992247, |
|
"loss": 0.8539, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 4.601366742596811, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0001305073021005321, |
|
"loss": 0.8562, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.624145785876993, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.0001297473789538142, |
|
"loss": 0.8625, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 4.646924829157175, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00012898556645494325, |
|
"loss": 0.8547, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 4.669703872437358, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.0001282219129890925, |
|
"loss": 0.849, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 4.69248291571754, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.00012745646705836097, |
|
"loss": 0.8469, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 4.715261958997722, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.0001266892772786929, |
|
"loss": 0.8512, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 4.738041002277904, |
|
"grad_norm": 0.79296875, |
|
"learning_rate": 0.0001259203923767901, |
|
"loss": 0.8537, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 4.760820045558087, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00012514986118701695, |
|
"loss": 0.8512, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 4.783599088838269, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.00012437773264829897, |
|
"loss": 0.8507, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 4.806378132118451, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00012360405580101448, |
|
"loss": 0.8485, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 4.829157175398633, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00012282887978387976, |
|
"loss": 0.8498, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 4.851936218678816, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00012205225383082843, |
|
"loss": 0.8518, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 4.874715261958998, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.000121274227267884, |
|
"loss": 0.846, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.89749430523918, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00012049484951002739, |
|
"loss": 0.8454, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 4.920273348519363, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00011971417005805818, |
|
"loss": 0.8457, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.943052391799545, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.00011893223849545084, |
|
"loss": 0.845, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 4.965831435079727, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 0.00011814910448520536, |
|
"loss": 0.8473, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.988610478359909, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00011736481776669306, |
|
"loss": 0.8477, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.997722095671982, |
|
"eval_loss": 2.439002275466919, |
|
"eval_runtime": 0.2582, |
|
"eval_samples_per_second": 38.729, |
|
"eval_steps_per_second": 3.873, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 5.011389521640091, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.00011657942815249754, |
|
"loss": 0.8327, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 5.034168564920273, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00011579298552525084, |
|
"loss": 0.8204, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 5.056947608200455, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00011500553983446527, |
|
"loss": 0.8223, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 5.079726651480637, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.00011421714109336097, |
|
"loss": 0.8185, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 5.10250569476082, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00011342783937568926, |
|
"loss": 0.8175, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 5.125284738041002, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 0.00011263768481255264, |
|
"loss": 0.8238, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 5.148063781321184, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00011184672758922034, |
|
"loss": 0.8184, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 5.170842824601367, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00011105501794194131, |
|
"loss": 0.8184, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 5.193621867881549, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00011026260615475333, |
|
"loss": 0.8265, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 5.216400911161731, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00010946954255628928, |
|
"loss": 0.83, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 5.239179954441913, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00010867587751658079, |
|
"loss": 0.8217, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 5.261958997722096, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00010788166144385888, |
|
"loss": 0.8234, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 5.284738041002278, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.0001070869447813525, |
|
"loss": 0.8175, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 5.30751708428246, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0001062917780040847, |
|
"loss": 0.8245, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 5.330296127562642, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001054962116156667, |
|
"loss": 0.8147, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 5.353075170842825, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00010470029614509041, |
|
"loss": 0.8196, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 5.375854214123007, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00010390408214351892, |
|
"loss": 0.8208, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 5.398633257403189, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0001031076201810762, |
|
"loss": 0.8157, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 5.421412300683372, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00010231096084363483, |
|
"loss": 0.8309, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 5.444191343963554, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00010151415472960342, |
|
"loss": 0.8216, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 5.466970387243736, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00010071725244671282, |
|
"loss": 0.824, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 5.489749430523918, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 9.992030460880181e-05, |
|
"loss": 0.8223, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 5.5125284738041005, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 9.91233618326026e-05, |
|
"loss": 0.8222, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 5.5353075170842825, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 9.83264747345259e-05, |
|
"loss": 0.8225, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 5.5580865603644645, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 9.752969392744606e-05, |
|
"loss": 0.8278, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 5.5808656036446465, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 9.673307001748661e-05, |
|
"loss": 0.8139, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 5.603644646924829, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 9.593665360080599e-05, |
|
"loss": 0.8191, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 5.626423690205011, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 9.514049526038418e-05, |
|
"loss": 0.8231, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 5.649202733485193, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 9.43446455628097e-05, |
|
"loss": 0.8228, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 5.671981776765376, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 9.354915505506839e-05, |
|
"loss": 0.8123, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 5.694760820045558, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 9.27540742613326e-05, |
|
"loss": 0.8157, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 5.71753986332574, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 9.195945367975256e-05, |
|
"loss": 0.8255, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 5.740318906605923, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 9.116534377924883e-05, |
|
"loss": 0.8224, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 5.763097949886105, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 9.037179499630703e-05, |
|
"loss": 0.8189, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 5.785876993166287, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 8.957885773177438e-05, |
|
"loss": 0.8129, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 5.808656036446469, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 8.878658234765858e-05, |
|
"loss": 0.8149, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 5.831435079726651, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 8.799501916392912e-05, |
|
"loss": 0.8173, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 5.854214123006834, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 8.720421845532151e-05, |
|
"loss": 0.8197, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 5.876993166287016, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.641423044814374e-05, |
|
"loss": 0.8122, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 5.899772209567198, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 8.562510531708677e-05, |
|
"loss": 0.8124, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 5.922551252847381, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 8.48368931820373e-05, |
|
"loss": 0.8148, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 5.945330296127563, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 8.404964410489485e-05, |
|
"loss": 0.8178, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 5.968109339407745, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 8.32634080863919e-05, |
|
"loss": 0.808, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 5.990888382687928, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 8.247823506291844e-05, |
|
"loss": 0.809, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 2.4545505046844482, |
|
"eval_runtime": 0.2415, |
|
"eval_samples_per_second": 41.413, |
|
"eval_steps_per_second": 4.141, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 6.0136674259681095, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 8.169417490335007e-05, |
|
"loss": 0.8048, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 6.0364464692482915, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 8.091127740588094e-05, |
|
"loss": 0.7976, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 6.0592255125284735, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 8.012959229486061e-05, |
|
"loss": 0.7858, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 6.082004555808656, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 7.934916921763628e-05, |
|
"loss": 0.7997, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 6.104783599088838, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 7.857005774139907e-05, |
|
"loss": 0.8061, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 6.12756264236902, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 7.779230735003628e-05, |
|
"loss": 0.7975, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 6.150341685649202, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 7.701596744098818e-05, |
|
"loss": 0.7943, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 6.173120728929385, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 7.624108732211081e-05, |
|
"loss": 0.7965, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 6.195899772209567, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 7.54677162085442e-05, |
|
"loss": 0.7944, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 6.218678815489749, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 7.469590321958662e-05, |
|
"loss": 0.7987, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 6.241457858769932, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 7.392569737557474e-05, |
|
"loss": 0.8024, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 6.264236902050114, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 7.31571475947703e-05, |
|
"loss": 0.7931, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 6.287015945330296, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 7.239030269025311e-05, |
|
"loss": 0.8034, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 6.309794988610478, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 7.162521136682085e-05, |
|
"loss": 0.7963, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 6.332574031890661, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 7.08619222178954e-05, |
|
"loss": 0.8004, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 6.355353075170843, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 7.010048372243698e-05, |
|
"loss": 0.7979, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 6.378132118451025, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 6.934094424186459e-05, |
|
"loss": 0.799, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 6.400911161731207, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 6.858335201698485e-05, |
|
"loss": 0.7999, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 6.42369020501139, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 6.782775516492771e-05, |
|
"loss": 0.798, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 6.446469248291572, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 6.70742016760907e-05, |
|
"loss": 0.7977, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 6.469248291571754, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 6.632273941109064e-05, |
|
"loss": 0.8018, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 6.492027334851937, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 6.5573416097724e-05, |
|
"loss": 0.7899, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 6.514806378132119, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 6.482627932793553e-05, |
|
"loss": 0.7987, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 6.537585421412301, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 6.408137655479554e-05, |
|
"loss": 0.799, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 6.560364464692483, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 6.333875508948593e-05, |
|
"loss": 0.7986, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 6.5831435079726655, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 6.259846209829551e-05, |
|
"loss": 0.7912, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 6.605922551252847, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 6.186054459962399e-05, |
|
"loss": 0.7962, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 6.628701594533029, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 6.112504946099604e-05, |
|
"loss": 0.7917, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 6.651480637813211, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 6.039202339608432e-05, |
|
"loss": 0.7961, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 6.674259681093394, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 5.966151296174268e-05, |
|
"loss": 0.7991, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 6.697038724373576, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 5.8933564555049105e-05, |
|
"loss": 0.7883, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 6.719817767653758, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 5.820822441035899e-05, |
|
"loss": 0.7985, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 6.742596810933941, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 5.7485538596368496e-05, |
|
"loss": 0.798, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 6.765375854214123, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 5.6765553013188766e-05, |
|
"loss": 0.7969, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 6.788154897494305, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 5.6048313389430484e-05, |
|
"loss": 0.7892, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 6.810933940774487, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 5.533386527929962e-05, |
|
"loss": 0.795, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 6.83371298405467, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 5.462225405970401e-05, |
|
"loss": 0.796, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 6.856492027334852, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 5.391352492737157e-05, |
|
"loss": 0.7919, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 6.879271070615034, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 5.3207722895979406e-05, |
|
"loss": 0.7903, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 6.902050113895216, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 5.2504892793295e-05, |
|
"loss": 0.795, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 6.924829157175399, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 5.1805079258329056e-05, |
|
"loss": 0.7927, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 6.947608200455581, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 5.110832673850039e-05, |
|
"loss": 0.7938, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 6.970387243735763, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 5.041467948681269e-05, |
|
"loss": 0.8044, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 6.993166287015946, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.9724181559044234e-05, |
|
"loss": 0.7918, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 6.997722095671982, |
|
"eval_loss": 2.4591777324676514, |
|
"eval_runtime": 0.2545, |
|
"eval_samples_per_second": 39.288, |
|
"eval_steps_per_second": 3.929, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 7.015945330296128, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 4.903687681094942e-05, |
|
"loss": 0.7889, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 7.03872437357631, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 4.835280889547351e-05, |
|
"loss": 0.7757, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 7.061503416856492, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 4.767202125998005e-05, |
|
"loss": 0.7753, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 7.0842824601366745, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 4.699455714349152e-05, |
|
"loss": 0.7781, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 7.1070615034168565, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 4.6320459573942856e-05, |
|
"loss": 0.7856, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 7.1298405466970385, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 4.564977136544873e-05, |
|
"loss": 0.7768, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 7.152619589977221, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 4.49825351155843e-05, |
|
"loss": 0.7854, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 7.175398633257403, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 4.431879320267972e-05, |
|
"loss": 0.789, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 7.198177676537585, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 4.3658587783128425e-05, |
|
"loss": 0.7933, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 7.220956719817767, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 4.300196078870982e-05, |
|
"loss": 0.7788, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 7.24373576309795, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 4.2348953923925916e-05, |
|
"loss": 0.786, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 7.266514806378132, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 4.16996086633526e-05, |
|
"loss": 0.7851, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 7.289293849658314, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 4.105396624900538e-05, |
|
"loss": 0.7775, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 7.312072892938497, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 4.041206768772022e-05, |
|
"loss": 0.7796, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 7.334851936218679, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 3.977395374854871e-05, |
|
"loss": 0.7814, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 7.357630979498861, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 3.913966496016891e-05, |
|
"loss": 0.7798, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 7.380410022779043, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 3.850924160831115e-05, |
|
"loss": 0.7928, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 7.403189066059226, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 3.788272373319955e-05, |
|
"loss": 0.7922, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 7.425968109339408, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 3.726015112700859e-05, |
|
"loss": 0.7822, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 7.44874715261959, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 3.6641563331336125e-05, |
|
"loss": 0.7845, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 7.471526195899772, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 3.6026999634691725e-05, |
|
"loss": 0.7817, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 7.494305239179955, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 3.541649907000147e-05, |
|
"loss": 0.7746, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 7.517084282460137, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 3.4810100412128747e-05, |
|
"loss": 0.7886, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 7.539863325740319, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 3.42078421754117e-05, |
|
"loss": 0.7861, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 7.562642369020502, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 3.360976261121684e-05, |
|
"loss": 0.7778, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 7.585421412300684, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.3015899705509734e-05, |
|
"loss": 0.7829, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 7.6082004555808656, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 3.242629117644229e-05, |
|
"loss": 0.7799, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 7.6309794988610475, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.184097447195732e-05, |
|
"loss": 0.7734, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 7.65375854214123, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 3.125998676740987e-05, |
|
"loss": 0.7796, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 7.676537585421412, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 3.068336496320631e-05, |
|
"loss": 0.787, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 7.699316628701594, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 3.0111145682460507e-05, |
|
"loss": 0.7842, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 7.722095671981776, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 2.9543365268667867e-05, |
|
"loss": 0.7885, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 7.744874715261959, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 2.8980059783396953e-05, |
|
"loss": 0.7773, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 7.767653758542141, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.8421265003999286e-05, |
|
"loss": 0.785, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 7.790432801822323, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 2.7867016421336776e-05, |
|
"loss": 0.7875, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 7.813211845102506, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.7317349237527724e-05, |
|
"loss": 0.7884, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 7.835990888382688, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 2.6772298363710956e-05, |
|
"loss": 0.7823, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 7.85876993166287, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 2.6231898417828603e-05, |
|
"loss": 0.7754, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 7.881548974943052, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 2.569618372242727e-05, |
|
"loss": 0.7713, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 7.904328018223235, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 2.5165188302478215e-05, |
|
"loss": 0.7828, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 7.927107061503417, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 2.4638945883216235e-05, |
|
"loss": 0.7912, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 7.949886104783599, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 2.411748988799769e-05, |
|
"loss": 0.7793, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 7.972665148063781, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 2.3600853436177672e-05, |
|
"loss": 0.7779, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 7.995444191343964, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 2.3089069341006565e-05, |
|
"loss": 0.7847, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 2.4783217906951904, |
|
"eval_runtime": 0.2354, |
|
"eval_samples_per_second": 42.482, |
|
"eval_steps_per_second": 4.248, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 8.018223234624147, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.2582170107545852e-05, |
|
"loss": 0.7742, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 8.041002277904328, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 2.2080187930603668e-05, |
|
"loss": 0.7806, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 8.06378132118451, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 2.1583154692689976e-05, |
|
"loss": 0.7805, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 8.086560364464692, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.109110196199171e-05, |
|
"loss": 0.773, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 8.109339407744875, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 2.0604060990367624e-05, |
|
"loss": 0.7698, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 8.132118451025057, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 2.0122062711363532e-05, |
|
"loss": 0.7773, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 8.154897494305239, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 1.9645137738247422e-05, |
|
"loss": 0.7744, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 8.177676537585421, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 1.9173316362065384e-05, |
|
"loss": 0.7799, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 8.200455580865604, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 1.8706628549717452e-05, |
|
"loss": 0.7766, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 8.223234624145785, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 1.824510394205453e-05, |
|
"loss": 0.7669, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 8.246013667425968, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 1.7788771851995655e-05, |
|
"loss": 0.7799, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 8.268792710706151, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 1.7337661262666294e-05, |
|
"loss": 0.7787, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 8.291571753986332, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.6891800825557535e-05, |
|
"loss": 0.7788, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 8.314350797266515, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 1.6451218858706374e-05, |
|
"loss": 0.7828, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 8.337129840546696, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.601594334489702e-05, |
|
"loss": 0.7714, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 8.35990888382688, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.5586001929883865e-05, |
|
"loss": 0.7842, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 8.382687927107062, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 1.516142192063521e-05, |
|
"loss": 0.7746, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 8.405466970387243, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 1.474223028359939e-05, |
|
"loss": 0.7764, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 8.428246013667426, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.4328453642991646e-05, |
|
"loss": 0.7726, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 8.451025056947609, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 1.392011827910341e-05, |
|
"loss": 0.7766, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 8.47380410022779, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 1.3517250126632986e-05, |
|
"loss": 0.7757, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 8.496583143507973, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 1.311987477303842e-05, |
|
"loss": 0.773, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 8.519362186788156, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 1.2728017456912344e-05, |
|
"loss": 0.7793, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 8.542141230068337, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.2341703066379074e-05, |
|
"loss": 0.7665, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 8.56492027334852, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 1.1960956137513701e-05, |
|
"loss": 0.7737, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 8.5876993166287, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 1.158580085278398e-05, |
|
"loss": 0.7753, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 8.610478359908884, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 1.1216261039514087e-05, |
|
"loss": 0.7727, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 8.633257403189067, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 1.0852360168371656e-05, |
|
"loss": 0.7775, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 8.656036446469248, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 1.049412135187675e-05, |
|
"loss": 0.7831, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 8.67881548974943, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.0141567342934132e-05, |
|
"loss": 0.7757, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 8.701594533029613, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 9.794720533388024e-06, |
|
"loss": 0.7719, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 8.724373576309794, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 9.453602952599982e-06, |
|
"loss": 0.7691, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 8.747152619589977, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 9.118236266049707e-06, |
|
"loss": 0.7745, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 8.76993166287016, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 8.788641773959105e-06, |
|
"loss": 0.7774, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 8.792710706150341, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 8.464840409939267e-06, |
|
"loss": 0.7807, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 8.815489749430524, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 8.146852739661105e-06, |
|
"loss": 0.7765, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 8.838268792710707, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 7.834698959548914e-06, |
|
"loss": 0.7777, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 8.861047835990888, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 7.528398895497924e-06, |
|
"loss": 0.7732, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 8.883826879271071, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 7.2279720016148244e-06, |
|
"loss": 0.7691, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 8.906605922551252, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 6.933437358982409e-06, |
|
"loss": 0.7769, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 8.929384965831435, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 6.6448136744474474e-06, |
|
"loss": 0.779, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 8.952164009111618, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 6.36211927943271e-06, |
|
"loss": 0.7825, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 8.974943052391799, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 6.085372128772637e-06, |
|
"loss": 0.7628, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 8.997722095671982, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 5.814589799572956e-06, |
|
"loss": 0.7808, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 8.997722095671982, |
|
"eval_loss": 2.488940477371216, |
|
"eval_runtime": 0.2524, |
|
"eval_samples_per_second": 39.612, |
|
"eval_steps_per_second": 3.961, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 9.020501138952165, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 5.549789490094304e-06, |
|
"loss": 0.7739, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 9.043280182232346, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 5.290988018659937e-06, |
|
"loss": 0.7769, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 9.066059225512529, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 5.038201822587474e-06, |
|
"loss": 0.779, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 9.08883826879271, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 4.79144695714504e-06, |
|
"loss": 0.7766, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 9.111617312072893, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 4.550739094531386e-06, |
|
"loss": 0.7772, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 9.134396355353076, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.316093522880648e-06, |
|
"loss": 0.7784, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 9.157175398633257, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.087525145291204e-06, |
|
"loss": 0.774, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 9.17995444191344, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 3.865048478879241e-06, |
|
"loss": 0.7727, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 9.202733485193622, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 3.6486776538566803e-06, |
|
"loss": 0.7746, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 9.225512528473804, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 3.4384264126337328e-06, |
|
"loss": 0.779, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 9.248291571753986, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 3.2343081089460603e-06, |
|
"loss": 0.7734, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 9.27107061503417, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 3.0363357070066544e-06, |
|
"loss": 0.7691, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 9.29384965831435, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 2.8445217806824077e-06, |
|
"loss": 0.7853, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 9.316628701594533, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 2.658878512695562e-06, |
|
"loss": 0.7779, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 9.339407744874716, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 2.4794176938498837e-06, |
|
"loss": 0.7784, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 9.362186788154897, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 2.30615072228183e-06, |
|
"loss": 0.7721, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 9.38496583143508, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 2.139088602736616e-06, |
|
"loss": 0.7761, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 9.407744874715261, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 1.9782419458692193e-06, |
|
"loss": 0.7715, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 9.430523917995444, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 1.8236209675705274e-06, |
|
"loss": 0.7753, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 9.453302961275627, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 1.6752354883184717e-06, |
|
"loss": 0.7718, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 9.476082004555808, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 1.5330949325542797e-06, |
|
"loss": 0.7713, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 9.498861047835991, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 1.397208328083921e-06, |
|
"loss": 0.7747, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 9.521640091116174, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 1.2675843055046765e-06, |
|
"loss": 0.7831, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 9.544419134396355, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 1.144231097657078e-06, |
|
"loss": 0.7693, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 9.567198177676538, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 1.0271565391018922e-06, |
|
"loss": 0.766, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 9.589977220956719, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.163680656226303e-07, |
|
"loss": 0.7648, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 9.612756264236902, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 8.118727137532034e-07, |
|
"loss": 0.7703, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 9.635535307517085, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 7.136771203310245e-07, |
|
"loss": 0.7743, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 9.658314350797266, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 6.21787522075512e-07, |
|
"loss": 0.7761, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 9.681093394077449, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 5.362097551919631e-07, |
|
"loss": 0.7649, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 9.703872437357631, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.569492550008603e-07, |
|
"loss": 0.7786, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 9.726651480637813, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 3.84011055592659e-07, |
|
"loss": 0.7718, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 9.749430523917995, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.1739978950806603e-07, |
|
"loss": 0.7755, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 9.772209567198178, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 2.5711968744382974e-07, |
|
"loss": 0.767, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 9.79498861047836, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 2.0317457798398888e-07, |
|
"loss": 0.7721, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 9.817767653758542, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.5556788735676675e-07, |
|
"loss": 0.7766, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 9.840546697038725, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 1.143026392168789e-07, |
|
"loss": 0.7745, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 9.863325740318906, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 7.938145445357536e-08, |
|
"loss": 0.7757, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 9.88610478359909, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 5.0806551024129565e-08, |
|
"loss": 0.7783, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 9.90888382687927, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 2.8579743813006432e-08, |
|
"loss": 0.7669, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 9.931662870159453, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 1.270244451652136e-08, |
|
"loss": 0.7725, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 9.954441913439636, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.175661553256326e-09, |
|
"loss": 0.7773, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 9.977220956719817, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0, |
|
"loss": 0.7794, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 9.977220956719817, |
|
"eval_loss": 2.486895799636841, |
|
"eval_runtime": 0.2341, |
|
"eval_samples_per_second": 42.715, |
|
"eval_steps_per_second": 4.272, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 9.977220956719817, |
|
"step": 2190, |
|
"total_flos": 6.685995643959247e+18, |
|
"train_loss": 1.6744230088577967, |
|
"train_runtime": 5295.0828, |
|
"train_samples_per_second": 26.519, |
|
"train_steps_per_second": 0.414 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2190, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"total_flos": 6.685995643959247e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|