|
{ |
|
"best_metric": 1.6337618827819824, |
|
"best_model_checkpoint": "outputs/checkpoint-298", |
|
"epoch": 0.9987368421052631, |
|
"eval_steps": 149, |
|
"global_step": 593, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0016842105263157896, |
|
"grad_norm": 0.6960500478744507, |
|
"learning_rate": 0.001, |
|
"loss": 2.2378, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003368421052631579, |
|
"grad_norm": 1.7757582664489746, |
|
"learning_rate": 0.001, |
|
"loss": 2.3598, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0050526315789473685, |
|
"grad_norm": 4.837254047393799, |
|
"learning_rate": 0.001, |
|
"loss": 3.0649, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.006736842105263158, |
|
"grad_norm": 1.8388794660568237, |
|
"learning_rate": 0.001, |
|
"loss": 2.4077, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.008421052631578947, |
|
"grad_norm": 2.6212971210479736, |
|
"learning_rate": 0.001, |
|
"loss": 1.8794, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.010105263157894737, |
|
"grad_norm": 3.2032198905944824, |
|
"learning_rate": 0.001, |
|
"loss": 2.2291, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.011789473684210527, |
|
"grad_norm": 0.9379774332046509, |
|
"learning_rate": 0.001, |
|
"loss": 1.9836, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.013473684210526317, |
|
"grad_norm": 0.8930391073226929, |
|
"learning_rate": 0.001, |
|
"loss": 1.9855, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.015157894736842105, |
|
"grad_norm": 1.121151089668274, |
|
"learning_rate": 0.001, |
|
"loss": 2.142, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.016842105263157894, |
|
"grad_norm": 1.3604674339294434, |
|
"learning_rate": 0.001, |
|
"loss": 2.0745, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.018526315789473686, |
|
"grad_norm": 0.9475807547569275, |
|
"learning_rate": 0.001, |
|
"loss": 2.2516, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.020210526315789474, |
|
"grad_norm": 1.56779146194458, |
|
"learning_rate": 0.001, |
|
"loss": 1.9924, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.021894736842105262, |
|
"grad_norm": 1.14510178565979, |
|
"learning_rate": 0.001, |
|
"loss": 1.8669, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.023578947368421053, |
|
"grad_norm": 0.9147089719772339, |
|
"learning_rate": 0.001, |
|
"loss": 2.1365, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02526315789473684, |
|
"grad_norm": 0.7278650403022766, |
|
"learning_rate": 0.001, |
|
"loss": 2.2927, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.026947368421052633, |
|
"grad_norm": 0.6441658735275269, |
|
"learning_rate": 0.001, |
|
"loss": 2.1152, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02863157894736842, |
|
"grad_norm": 0.6069326996803284, |
|
"learning_rate": 0.001, |
|
"loss": 1.8998, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03031578947368421, |
|
"grad_norm": 0.7715803384780884, |
|
"learning_rate": 0.001, |
|
"loss": 2.3267, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.032, |
|
"grad_norm": 1.274883508682251, |
|
"learning_rate": 0.001, |
|
"loss": 1.9622, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03368421052631579, |
|
"grad_norm": 1.315987467765808, |
|
"learning_rate": 0.001, |
|
"loss": 2.0188, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03536842105263158, |
|
"grad_norm": 0.7619612216949463, |
|
"learning_rate": 0.001, |
|
"loss": 2.1692, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03705263157894737, |
|
"grad_norm": 0.6206510066986084, |
|
"learning_rate": 0.001, |
|
"loss": 1.9139, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03873684210526316, |
|
"grad_norm": 0.7554602026939392, |
|
"learning_rate": 0.001, |
|
"loss": 2.0588, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04042105263157895, |
|
"grad_norm": 0.6345531344413757, |
|
"learning_rate": 0.001, |
|
"loss": 2.1671, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.042105263157894736, |
|
"grad_norm": 0.6695383191108704, |
|
"learning_rate": 0.001, |
|
"loss": 2.02, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.043789473684210524, |
|
"grad_norm": 0.4833696186542511, |
|
"learning_rate": 0.001, |
|
"loss": 1.75, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04547368421052632, |
|
"grad_norm": 0.7931104302406311, |
|
"learning_rate": 0.001, |
|
"loss": 2.1626, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04715789473684211, |
|
"grad_norm": 1.5443921089172363, |
|
"learning_rate": 0.001, |
|
"loss": 2.0443, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.048842105263157895, |
|
"grad_norm": 0.6693266034126282, |
|
"learning_rate": 0.001, |
|
"loss": 2.2165, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05052631578947368, |
|
"grad_norm": 0.6431847810745239, |
|
"learning_rate": 0.001, |
|
"loss": 2.1791, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05221052631578947, |
|
"grad_norm": 0.5706735253334045, |
|
"learning_rate": 0.001, |
|
"loss": 1.9472, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.053894736842105266, |
|
"grad_norm": 0.7617158889770508, |
|
"learning_rate": 0.001, |
|
"loss": 2.0268, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.055578947368421054, |
|
"grad_norm": 0.5798472762107849, |
|
"learning_rate": 0.001, |
|
"loss": 1.9278, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.05726315789473684, |
|
"grad_norm": 0.5660713315010071, |
|
"learning_rate": 0.001, |
|
"loss": 1.7519, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.05894736842105263, |
|
"grad_norm": 0.5779318809509277, |
|
"learning_rate": 0.001, |
|
"loss": 1.9023, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06063157894736842, |
|
"grad_norm": 0.6591325998306274, |
|
"learning_rate": 0.001, |
|
"loss": 1.959, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06231578947368421, |
|
"grad_norm": 0.6363794803619385, |
|
"learning_rate": 0.001, |
|
"loss": 2.1155, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 0.6779230833053589, |
|
"learning_rate": 0.001, |
|
"loss": 1.9465, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.06568421052631579, |
|
"grad_norm": 0.6995664238929749, |
|
"learning_rate": 0.001, |
|
"loss": 1.8616, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.06736842105263158, |
|
"grad_norm": 0.7718273997306824, |
|
"learning_rate": 0.001, |
|
"loss": 2.1503, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06905263157894737, |
|
"grad_norm": 0.5334057807922363, |
|
"learning_rate": 0.001, |
|
"loss": 1.7283, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07073684210526315, |
|
"grad_norm": 0.5298858880996704, |
|
"learning_rate": 0.001, |
|
"loss": 1.6779, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07242105263157894, |
|
"grad_norm": 0.5175891518592834, |
|
"learning_rate": 0.001, |
|
"loss": 1.9119, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07410526315789474, |
|
"grad_norm": 0.5781148076057434, |
|
"learning_rate": 0.001, |
|
"loss": 1.7144, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.07578947368421053, |
|
"grad_norm": 0.5868591666221619, |
|
"learning_rate": 0.001, |
|
"loss": 1.8802, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.07747368421052632, |
|
"grad_norm": 0.5802803635597229, |
|
"learning_rate": 0.001, |
|
"loss": 1.7823, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.07915789473684211, |
|
"grad_norm": 0.6214857697486877, |
|
"learning_rate": 0.001, |
|
"loss": 1.6589, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.0808421052631579, |
|
"grad_norm": 0.6045804619789124, |
|
"learning_rate": 0.001, |
|
"loss": 1.9772, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08252631578947368, |
|
"grad_norm": 0.6322258114814758, |
|
"learning_rate": 0.001, |
|
"loss": 1.7932, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08421052631578947, |
|
"grad_norm": 0.6137242317199707, |
|
"learning_rate": 0.001, |
|
"loss": 1.8022, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08589473684210526, |
|
"grad_norm": 0.5677167773246765, |
|
"learning_rate": 0.001, |
|
"loss": 1.5019, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.08757894736842105, |
|
"grad_norm": 0.6881672143936157, |
|
"learning_rate": 0.001, |
|
"loss": 1.9834, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.08926315789473684, |
|
"grad_norm": 0.6887582540512085, |
|
"learning_rate": 0.001, |
|
"loss": 2.0563, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.09094736842105264, |
|
"grad_norm": 0.6267439723014832, |
|
"learning_rate": 0.001, |
|
"loss": 1.9473, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09263157894736843, |
|
"grad_norm": 0.7325497269630432, |
|
"learning_rate": 0.001, |
|
"loss": 1.7903, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.09431578947368421, |
|
"grad_norm": 0.6531684994697571, |
|
"learning_rate": 0.001, |
|
"loss": 1.8948, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.096, |
|
"grad_norm": 0.6358041763305664, |
|
"learning_rate": 0.001, |
|
"loss": 1.7967, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.09768421052631579, |
|
"grad_norm": 0.6489672660827637, |
|
"learning_rate": 0.001, |
|
"loss": 1.9836, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.09936842105263158, |
|
"grad_norm": 0.6798167824745178, |
|
"learning_rate": 0.001, |
|
"loss": 1.4925, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.10105263157894737, |
|
"grad_norm": 0.71540367603302, |
|
"learning_rate": 0.001, |
|
"loss": 1.9803, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.10273684210526315, |
|
"grad_norm": 0.7388565540313721, |
|
"learning_rate": 0.001, |
|
"loss": 1.994, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.10442105263157894, |
|
"grad_norm": 0.5921775698661804, |
|
"learning_rate": 0.001, |
|
"loss": 1.7347, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.10610526315789473, |
|
"grad_norm": 0.6923938393592834, |
|
"learning_rate": 0.001, |
|
"loss": 1.8954, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.10778947368421053, |
|
"grad_norm": 0.6679465770721436, |
|
"learning_rate": 0.001, |
|
"loss": 1.6889, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.10947368421052632, |
|
"grad_norm": 0.6221023797988892, |
|
"learning_rate": 0.001, |
|
"loss": 1.8324, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11115789473684211, |
|
"grad_norm": 0.6746177673339844, |
|
"learning_rate": 0.001, |
|
"loss": 1.822, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.1128421052631579, |
|
"grad_norm": 0.6581069231033325, |
|
"learning_rate": 0.001, |
|
"loss": 2.2108, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.11452631578947368, |
|
"grad_norm": 0.6342150568962097, |
|
"learning_rate": 0.001, |
|
"loss": 1.7536, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.11621052631578947, |
|
"grad_norm": 0.6652107834815979, |
|
"learning_rate": 0.001, |
|
"loss": 2.0814, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.11789473684210526, |
|
"grad_norm": 0.5661808848381042, |
|
"learning_rate": 0.001, |
|
"loss": 1.9419, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.11957894736842105, |
|
"grad_norm": 0.8789241313934326, |
|
"learning_rate": 0.001, |
|
"loss": 1.8391, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.12126315789473684, |
|
"grad_norm": 0.665008008480072, |
|
"learning_rate": 0.001, |
|
"loss": 1.9936, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.12294736842105262, |
|
"grad_norm": 0.647729218006134, |
|
"learning_rate": 0.001, |
|
"loss": 1.8883, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.12463157894736843, |
|
"grad_norm": 0.5845763087272644, |
|
"learning_rate": 0.001, |
|
"loss": 1.8779, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.12631578947368421, |
|
"grad_norm": 0.6005629897117615, |
|
"learning_rate": 0.001, |
|
"loss": 2.1395, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 0.570796012878418, |
|
"learning_rate": 0.001, |
|
"loss": 1.7652, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1296842105263158, |
|
"grad_norm": 0.652999222278595, |
|
"learning_rate": 0.001, |
|
"loss": 1.9479, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.13136842105263158, |
|
"grad_norm": 0.7086900472640991, |
|
"learning_rate": 0.001, |
|
"loss": 1.924, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.13305263157894737, |
|
"grad_norm": 0.8962117433547974, |
|
"learning_rate": 0.001, |
|
"loss": 1.7471, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.13473684210526315, |
|
"grad_norm": 0.9083784222602844, |
|
"learning_rate": 0.001, |
|
"loss": 1.4415, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.13642105263157894, |
|
"grad_norm": 0.6662907600402832, |
|
"learning_rate": 0.001, |
|
"loss": 1.5885, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.13810526315789473, |
|
"grad_norm": 0.748068630695343, |
|
"learning_rate": 0.001, |
|
"loss": 1.9043, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.13978947368421052, |
|
"grad_norm": 0.653835117816925, |
|
"learning_rate": 0.001, |
|
"loss": 1.9271, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1414736842105263, |
|
"grad_norm": 0.5937058925628662, |
|
"learning_rate": 0.001, |
|
"loss": 1.6577, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.1431578947368421, |
|
"grad_norm": 0.5573813319206238, |
|
"learning_rate": 0.001, |
|
"loss": 1.7685, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.14484210526315788, |
|
"grad_norm": 0.7234801054000854, |
|
"learning_rate": 0.001, |
|
"loss": 2.1049, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.14652631578947367, |
|
"grad_norm": 0.7858671545982361, |
|
"learning_rate": 0.001, |
|
"loss": 1.8385, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.1482105263157895, |
|
"grad_norm": 0.5881790518760681, |
|
"learning_rate": 0.001, |
|
"loss": 1.6881, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.14989473684210528, |
|
"grad_norm": 0.6036899089813232, |
|
"learning_rate": 0.001, |
|
"loss": 2.0763, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.15157894736842106, |
|
"grad_norm": 0.6678960919380188, |
|
"learning_rate": 0.001, |
|
"loss": 1.7332, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.15326315789473685, |
|
"grad_norm": 0.6993541717529297, |
|
"learning_rate": 0.001, |
|
"loss": 2.0437, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.15494736842105264, |
|
"grad_norm": 1.0041996240615845, |
|
"learning_rate": 0.001, |
|
"loss": 2.0819, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.15663157894736843, |
|
"grad_norm": 1.3755688667297363, |
|
"learning_rate": 0.001, |
|
"loss": 1.8761, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.15831578947368422, |
|
"grad_norm": 0.9399350881576538, |
|
"learning_rate": 0.001, |
|
"loss": 1.7925, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.6488239169120789, |
|
"learning_rate": 0.001, |
|
"loss": 2.009, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.1616842105263158, |
|
"grad_norm": 0.9083341360092163, |
|
"learning_rate": 0.001, |
|
"loss": 2.2331, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.16336842105263158, |
|
"grad_norm": 0.6239296197891235, |
|
"learning_rate": 0.001, |
|
"loss": 1.8789, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.16505263157894737, |
|
"grad_norm": 0.7653887867927551, |
|
"learning_rate": 0.001, |
|
"loss": 2.1016, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.16673684210526316, |
|
"grad_norm": 0.6791508793830872, |
|
"learning_rate": 0.001, |
|
"loss": 2.0147, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.16842105263157894, |
|
"grad_norm": 0.6757349967956543, |
|
"learning_rate": 0.001, |
|
"loss": 1.909, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.17010526315789473, |
|
"grad_norm": 0.5010210275650024, |
|
"learning_rate": 0.001, |
|
"loss": 2.0148, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.17178947368421052, |
|
"grad_norm": 0.6564686894416809, |
|
"learning_rate": 0.001, |
|
"loss": 1.707, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1734736842105263, |
|
"grad_norm": 0.6926625370979309, |
|
"learning_rate": 0.001, |
|
"loss": 2.1313, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.1751578947368421, |
|
"grad_norm": 0.8134363293647766, |
|
"learning_rate": 0.001, |
|
"loss": 1.8948, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.17684210526315788, |
|
"grad_norm": 0.8722719550132751, |
|
"learning_rate": 0.001, |
|
"loss": 1.9564, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.17852631578947367, |
|
"grad_norm": 1.5459606647491455, |
|
"learning_rate": 0.001, |
|
"loss": 1.4568, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.18021052631578946, |
|
"grad_norm": 0.672356367111206, |
|
"learning_rate": 0.001, |
|
"loss": 2.2486, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.18189473684210528, |
|
"grad_norm": 0.6597303152084351, |
|
"learning_rate": 0.001, |
|
"loss": 2.1888, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.18357894736842106, |
|
"grad_norm": 0.6516699194908142, |
|
"learning_rate": 0.001, |
|
"loss": 1.7791, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.18526315789473685, |
|
"grad_norm": 0.6535261273384094, |
|
"learning_rate": 0.001, |
|
"loss": 1.5753, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.18694736842105264, |
|
"grad_norm": 0.5394155979156494, |
|
"learning_rate": 0.001, |
|
"loss": 1.7874, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.18863157894736843, |
|
"grad_norm": 0.6403316855430603, |
|
"learning_rate": 0.001, |
|
"loss": 1.8044, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.19031578947368422, |
|
"grad_norm": 0.6894748210906982, |
|
"learning_rate": 0.001, |
|
"loss": 1.7434, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 0.5362414717674255, |
|
"learning_rate": 0.001, |
|
"loss": 1.6198, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.1936842105263158, |
|
"grad_norm": 0.5218887329101562, |
|
"learning_rate": 0.001, |
|
"loss": 1.7941, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.19536842105263158, |
|
"grad_norm": 0.5951269865036011, |
|
"learning_rate": 0.001, |
|
"loss": 2.115, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.19705263157894737, |
|
"grad_norm": 1.0313245058059692, |
|
"learning_rate": 0.001, |
|
"loss": 1.9144, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.19873684210526316, |
|
"grad_norm": 0.6776890754699707, |
|
"learning_rate": 0.001, |
|
"loss": 1.6852, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.20042105263157894, |
|
"grad_norm": 0.5906718373298645, |
|
"learning_rate": 0.001, |
|
"loss": 1.7103, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.20210526315789473, |
|
"grad_norm": 0.6788285970687866, |
|
"learning_rate": 0.001, |
|
"loss": 2.1048, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.20378947368421052, |
|
"grad_norm": 0.7527502179145813, |
|
"learning_rate": 0.001, |
|
"loss": 1.8199, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.2054736842105263, |
|
"grad_norm": 0.5279136896133423, |
|
"learning_rate": 0.001, |
|
"loss": 1.5327, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2071578947368421, |
|
"grad_norm": 0.7087485194206238, |
|
"learning_rate": 0.001, |
|
"loss": 1.8483, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.20884210526315788, |
|
"grad_norm": 0.7274911999702454, |
|
"learning_rate": 0.001, |
|
"loss": 2.4062, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.21052631578947367, |
|
"grad_norm": 0.5436732769012451, |
|
"learning_rate": 0.001, |
|
"loss": 1.7369, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.21221052631578946, |
|
"grad_norm": 0.5522803664207458, |
|
"learning_rate": 0.001, |
|
"loss": 1.8024, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.21389473684210528, |
|
"grad_norm": 0.7198563814163208, |
|
"learning_rate": 0.001, |
|
"loss": 2.2969, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.21557894736842106, |
|
"grad_norm": 0.6230013370513916, |
|
"learning_rate": 0.001, |
|
"loss": 1.8566, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.21726315789473685, |
|
"grad_norm": 0.5977436304092407, |
|
"learning_rate": 0.001, |
|
"loss": 1.339, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.21894736842105264, |
|
"grad_norm": 0.5400142669677734, |
|
"learning_rate": 0.001, |
|
"loss": 1.6346, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.22063157894736843, |
|
"grad_norm": 0.6537740230560303, |
|
"learning_rate": 0.001, |
|
"loss": 1.981, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.22231578947368422, |
|
"grad_norm": 0.6012418866157532, |
|
"learning_rate": 0.001, |
|
"loss": 1.9957, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.224, |
|
"grad_norm": 0.6363667845726013, |
|
"learning_rate": 0.001, |
|
"loss": 2.0906, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.2256842105263158, |
|
"grad_norm": 0.7009410262107849, |
|
"learning_rate": 0.001, |
|
"loss": 2.0259, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.22736842105263158, |
|
"grad_norm": 0.6076754331588745, |
|
"learning_rate": 0.001, |
|
"loss": 2.0623, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.22905263157894737, |
|
"grad_norm": 0.6430286169052124, |
|
"learning_rate": 0.001, |
|
"loss": 1.9965, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.23073684210526316, |
|
"grad_norm": 0.7743528485298157, |
|
"learning_rate": 0.001, |
|
"loss": 1.6476, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.23242105263157894, |
|
"grad_norm": 0.7490441203117371, |
|
"learning_rate": 0.001, |
|
"loss": 2.1016, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.23410526315789473, |
|
"grad_norm": 0.6852337121963501, |
|
"learning_rate": 0.001, |
|
"loss": 1.7251, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.23578947368421052, |
|
"grad_norm": 0.6012661457061768, |
|
"learning_rate": 0.001, |
|
"loss": 1.985, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.2374736842105263, |
|
"grad_norm": 0.48385104537010193, |
|
"learning_rate": 0.001, |
|
"loss": 1.7926, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2391578947368421, |
|
"grad_norm": 0.5751200914382935, |
|
"learning_rate": 0.001, |
|
"loss": 1.8795, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.24084210526315789, |
|
"grad_norm": 0.571426510810852, |
|
"learning_rate": 0.001, |
|
"loss": 1.6457, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.24252631578947367, |
|
"grad_norm": 0.6982892751693726, |
|
"learning_rate": 0.001, |
|
"loss": 2.0313, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.24421052631578946, |
|
"grad_norm": 0.7380142211914062, |
|
"learning_rate": 0.001, |
|
"loss": 2.1306, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.24589473684210525, |
|
"grad_norm": 0.97590571641922, |
|
"learning_rate": 0.001, |
|
"loss": 1.9926, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.24757894736842107, |
|
"grad_norm": 0.8416200876235962, |
|
"learning_rate": 0.001, |
|
"loss": 1.733, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.24926315789473685, |
|
"grad_norm": 0.6639004945755005, |
|
"learning_rate": 0.001, |
|
"loss": 1.9836, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.25094736842105264, |
|
"grad_norm": 0.6488214135169983, |
|
"learning_rate": 0.001, |
|
"loss": 1.8941, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.25094736842105264, |
|
"eval_loss": 1.7599804401397705, |
|
"eval_runtime": 0.1738, |
|
"eval_samples_per_second": 5.754, |
|
"eval_steps_per_second": 5.754, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.25263157894736843, |
|
"grad_norm": 0.524825930595398, |
|
"learning_rate": 0.001, |
|
"loss": 1.7767, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2543157894736842, |
|
"grad_norm": 0.49492335319519043, |
|
"learning_rate": 0.001, |
|
"loss": 1.775, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 0.5911272168159485, |
|
"learning_rate": 0.001, |
|
"loss": 1.8286, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2576842105263158, |
|
"grad_norm": 0.8157614469528198, |
|
"learning_rate": 0.001, |
|
"loss": 1.8913, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.2593684210526316, |
|
"grad_norm": 0.7529662847518921, |
|
"learning_rate": 0.001, |
|
"loss": 1.8988, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.26105263157894737, |
|
"grad_norm": 0.8185762763023376, |
|
"learning_rate": 0.001, |
|
"loss": 2.1685, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.26273684210526316, |
|
"grad_norm": 0.7138445377349854, |
|
"learning_rate": 0.001, |
|
"loss": 1.8507, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.26442105263157895, |
|
"grad_norm": 0.5665900707244873, |
|
"learning_rate": 0.001, |
|
"loss": 1.525, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.26610526315789473, |
|
"grad_norm": 0.6799633502960205, |
|
"learning_rate": 0.001, |
|
"loss": 1.9605, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2677894736842105, |
|
"grad_norm": 0.6787411570549011, |
|
"learning_rate": 0.001, |
|
"loss": 1.6556, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2694736842105263, |
|
"grad_norm": 0.8427496552467346, |
|
"learning_rate": 0.001, |
|
"loss": 1.8083, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2711578947368421, |
|
"grad_norm": 0.6665315628051758, |
|
"learning_rate": 0.001, |
|
"loss": 1.966, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2728421052631579, |
|
"grad_norm": 0.6209701895713806, |
|
"learning_rate": 0.001, |
|
"loss": 1.79, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2745263157894737, |
|
"grad_norm": 0.5687562823295593, |
|
"learning_rate": 0.001, |
|
"loss": 1.779, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.27621052631578946, |
|
"grad_norm": 0.5852699279785156, |
|
"learning_rate": 0.001, |
|
"loss": 1.4817, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.27789473684210525, |
|
"grad_norm": 0.6601601839065552, |
|
"learning_rate": 0.001, |
|
"loss": 1.9279, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.27957894736842104, |
|
"grad_norm": 0.5629734992980957, |
|
"learning_rate": 0.001, |
|
"loss": 1.821, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2812631578947368, |
|
"grad_norm": 0.7956101894378662, |
|
"learning_rate": 0.001, |
|
"loss": 1.9925, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.2829473684210526, |
|
"grad_norm": 0.7143905758857727, |
|
"learning_rate": 0.001, |
|
"loss": 2.0572, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2846315789473684, |
|
"grad_norm": 0.7645180821418762, |
|
"learning_rate": 0.001, |
|
"loss": 1.9208, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.2863157894736842, |
|
"grad_norm": 0.7295411825180054, |
|
"learning_rate": 0.001, |
|
"loss": 1.962, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.288, |
|
"grad_norm": 0.7587769031524658, |
|
"learning_rate": 0.001, |
|
"loss": 1.9372, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.28968421052631577, |
|
"grad_norm": 0.6111007332801819, |
|
"learning_rate": 0.001, |
|
"loss": 1.5688, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.29136842105263155, |
|
"grad_norm": 0.7311589121818542, |
|
"learning_rate": 0.001, |
|
"loss": 1.8835, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.29305263157894734, |
|
"grad_norm": 0.6812251210212708, |
|
"learning_rate": 0.001, |
|
"loss": 1.8754, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.29473684210526313, |
|
"grad_norm": 0.6704198718070984, |
|
"learning_rate": 0.001, |
|
"loss": 1.8445, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.296421052631579, |
|
"grad_norm": 0.7953410148620605, |
|
"learning_rate": 0.001, |
|
"loss": 2.0227, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.29810526315789476, |
|
"grad_norm": 0.8933955430984497, |
|
"learning_rate": 0.001, |
|
"loss": 1.9599, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.29978947368421055, |
|
"grad_norm": 0.7686247825622559, |
|
"learning_rate": 0.001, |
|
"loss": 1.8684, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.30147368421052634, |
|
"grad_norm": 0.6903300881385803, |
|
"learning_rate": 0.001, |
|
"loss": 1.9449, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3031578947368421, |
|
"grad_norm": 0.6970056295394897, |
|
"learning_rate": 0.001, |
|
"loss": 1.6342, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3048421052631579, |
|
"grad_norm": 0.5772702693939209, |
|
"learning_rate": 0.001, |
|
"loss": 1.7464, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3065263157894737, |
|
"grad_norm": 0.6376874446868896, |
|
"learning_rate": 0.001, |
|
"loss": 1.82, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.3082105263157895, |
|
"grad_norm": 0.761457085609436, |
|
"learning_rate": 0.001, |
|
"loss": 1.7796, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.3098947368421053, |
|
"grad_norm": 0.6312285661697388, |
|
"learning_rate": 0.001, |
|
"loss": 2.0363, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.31157894736842107, |
|
"grad_norm": 0.8040784001350403, |
|
"learning_rate": 0.001, |
|
"loss": 2.0369, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.31326315789473685, |
|
"grad_norm": 0.7520210146903992, |
|
"learning_rate": 0.001, |
|
"loss": 2.1518, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.31494736842105264, |
|
"grad_norm": 0.6293883919715881, |
|
"learning_rate": 0.001, |
|
"loss": 1.9187, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.31663157894736843, |
|
"grad_norm": 0.7219449877738953, |
|
"learning_rate": 0.001, |
|
"loss": 1.5069, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.3183157894736842, |
|
"grad_norm": 0.8080244660377502, |
|
"learning_rate": 0.001, |
|
"loss": 1.6169, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.8044946193695068, |
|
"learning_rate": 0.001, |
|
"loss": 1.8206, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3216842105263158, |
|
"grad_norm": 0.894588828086853, |
|
"learning_rate": 0.001, |
|
"loss": 2.0623, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.3233684210526316, |
|
"grad_norm": 0.6865862607955933, |
|
"learning_rate": 0.001, |
|
"loss": 1.7629, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.32505263157894737, |
|
"grad_norm": 0.9992401003837585, |
|
"learning_rate": 0.001, |
|
"loss": 2.2471, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.32673684210526316, |
|
"grad_norm": 0.8792619705200195, |
|
"learning_rate": 0.001, |
|
"loss": 2.0458, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.32842105263157895, |
|
"grad_norm": 0.8508814573287964, |
|
"learning_rate": 0.001, |
|
"loss": 1.8746, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.33010526315789473, |
|
"grad_norm": 0.6977102756500244, |
|
"learning_rate": 0.001, |
|
"loss": 2.0411, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.3317894736842105, |
|
"grad_norm": 0.8430894017219543, |
|
"learning_rate": 0.001, |
|
"loss": 2.0066, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.3334736842105263, |
|
"grad_norm": 0.8048614859580994, |
|
"learning_rate": 0.001, |
|
"loss": 1.9879, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.3351578947368421, |
|
"grad_norm": 0.8604184985160828, |
|
"learning_rate": 0.001, |
|
"loss": 2.0952, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.3368421052631579, |
|
"grad_norm": 1.0472347736358643, |
|
"learning_rate": 0.001, |
|
"loss": 1.8939, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.3385263157894737, |
|
"grad_norm": 0.761587381362915, |
|
"learning_rate": 0.001, |
|
"loss": 1.9897, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.34021052631578946, |
|
"grad_norm": 0.7086905837059021, |
|
"learning_rate": 0.001, |
|
"loss": 1.8068, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.34189473684210525, |
|
"grad_norm": 0.8213825225830078, |
|
"learning_rate": 0.001, |
|
"loss": 1.682, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.34357894736842104, |
|
"grad_norm": 0.8660598397254944, |
|
"learning_rate": 0.001, |
|
"loss": 2.0042, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.3452631578947368, |
|
"grad_norm": 0.9913591742515564, |
|
"learning_rate": 0.001, |
|
"loss": 1.9211, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3469473684210526, |
|
"grad_norm": 0.7640036940574646, |
|
"learning_rate": 0.001, |
|
"loss": 2.0904, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.3486315789473684, |
|
"grad_norm": 0.7359378337860107, |
|
"learning_rate": 0.001, |
|
"loss": 1.9424, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.3503157894736842, |
|
"grad_norm": 0.7246221303939819, |
|
"learning_rate": 0.001, |
|
"loss": 1.9887, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.352, |
|
"grad_norm": 0.8169429302215576, |
|
"learning_rate": 0.001, |
|
"loss": 1.8335, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.35368421052631577, |
|
"grad_norm": 0.903678834438324, |
|
"learning_rate": 0.001, |
|
"loss": 1.6703, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.35536842105263156, |
|
"grad_norm": 0.7328379154205322, |
|
"learning_rate": 0.001, |
|
"loss": 2.1565, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.35705263157894734, |
|
"grad_norm": 0.8003093004226685, |
|
"learning_rate": 0.001, |
|
"loss": 1.7172, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.35873684210526313, |
|
"grad_norm": 0.7532063722610474, |
|
"learning_rate": 0.001, |
|
"loss": 2.0264, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3604210526315789, |
|
"grad_norm": 0.7619852423667908, |
|
"learning_rate": 0.001, |
|
"loss": 1.7766, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.36210526315789476, |
|
"grad_norm": 0.7145585417747498, |
|
"learning_rate": 0.001, |
|
"loss": 1.8829, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.36378947368421055, |
|
"grad_norm": 0.739275336265564, |
|
"learning_rate": 0.001, |
|
"loss": 1.6963, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.36547368421052634, |
|
"grad_norm": 0.8174360990524292, |
|
"learning_rate": 0.001, |
|
"loss": 2.028, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3671578947368421, |
|
"grad_norm": 0.7873148322105408, |
|
"learning_rate": 0.001, |
|
"loss": 1.9912, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.3688421052631579, |
|
"grad_norm": 0.7683485746383667, |
|
"learning_rate": 0.001, |
|
"loss": 2.0106, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.3705263157894737, |
|
"grad_norm": 0.841464102268219, |
|
"learning_rate": 0.001, |
|
"loss": 1.3909, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3722105263157895, |
|
"grad_norm": 0.9224113821983337, |
|
"learning_rate": 0.001, |
|
"loss": 2.0642, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.3738947368421053, |
|
"grad_norm": 1.3310387134552002, |
|
"learning_rate": 0.001, |
|
"loss": 2.0075, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.37557894736842107, |
|
"grad_norm": 0.8936915397644043, |
|
"learning_rate": 0.001, |
|
"loss": 1.8838, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.37726315789473686, |
|
"grad_norm": 0.7084046602249146, |
|
"learning_rate": 0.001, |
|
"loss": 2.0105, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.37894736842105264, |
|
"grad_norm": 0.802139401435852, |
|
"learning_rate": 0.001, |
|
"loss": 2.1188, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.38063157894736843, |
|
"grad_norm": 0.8018360137939453, |
|
"learning_rate": 0.001, |
|
"loss": 1.8581, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.3823157894736842, |
|
"grad_norm": 0.8070486187934875, |
|
"learning_rate": 0.001, |
|
"loss": 1.9121, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.7557722330093384, |
|
"learning_rate": 0.001, |
|
"loss": 1.8515, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.3856842105263158, |
|
"grad_norm": 0.8111100196838379, |
|
"learning_rate": 0.001, |
|
"loss": 2.0789, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.3873684210526316, |
|
"grad_norm": 0.9642356038093567, |
|
"learning_rate": 0.001, |
|
"loss": 2.2856, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.38905263157894737, |
|
"grad_norm": 0.9470245242118835, |
|
"learning_rate": 0.001, |
|
"loss": 2.0114, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.39073684210526316, |
|
"grad_norm": 0.8576509952545166, |
|
"learning_rate": 0.001, |
|
"loss": 1.8915, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.39242105263157895, |
|
"grad_norm": 0.8524518013000488, |
|
"learning_rate": 0.001, |
|
"loss": 1.9937, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.39410526315789474, |
|
"grad_norm": 1.523067831993103, |
|
"learning_rate": 0.001, |
|
"loss": 1.641, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3957894736842105, |
|
"grad_norm": 0.9369080066680908, |
|
"learning_rate": 0.001, |
|
"loss": 2.0977, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.3974736842105263, |
|
"grad_norm": 0.8704274296760559, |
|
"learning_rate": 0.001, |
|
"loss": 2.0126, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.3991578947368421, |
|
"grad_norm": 0.8420674800872803, |
|
"learning_rate": 0.001, |
|
"loss": 1.9577, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4008421052631579, |
|
"grad_norm": 0.7344264388084412, |
|
"learning_rate": 0.001, |
|
"loss": 1.8681, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.4025263157894737, |
|
"grad_norm": 0.7144782543182373, |
|
"learning_rate": 0.001, |
|
"loss": 1.9696, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.40421052631578946, |
|
"grad_norm": 0.8455988168716431, |
|
"learning_rate": 0.001, |
|
"loss": 1.7568, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.40589473684210525, |
|
"grad_norm": 0.807806134223938, |
|
"learning_rate": 0.001, |
|
"loss": 2.1119, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.40757894736842104, |
|
"grad_norm": 0.8274264335632324, |
|
"learning_rate": 0.001, |
|
"loss": 1.9695, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.40926315789473683, |
|
"grad_norm": 0.9100606441497803, |
|
"learning_rate": 0.001, |
|
"loss": 1.9766, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.4109473684210526, |
|
"grad_norm": 1.1465590000152588, |
|
"learning_rate": 0.001, |
|
"loss": 1.8377, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.4126315789473684, |
|
"grad_norm": 0.7355701327323914, |
|
"learning_rate": 0.001, |
|
"loss": 1.3323, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.4143157894736842, |
|
"grad_norm": 0.8275692462921143, |
|
"learning_rate": 0.001, |
|
"loss": 1.9701, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.416, |
|
"grad_norm": 0.848210334777832, |
|
"learning_rate": 0.001, |
|
"loss": 2.0912, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.41768421052631577, |
|
"grad_norm": 0.8262030482292175, |
|
"learning_rate": 0.001, |
|
"loss": 2.115, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.41936842105263156, |
|
"grad_norm": 0.6998792886734009, |
|
"learning_rate": 0.001, |
|
"loss": 1.8737, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.42105263157894735, |
|
"grad_norm": 0.8081846237182617, |
|
"learning_rate": 0.001, |
|
"loss": 2.2837, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.42273684210526313, |
|
"grad_norm": 0.8310023546218872, |
|
"learning_rate": 0.001, |
|
"loss": 2.2355, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.4244210526315789, |
|
"grad_norm": 1.157334804534912, |
|
"learning_rate": 0.001, |
|
"loss": 2.3762, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.4261052631578947, |
|
"grad_norm": 0.7893511056900024, |
|
"learning_rate": 0.001, |
|
"loss": 1.8737, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.42778947368421055, |
|
"grad_norm": 0.959355354309082, |
|
"learning_rate": 0.001, |
|
"loss": 2.0047, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.42947368421052634, |
|
"grad_norm": 0.8904256224632263, |
|
"learning_rate": 0.001, |
|
"loss": 1.8809, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.43115789473684213, |
|
"grad_norm": 0.9054950475692749, |
|
"learning_rate": 0.001, |
|
"loss": 2.3487, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.4328421052631579, |
|
"grad_norm": 0.8389487862586975, |
|
"learning_rate": 0.001, |
|
"loss": 2.0993, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.4345263157894737, |
|
"grad_norm": 1.0516859292984009, |
|
"learning_rate": 0.001, |
|
"loss": 1.9482, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4362105263157895, |
|
"grad_norm": 1.0936013460159302, |
|
"learning_rate": 0.001, |
|
"loss": 2.2888, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.4378947368421053, |
|
"grad_norm": 0.894350528717041, |
|
"learning_rate": 0.001, |
|
"loss": 1.9703, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.43957894736842107, |
|
"grad_norm": 0.8147197961807251, |
|
"learning_rate": 0.001, |
|
"loss": 1.8964, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.44126315789473686, |
|
"grad_norm": 0.6683039665222168, |
|
"learning_rate": 0.001, |
|
"loss": 1.8701, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.44294736842105265, |
|
"grad_norm": 0.83613121509552, |
|
"learning_rate": 0.001, |
|
"loss": 2.3627, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.44463157894736843, |
|
"grad_norm": 0.724908173084259, |
|
"learning_rate": 0.001, |
|
"loss": 1.8411, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.4463157894736842, |
|
"grad_norm": 0.7576204538345337, |
|
"learning_rate": 0.001, |
|
"loss": 2.1275, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 0.7902230620384216, |
|
"learning_rate": 0.001, |
|
"loss": 1.663, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.4496842105263158, |
|
"grad_norm": 1.5043684244155884, |
|
"learning_rate": 0.001, |
|
"loss": 2.1615, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.4513684210526316, |
|
"grad_norm": 0.8250028491020203, |
|
"learning_rate": 0.001, |
|
"loss": 1.8787, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.4530526315789474, |
|
"grad_norm": 0.7849893569946289, |
|
"learning_rate": 0.001, |
|
"loss": 2.0168, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.45473684210526316, |
|
"grad_norm": 0.8021153807640076, |
|
"learning_rate": 0.001, |
|
"loss": 2.2486, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.45642105263157895, |
|
"grad_norm": 0.7869856953620911, |
|
"learning_rate": 0.001, |
|
"loss": 1.5522, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.45810526315789474, |
|
"grad_norm": 0.813165009021759, |
|
"learning_rate": 0.001, |
|
"loss": 2.0801, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.4597894736842105, |
|
"grad_norm": 0.8223312497138977, |
|
"learning_rate": 0.001, |
|
"loss": 1.9111, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.4614736842105263, |
|
"grad_norm": 0.8650989532470703, |
|
"learning_rate": 0.001, |
|
"loss": 1.716, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.4631578947368421, |
|
"grad_norm": 0.7596947550773621, |
|
"learning_rate": 0.001, |
|
"loss": 1.9046, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.4648421052631579, |
|
"grad_norm": 0.7211440801620483, |
|
"learning_rate": 0.001, |
|
"loss": 1.7714, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.4665263157894737, |
|
"grad_norm": 1.0673142671585083, |
|
"learning_rate": 0.001, |
|
"loss": 2.1321, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.46821052631578947, |
|
"grad_norm": 0.7947107553482056, |
|
"learning_rate": 0.001, |
|
"loss": 1.9446, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.46989473684210525, |
|
"grad_norm": 0.8121020197868347, |
|
"learning_rate": 0.001, |
|
"loss": 2.217, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.47157894736842104, |
|
"grad_norm": 0.7495191097259521, |
|
"learning_rate": 0.001, |
|
"loss": 1.891, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.47326315789473683, |
|
"grad_norm": 0.7859931588172913, |
|
"learning_rate": 0.001, |
|
"loss": 1.5517, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.4749473684210526, |
|
"grad_norm": 0.8961056470870972, |
|
"learning_rate": 0.001, |
|
"loss": 2.0886, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.4766315789473684, |
|
"grad_norm": 0.7971674799919128, |
|
"learning_rate": 0.001, |
|
"loss": 1.8596, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.4783157894736842, |
|
"grad_norm": 0.881367564201355, |
|
"learning_rate": 0.001, |
|
"loss": 1.9999, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.883185863494873, |
|
"learning_rate": 0.001, |
|
"loss": 2.0156, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.48168421052631577, |
|
"grad_norm": 0.8560335636138916, |
|
"learning_rate": 0.001, |
|
"loss": 2.0237, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.48336842105263156, |
|
"grad_norm": 1.038077712059021, |
|
"learning_rate": 0.001, |
|
"loss": 1.6664, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.48505263157894735, |
|
"grad_norm": 0.7434845566749573, |
|
"learning_rate": 0.001, |
|
"loss": 1.9518, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.48673684210526313, |
|
"grad_norm": 1.099915862083435, |
|
"learning_rate": 0.001, |
|
"loss": 2.1582, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.4884210526315789, |
|
"grad_norm": 0.7814631462097168, |
|
"learning_rate": 0.001, |
|
"loss": 1.7189, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.4901052631578947, |
|
"grad_norm": 0.9618262052536011, |
|
"learning_rate": 0.001, |
|
"loss": 2.1185, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.4917894736842105, |
|
"grad_norm": 0.7547399401664734, |
|
"learning_rate": 0.001, |
|
"loss": 1.8551, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.49347368421052634, |
|
"grad_norm": 0.891696035861969, |
|
"learning_rate": 0.001, |
|
"loss": 2.1148, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.49515789473684213, |
|
"grad_norm": 0.9156106114387512, |
|
"learning_rate": 0.001, |
|
"loss": 1.829, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.4968421052631579, |
|
"grad_norm": 0.8770383596420288, |
|
"learning_rate": 0.001, |
|
"loss": 1.9336, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.4985263157894737, |
|
"grad_norm": 0.7889037132263184, |
|
"learning_rate": 0.001, |
|
"loss": 1.7122, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5002105263157894, |
|
"grad_norm": 1.0132378339767456, |
|
"learning_rate": 0.001, |
|
"loss": 2.0484, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5018947368421053, |
|
"grad_norm": 0.852583110332489, |
|
"learning_rate": 0.001, |
|
"loss": 1.9974, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5018947368421053, |
|
"eval_loss": 1.6337618827819824, |
|
"eval_runtime": 0.0834, |
|
"eval_samples_per_second": 11.984, |
|
"eval_steps_per_second": 11.984, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.503578947368421, |
|
"grad_norm": 1.0435866117477417, |
|
"learning_rate": 0.001, |
|
"loss": 2.105, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5052631578947369, |
|
"grad_norm": 0.754615843296051, |
|
"learning_rate": 0.001, |
|
"loss": 2.0089, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5069473684210526, |
|
"grad_norm": 1.012373924255371, |
|
"learning_rate": 0.001, |
|
"loss": 2.3203, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5086315789473684, |
|
"grad_norm": 0.7808589935302734, |
|
"learning_rate": 0.001, |
|
"loss": 2.1087, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5103157894736842, |
|
"grad_norm": 0.8035853505134583, |
|
"learning_rate": 0.001, |
|
"loss": 2.1473, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 0.7854329943656921, |
|
"learning_rate": 0.001, |
|
"loss": 1.9042, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5136842105263157, |
|
"grad_norm": 0.8837404251098633, |
|
"learning_rate": 0.001, |
|
"loss": 1.6176, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5153684210526316, |
|
"grad_norm": 0.9439155459403992, |
|
"learning_rate": 0.001, |
|
"loss": 2.1619, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5170526315789473, |
|
"grad_norm": 0.836586058139801, |
|
"learning_rate": 0.001, |
|
"loss": 1.9484, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5187368421052632, |
|
"grad_norm": 0.8734055161476135, |
|
"learning_rate": 0.001, |
|
"loss": 2.0695, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5204210526315789, |
|
"grad_norm": 0.8716776967048645, |
|
"learning_rate": 0.001, |
|
"loss": 2.1273, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5221052631578947, |
|
"grad_norm": 0.9540092349052429, |
|
"learning_rate": 0.001, |
|
"loss": 1.8999, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5237894736842105, |
|
"grad_norm": 1.1694831848144531, |
|
"learning_rate": 0.001, |
|
"loss": 1.728, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5254736842105263, |
|
"grad_norm": 0.7269738912582397, |
|
"learning_rate": 0.001, |
|
"loss": 2.0951, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5271578947368422, |
|
"grad_norm": 0.7646914720535278, |
|
"learning_rate": 0.001, |
|
"loss": 1.8231, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5288421052631579, |
|
"grad_norm": 0.8613254427909851, |
|
"learning_rate": 0.001, |
|
"loss": 1.9105, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5305263157894737, |
|
"grad_norm": 1.0191853046417236, |
|
"learning_rate": 0.001, |
|
"loss": 1.7064, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5322105263157895, |
|
"grad_norm": 1.2197155952453613, |
|
"learning_rate": 0.001, |
|
"loss": 1.7123, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5338947368421053, |
|
"grad_norm": 0.818133533000946, |
|
"learning_rate": 0.001, |
|
"loss": 1.7865, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.535578947368421, |
|
"grad_norm": 0.8760883808135986, |
|
"learning_rate": 0.001, |
|
"loss": 2.1209, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5372631578947369, |
|
"grad_norm": 1.0778782367706299, |
|
"learning_rate": 0.001, |
|
"loss": 2.0902, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5389473684210526, |
|
"grad_norm": 0.8181326985359192, |
|
"learning_rate": 0.001, |
|
"loss": 1.9372, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5406315789473685, |
|
"grad_norm": 0.9272657036781311, |
|
"learning_rate": 0.001, |
|
"loss": 2.0478, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5423157894736842, |
|
"grad_norm": 0.9218736290931702, |
|
"learning_rate": 0.001, |
|
"loss": 2.108, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.544, |
|
"grad_norm": 1.0985972881317139, |
|
"learning_rate": 0.001, |
|
"loss": 2.0267, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5456842105263158, |
|
"grad_norm": 0.8310480713844299, |
|
"learning_rate": 0.001, |
|
"loss": 1.7772, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5473684210526316, |
|
"grad_norm": 0.8774259090423584, |
|
"learning_rate": 0.001, |
|
"loss": 2.0248, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5490526315789473, |
|
"grad_norm": 1.0681616067886353, |
|
"learning_rate": 0.001, |
|
"loss": 2.2355, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5507368421052632, |
|
"grad_norm": 0.9428539872169495, |
|
"learning_rate": 0.001, |
|
"loss": 1.9988, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5524210526315789, |
|
"grad_norm": 1.0054833889007568, |
|
"learning_rate": 0.001, |
|
"loss": 1.9063, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.5541052631578948, |
|
"grad_norm": 0.8005337715148926, |
|
"learning_rate": 0.001, |
|
"loss": 2.1752, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5557894736842105, |
|
"grad_norm": 1.109134554862976, |
|
"learning_rate": 0.001, |
|
"loss": 2.3235, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.5574736842105263, |
|
"grad_norm": 0.9584336280822754, |
|
"learning_rate": 0.001, |
|
"loss": 1.7009, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.5591578947368421, |
|
"grad_norm": 1.2622302770614624, |
|
"learning_rate": 0.001, |
|
"loss": 2.0998, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.5608421052631579, |
|
"grad_norm": 0.92564457654953, |
|
"learning_rate": 0.001, |
|
"loss": 1.7039, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.5625263157894737, |
|
"grad_norm": 0.7569521069526672, |
|
"learning_rate": 0.001, |
|
"loss": 1.7963, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.5642105263157895, |
|
"grad_norm": 0.7915797233581543, |
|
"learning_rate": 0.001, |
|
"loss": 1.645, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.5658947368421052, |
|
"grad_norm": 0.7300320863723755, |
|
"learning_rate": 0.001, |
|
"loss": 1.7476, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.5675789473684211, |
|
"grad_norm": 1.1384440660476685, |
|
"learning_rate": 0.001, |
|
"loss": 2.1937, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.5692631578947368, |
|
"grad_norm": 0.8770859241485596, |
|
"learning_rate": 0.001, |
|
"loss": 1.9664, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.5709473684210526, |
|
"grad_norm": 0.9081368446350098, |
|
"learning_rate": 0.001, |
|
"loss": 1.4632, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.5726315789473684, |
|
"grad_norm": 0.8865834474563599, |
|
"learning_rate": 0.001, |
|
"loss": 1.7578, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.5743157894736842, |
|
"grad_norm": 0.8756502866744995, |
|
"learning_rate": 0.001, |
|
"loss": 1.9929, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 0.8333286643028259, |
|
"learning_rate": 0.001, |
|
"loss": 2.3068, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.5776842105263158, |
|
"grad_norm": 0.8217945098876953, |
|
"learning_rate": 0.001, |
|
"loss": 1.9818, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.5793684210526315, |
|
"grad_norm": 0.8414101004600525, |
|
"learning_rate": 0.001, |
|
"loss": 2.0111, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.5810526315789474, |
|
"grad_norm": 0.9645239114761353, |
|
"learning_rate": 0.001, |
|
"loss": 2.2026, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.5827368421052631, |
|
"grad_norm": 0.9366424083709717, |
|
"learning_rate": 0.001, |
|
"loss": 2.1754, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.584421052631579, |
|
"grad_norm": 0.839468240737915, |
|
"learning_rate": 0.001, |
|
"loss": 1.8966, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.5861052631578947, |
|
"grad_norm": 0.9215678572654724, |
|
"learning_rate": 0.001, |
|
"loss": 2.1501, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.5877894736842105, |
|
"grad_norm": 1.0060967206954956, |
|
"learning_rate": 0.001, |
|
"loss": 1.9594, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.5894736842105263, |
|
"grad_norm": 0.9866886734962463, |
|
"learning_rate": 0.001, |
|
"loss": 2.0878, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.5911578947368421, |
|
"grad_norm": 1.0554858446121216, |
|
"learning_rate": 0.001, |
|
"loss": 1.7109, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.592842105263158, |
|
"grad_norm": 0.9574116468429565, |
|
"learning_rate": 0.001, |
|
"loss": 2.1836, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.5945263157894737, |
|
"grad_norm": 0.9625939726829529, |
|
"learning_rate": 0.001, |
|
"loss": 2.0379, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.5962105263157895, |
|
"grad_norm": 0.9140836000442505, |
|
"learning_rate": 0.001, |
|
"loss": 2.0564, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.5978947368421053, |
|
"grad_norm": 0.9520573616027832, |
|
"learning_rate": 0.001, |
|
"loss": 1.9383, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.5995789473684211, |
|
"grad_norm": 0.875503659248352, |
|
"learning_rate": 0.001, |
|
"loss": 1.6936, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6012631578947368, |
|
"grad_norm": 1.461020827293396, |
|
"learning_rate": 0.001, |
|
"loss": 2.2684, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6029473684210527, |
|
"grad_norm": 0.8192405700683594, |
|
"learning_rate": 0.001, |
|
"loss": 1.8995, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6046315789473684, |
|
"grad_norm": 1.4530872106552124, |
|
"learning_rate": 0.001, |
|
"loss": 1.8539, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6063157894736843, |
|
"grad_norm": 0.959186315536499, |
|
"learning_rate": 0.001, |
|
"loss": 2.0851, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.608, |
|
"grad_norm": 0.8276315331459045, |
|
"learning_rate": 0.001, |
|
"loss": 1.9521, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6096842105263158, |
|
"grad_norm": 1.2478163242340088, |
|
"learning_rate": 0.001, |
|
"loss": 1.9309, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6113684210526316, |
|
"grad_norm": 1.1320995092391968, |
|
"learning_rate": 0.001, |
|
"loss": 1.9807, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6130526315789474, |
|
"grad_norm": 0.9767136573791504, |
|
"learning_rate": 0.001, |
|
"loss": 1.9802, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6147368421052631, |
|
"grad_norm": 0.8936948776245117, |
|
"learning_rate": 0.001, |
|
"loss": 1.9986, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.616421052631579, |
|
"grad_norm": 0.7911234498023987, |
|
"learning_rate": 0.001, |
|
"loss": 2.0896, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6181052631578947, |
|
"grad_norm": 0.7824344635009766, |
|
"learning_rate": 0.001, |
|
"loss": 1.4767, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6197894736842106, |
|
"grad_norm": 0.9858822822570801, |
|
"learning_rate": 0.001, |
|
"loss": 1.9758, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6214736842105263, |
|
"grad_norm": 0.942699670791626, |
|
"learning_rate": 0.001, |
|
"loss": 1.9038, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6231578947368421, |
|
"grad_norm": 1.0846315622329712, |
|
"learning_rate": 0.001, |
|
"loss": 1.8084, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6248421052631579, |
|
"grad_norm": 0.9172139167785645, |
|
"learning_rate": 0.001, |
|
"loss": 1.8795, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6265263157894737, |
|
"grad_norm": 0.8866816163063049, |
|
"learning_rate": 0.001, |
|
"loss": 1.8835, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6282105263157894, |
|
"grad_norm": 0.8923367261886597, |
|
"learning_rate": 0.001, |
|
"loss": 2.1602, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6298947368421053, |
|
"grad_norm": 0.9485911130905151, |
|
"learning_rate": 0.001, |
|
"loss": 2.0832, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.631578947368421, |
|
"grad_norm": 1.026877999305725, |
|
"learning_rate": 0.001, |
|
"loss": 2.283, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6332631578947369, |
|
"grad_norm": 0.9710808396339417, |
|
"learning_rate": 0.001, |
|
"loss": 1.6655, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6349473684210526, |
|
"grad_norm": 1.1433035135269165, |
|
"learning_rate": 0.001, |
|
"loss": 1.9597, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6366315789473684, |
|
"grad_norm": 0.9025890827178955, |
|
"learning_rate": 0.001, |
|
"loss": 2.0539, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6383157894736842, |
|
"grad_norm": 0.9245177507400513, |
|
"learning_rate": 0.001, |
|
"loss": 1.7705, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.932959258556366, |
|
"learning_rate": 0.001, |
|
"loss": 2.0923, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6416842105263157, |
|
"grad_norm": 0.9858509302139282, |
|
"learning_rate": 0.001, |
|
"loss": 1.9338, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6433684210526316, |
|
"grad_norm": 1.0888968706130981, |
|
"learning_rate": 0.001, |
|
"loss": 1.8463, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6450526315789473, |
|
"grad_norm": 0.9424766302108765, |
|
"learning_rate": 0.001, |
|
"loss": 1.8135, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6467368421052632, |
|
"grad_norm": 0.955096960067749, |
|
"learning_rate": 0.001, |
|
"loss": 1.9355, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6484210526315789, |
|
"grad_norm": 0.9020712375640869, |
|
"learning_rate": 0.001, |
|
"loss": 2.2235, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6501052631578947, |
|
"grad_norm": 1.2948638200759888, |
|
"learning_rate": 0.001, |
|
"loss": 1.8841, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6517894736842105, |
|
"grad_norm": 1.1215901374816895, |
|
"learning_rate": 0.001, |
|
"loss": 2.3204, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6534736842105263, |
|
"grad_norm": 1.000780701637268, |
|
"learning_rate": 0.001, |
|
"loss": 1.9545, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.655157894736842, |
|
"grad_norm": 1.0688225030899048, |
|
"learning_rate": 0.001, |
|
"loss": 2.1332, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.6568421052631579, |
|
"grad_norm": 0.8454869985580444, |
|
"learning_rate": 0.001, |
|
"loss": 2.2442, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6585263157894737, |
|
"grad_norm": 1.0029394626617432, |
|
"learning_rate": 0.001, |
|
"loss": 1.9708, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.6602105263157895, |
|
"grad_norm": 1.2006776332855225, |
|
"learning_rate": 0.001, |
|
"loss": 1.9889, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.6618947368421053, |
|
"grad_norm": 0.8848575949668884, |
|
"learning_rate": 0.001, |
|
"loss": 1.8719, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.663578947368421, |
|
"grad_norm": 0.9433349370956421, |
|
"learning_rate": 0.001, |
|
"loss": 1.807, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.6652631578947369, |
|
"grad_norm": 1.1462281942367554, |
|
"learning_rate": 0.001, |
|
"loss": 1.9315, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.6669473684210526, |
|
"grad_norm": 0.9325450658798218, |
|
"learning_rate": 0.001, |
|
"loss": 1.701, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.6686315789473685, |
|
"grad_norm": 1.1033800840377808, |
|
"learning_rate": 0.001, |
|
"loss": 2.0728, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.6703157894736842, |
|
"grad_norm": 1.0380494594573975, |
|
"learning_rate": 0.001, |
|
"loss": 1.9656, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.672, |
|
"grad_norm": 0.8989469408988953, |
|
"learning_rate": 0.001, |
|
"loss": 1.9998, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.6736842105263158, |
|
"grad_norm": 1.1360111236572266, |
|
"learning_rate": 0.001, |
|
"loss": 2.1114, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6753684210526316, |
|
"grad_norm": 0.9334345459938049, |
|
"learning_rate": 0.001, |
|
"loss": 1.7843, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6770526315789474, |
|
"grad_norm": 0.8565030097961426, |
|
"learning_rate": 0.001, |
|
"loss": 1.8634, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.6787368421052632, |
|
"grad_norm": 0.926354706287384, |
|
"learning_rate": 0.001, |
|
"loss": 2.1583, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6804210526315789, |
|
"grad_norm": 1.041408896446228, |
|
"learning_rate": 0.001, |
|
"loss": 2.0652, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6821052631578948, |
|
"grad_norm": 0.8957986235618591, |
|
"learning_rate": 0.001, |
|
"loss": 1.9705, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.6837894736842105, |
|
"grad_norm": 0.866303026676178, |
|
"learning_rate": 0.001, |
|
"loss": 1.9173, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6854736842105263, |
|
"grad_norm": 0.9298515915870667, |
|
"learning_rate": 0.001, |
|
"loss": 1.4851, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6871578947368421, |
|
"grad_norm": 0.8325154781341553, |
|
"learning_rate": 0.001, |
|
"loss": 1.9217, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.6888421052631579, |
|
"grad_norm": 1.0331366062164307, |
|
"learning_rate": 0.001, |
|
"loss": 2.2276, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6905263157894737, |
|
"grad_norm": 0.9623380899429321, |
|
"learning_rate": 0.001, |
|
"loss": 1.9401, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.6922105263157895, |
|
"grad_norm": 0.9502870440483093, |
|
"learning_rate": 0.001, |
|
"loss": 2.0698, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6938947368421052, |
|
"grad_norm": 0.7351365089416504, |
|
"learning_rate": 0.001, |
|
"loss": 1.8196, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.6955789473684211, |
|
"grad_norm": 1.4265284538269043, |
|
"learning_rate": 0.001, |
|
"loss": 2.1069, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.6972631578947368, |
|
"grad_norm": 0.9151477813720703, |
|
"learning_rate": 0.001, |
|
"loss": 1.9652, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.6989473684210527, |
|
"grad_norm": 1.0058677196502686, |
|
"learning_rate": 0.001, |
|
"loss": 1.9402, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7006315789473684, |
|
"grad_norm": 0.8107333183288574, |
|
"learning_rate": 0.001, |
|
"loss": 1.8946, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7023157894736842, |
|
"grad_norm": 0.9497429132461548, |
|
"learning_rate": 0.001, |
|
"loss": 2.0289, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 0.9337472319602966, |
|
"learning_rate": 0.001, |
|
"loss": 1.9517, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7056842105263158, |
|
"grad_norm": 0.872475802898407, |
|
"learning_rate": 0.001, |
|
"loss": 2.0209, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7073684210526315, |
|
"grad_norm": 0.9438268542289734, |
|
"learning_rate": 0.001, |
|
"loss": 1.9209, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7090526315789474, |
|
"grad_norm": 1.2881578207015991, |
|
"learning_rate": 0.001, |
|
"loss": 2.1235, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7107368421052631, |
|
"grad_norm": 0.8764305114746094, |
|
"learning_rate": 0.001, |
|
"loss": 1.9836, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.712421052631579, |
|
"grad_norm": 1.232689619064331, |
|
"learning_rate": 0.001, |
|
"loss": 2.2588, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7141052631578947, |
|
"grad_norm": 0.9619866013526917, |
|
"learning_rate": 0.001, |
|
"loss": 2.1062, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7157894736842105, |
|
"grad_norm": 0.9023774266242981, |
|
"learning_rate": 0.001, |
|
"loss": 1.9278, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7174736842105263, |
|
"grad_norm": 1.1033554077148438, |
|
"learning_rate": 0.001, |
|
"loss": 1.7297, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7191578947368421, |
|
"grad_norm": 0.7463766932487488, |
|
"learning_rate": 0.001, |
|
"loss": 1.725, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7208421052631578, |
|
"grad_norm": 0.9457252621650696, |
|
"learning_rate": 0.001, |
|
"loss": 1.7739, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7225263157894737, |
|
"grad_norm": 0.934600293636322, |
|
"learning_rate": 0.001, |
|
"loss": 2.1775, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7242105263157895, |
|
"grad_norm": 0.8955628871917725, |
|
"learning_rate": 0.001, |
|
"loss": 2.2009, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7258947368421053, |
|
"grad_norm": 0.94439697265625, |
|
"learning_rate": 0.001, |
|
"loss": 2.244, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7275789473684211, |
|
"grad_norm": 0.8255289793014526, |
|
"learning_rate": 0.001, |
|
"loss": 1.8666, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7292631578947368, |
|
"grad_norm": 0.8670260906219482, |
|
"learning_rate": 0.001, |
|
"loss": 1.7115, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7309473684210527, |
|
"grad_norm": 0.8938360214233398, |
|
"learning_rate": 0.001, |
|
"loss": 1.6273, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7326315789473684, |
|
"grad_norm": 1.236402988433838, |
|
"learning_rate": 0.001, |
|
"loss": 2.0744, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7343157894736843, |
|
"grad_norm": 0.8387994170188904, |
|
"learning_rate": 0.001, |
|
"loss": 1.9027, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.736, |
|
"grad_norm": 0.8984929323196411, |
|
"learning_rate": 0.001, |
|
"loss": 2.1411, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7376842105263158, |
|
"grad_norm": 0.9328674077987671, |
|
"learning_rate": 0.001, |
|
"loss": 2.2223, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7393684210526316, |
|
"grad_norm": 0.8986714482307434, |
|
"learning_rate": 0.001, |
|
"loss": 1.9568, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7410526315789474, |
|
"grad_norm": 0.9492053389549255, |
|
"learning_rate": 0.001, |
|
"loss": 2.2018, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7427368421052631, |
|
"grad_norm": 0.8261067867279053, |
|
"learning_rate": 0.001, |
|
"loss": 2.0828, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.744421052631579, |
|
"grad_norm": 1.0723634958267212, |
|
"learning_rate": 0.001, |
|
"loss": 2.2274, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7461052631578947, |
|
"grad_norm": 0.933506190776825, |
|
"learning_rate": 0.001, |
|
"loss": 2.0962, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7477894736842106, |
|
"grad_norm": 0.7961298227310181, |
|
"learning_rate": 0.001, |
|
"loss": 2.1599, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.7494736842105263, |
|
"grad_norm": 0.9804624319076538, |
|
"learning_rate": 0.001, |
|
"loss": 2.2872, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7511578947368421, |
|
"grad_norm": 0.8495241403579712, |
|
"learning_rate": 0.001, |
|
"loss": 1.9647, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7528421052631579, |
|
"grad_norm": 0.9075875878334045, |
|
"learning_rate": 0.001, |
|
"loss": 2.1734, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.7528421052631579, |
|
"eval_loss": 1.7126047611236572, |
|
"eval_runtime": 0.0791, |
|
"eval_samples_per_second": 12.634, |
|
"eval_steps_per_second": 12.634, |
|
"step": 447 |
|
}, |
|
{
"epoch": 0.7545263157894737,
"grad_norm": 0.8501783013343811,
"learning_rate": 0.001,
"loss": 1.7057,
"step": 448
},
{
"epoch": 0.7562105263157894,
"grad_norm": 0.9169737100601196,
"learning_rate": 0.001,
"loss": 2.0444,
"step": 449
},
{
"epoch": 0.7578947368421053,
"grad_norm": 0.8813796043395996,
"learning_rate": 0.001,
"loss": 1.4839,
"step": 450
},
{
"epoch": 0.759578947368421,
"grad_norm": 0.9467214345932007,
"learning_rate": 0.001,
"loss": 1.8931,
"step": 451
},
{
"epoch": 0.7612631578947369,
"grad_norm": 0.8730418086051941,
"learning_rate": 0.001,
"loss": 1.7104,
"step": 452
},
{
"epoch": 0.7629473684210526,
"grad_norm": 0.8568257093429565,
"learning_rate": 0.001,
"loss": 1.998,
"step": 453
},
{
"epoch": 0.7646315789473684,
"grad_norm": 0.8972058892250061,
"learning_rate": 0.001,
"loss": 1.8552,
"step": 454
},
{
"epoch": 0.7663157894736842,
"grad_norm": 0.9764978289604187,
"learning_rate": 0.001,
"loss": 2.0078,
"step": 455
},
{
"epoch": 0.768,
"grad_norm": 0.9850934147834778,
"learning_rate": 0.001,
"loss": 1.9529,
"step": 456
},
{
"epoch": 0.7696842105263157,
"grad_norm": 0.8368688225746155,
"learning_rate": 0.001,
"loss": 2.1118,
"step": 457
},
{
"epoch": 0.7713684210526316,
"grad_norm": 0.9366937875747681,
"learning_rate": 0.001,
"loss": 1.7256,
"step": 458
},
{
"epoch": 0.7730526315789473,
"grad_norm": 0.9020842909812927,
"learning_rate": 0.001,
"loss": 1.697,
"step": 459
},
{
"epoch": 0.7747368421052632,
"grad_norm": 0.9683967232704163,
"learning_rate": 0.001,
"loss": 1.8288,
"step": 460
},
{
"epoch": 0.7764210526315789,
"grad_norm": 0.926777720451355,
"learning_rate": 0.001,
"loss": 1.7249,
"step": 461
},
{
"epoch": 0.7781052631578947,
"grad_norm": 1.0532008409500122,
"learning_rate": 0.001,
"loss": 2.3132,
"step": 462
},
{
"epoch": 0.7797894736842105,
"grad_norm": 0.8814186453819275,
"learning_rate": 0.001,
"loss": 1.9781,
"step": 463
},
{
"epoch": 0.7814736842105263,
"grad_norm": 0.78118896484375,
"learning_rate": 0.001,
"loss": 1.7407,
"step": 464
},
{
"epoch": 0.783157894736842,
"grad_norm": 0.9273678064346313,
"learning_rate": 0.001,
"loss": 2.2367,
"step": 465
},
{
"epoch": 0.7848421052631579,
"grad_norm": 0.9391574263572693,
"learning_rate": 0.001,
"loss": 1.9642,
"step": 466
},
{
"epoch": 0.7865263157894736,
"grad_norm": 1.0936115980148315,
"learning_rate": 0.001,
"loss": 2.2852,
"step": 467
},
{
"epoch": 0.7882105263157895,
"grad_norm": 1.0484883785247803,
"learning_rate": 0.001,
"loss": 1.8051,
"step": 468
},
{
"epoch": 0.7898947368421053,
"grad_norm": 1.0337159633636475,
"learning_rate": 0.001,
"loss": 2.0115,
"step": 469
},
{
"epoch": 0.791578947368421,
"grad_norm": 0.8993913531303406,
"learning_rate": 0.001,
"loss": 1.7269,
"step": 470
},
{
"epoch": 0.7932631578947369,
"grad_norm": 0.8433123826980591,
"learning_rate": 0.001,
"loss": 2.0798,
"step": 471
},
{
"epoch": 0.7949473684210526,
"grad_norm": 0.9056971669197083,
"learning_rate": 0.001,
"loss": 2.3624,
"step": 472
},
{
"epoch": 0.7966315789473685,
"grad_norm": 0.8916146159172058,
"learning_rate": 0.001,
"loss": 1.6266,
"step": 473
},
{
"epoch": 0.7983157894736842,
"grad_norm": 0.8712090253829956,
"learning_rate": 0.001,
"loss": 2.2228,
"step": 474
},
{
"epoch": 0.8,
"grad_norm": 0.8916635513305664,
"learning_rate": 0.001,
"loss": 1.9197,
"step": 475
},
{
"epoch": 0.8016842105263158,
"grad_norm": 0.9295555949211121,
"learning_rate": 0.001,
"loss": 1.7184,
"step": 476
},
{
"epoch": 0.8033684210526316,
"grad_norm": 0.8718081116676331,
"learning_rate": 0.001,
"loss": 1.9394,
"step": 477
},
{
"epoch": 0.8050526315789474,
"grad_norm": 1.0214195251464844,
"learning_rate": 0.001,
"loss": 1.8555,
"step": 478
},
{
"epoch": 0.8067368421052632,
"grad_norm": 0.9039567708969116,
"learning_rate": 0.001,
"loss": 2.0961,
"step": 479
},
{
"epoch": 0.8084210526315789,
"grad_norm": 1.088826298713684,
"learning_rate": 0.001,
"loss": 2.1166,
"step": 480
},
{
"epoch": 0.8101052631578948,
"grad_norm": 0.9610921144485474,
"learning_rate": 0.001,
"loss": 2.1177,
"step": 481
},
{
"epoch": 0.8117894736842105,
"grad_norm": 0.8916026949882507,
"learning_rate": 0.001,
"loss": 1.9126,
"step": 482
},
{
"epoch": 0.8134736842105263,
"grad_norm": 0.9478291273117065,
"learning_rate": 0.001,
"loss": 2.0522,
"step": 483
},
{
"epoch": 0.8151578947368421,
"grad_norm": 1.1988763809204102,
"learning_rate": 0.001,
"loss": 2.1891,
"step": 484
},
{
"epoch": 0.8168421052631579,
"grad_norm": 1.06550133228302,
"learning_rate": 0.001,
"loss": 2.0575,
"step": 485
},
{
"epoch": 0.8185263157894737,
"grad_norm": 1.0411326885223389,
"learning_rate": 0.001,
"loss": 2.2342,
"step": 486
},
{
"epoch": 0.8202105263157895,
"grad_norm": 0.996446967124939,
"learning_rate": 0.001,
"loss": 2.3195,
"step": 487
},
{
"epoch": 0.8218947368421052,
"grad_norm": 0.9175570011138916,
"learning_rate": 0.001,
"loss": 1.9672,
"step": 488
},
{
"epoch": 0.8235789473684211,
"grad_norm": 0.9615358710289001,
"learning_rate": 0.001,
"loss": 1.7088,
"step": 489
},
{
"epoch": 0.8252631578947368,
"grad_norm": 1.0030488967895508,
"learning_rate": 0.001,
"loss": 1.9093,
"step": 490
},
{
"epoch": 0.8269473684210527,
"grad_norm": 0.8371875882148743,
"learning_rate": 0.001,
"loss": 2.0981,
"step": 491
},
{
"epoch": 0.8286315789473684,
"grad_norm": 1.0094941854476929,
"learning_rate": 0.001,
"loss": 1.9831,
"step": 492
},
{
"epoch": 0.8303157894736842,
"grad_norm": 0.6985566020011902,
"learning_rate": 0.001,
"loss": 1.481,
"step": 493
},
{
"epoch": 0.832,
"grad_norm": 0.8714520335197449,
"learning_rate": 0.001,
"loss": 1.8478,
"step": 494
},
{
"epoch": 0.8336842105263158,
"grad_norm": 0.9632995128631592,
"learning_rate": 0.001,
"loss": 1.9434,
"step": 495
},
{
"epoch": 0.8353684210526315,
"grad_norm": 0.989193320274353,
"learning_rate": 0.001,
"loss": 2.1274,
"step": 496
},
{
"epoch": 0.8370526315789474,
"grad_norm": 0.9159345626831055,
"learning_rate": 0.001,
"loss": 2.0572,
"step": 497
},
{
"epoch": 0.8387368421052631,
"grad_norm": 0.8769975304603577,
"learning_rate": 0.001,
"loss": 1.6761,
"step": 498
},
{
"epoch": 0.840421052631579,
"grad_norm": 1.0717746019363403,
"learning_rate": 0.001,
"loss": 1.964,
"step": 499
},
{
"epoch": 0.8421052631578947,
"grad_norm": 1.0184441804885864,
"learning_rate": 0.001,
"loss": 2.0598,
"step": 500
},
{
"epoch": 0.8437894736842105,
"grad_norm": 1.1409798860549927,
"learning_rate": 0.001,
"loss": 1.57,
"step": 501
},
{
"epoch": 0.8454736842105263,
"grad_norm": 0.917641818523407,
"learning_rate": 0.001,
"loss": 1.7864,
"step": 502
},
{
"epoch": 0.8471578947368421,
"grad_norm": 0.9784872531890869,
"learning_rate": 0.001,
"loss": 2.2752,
"step": 503
},
{
"epoch": 0.8488421052631578,
"grad_norm": 1.012035846710205,
"learning_rate": 0.001,
"loss": 1.9572,
"step": 504
},
{
"epoch": 0.8505263157894737,
"grad_norm": 0.8496696949005127,
"learning_rate": 0.001,
"loss": 1.5138,
"step": 505
},
{
"epoch": 0.8522105263157894,
"grad_norm": 1.0498113632202148,
"learning_rate": 0.001,
"loss": 2.1815,
"step": 506
},
{
"epoch": 0.8538947368421053,
"grad_norm": 0.8694950938224792,
"learning_rate": 0.001,
"loss": 1.9297,
"step": 507
},
{
"epoch": 0.8555789473684211,
"grad_norm": 0.9349990487098694,
"learning_rate": 0.001,
"loss": 1.9476,
"step": 508
},
{
"epoch": 0.8572631578947368,
"grad_norm": 0.9136828184127808,
"learning_rate": 0.001,
"loss": 1.9822,
"step": 509
},
{
"epoch": 0.8589473684210527,
"grad_norm": 0.9763911366462708,
"learning_rate": 0.001,
"loss": 2.0084,
"step": 510
},
{
"epoch": 0.8606315789473684,
"grad_norm": 0.9221206307411194,
"learning_rate": 0.001,
"loss": 1.9916,
"step": 511
},
{
"epoch": 0.8623157894736843,
"grad_norm": 0.9817140698432922,
"learning_rate": 0.001,
"loss": 1.9887,
"step": 512
},
{
"epoch": 0.864,
"grad_norm": 1.166142463684082,
"learning_rate": 0.001,
"loss": 2.0389,
"step": 513
},
{
"epoch": 0.8656842105263158,
"grad_norm": 0.9571674466133118,
"learning_rate": 0.001,
"loss": 1.6596,
"step": 514
},
{
"epoch": 0.8673684210526316,
"grad_norm": 1.113754391670227,
"learning_rate": 0.001,
"loss": 1.7947,
"step": 515
},
{
"epoch": 0.8690526315789474,
"grad_norm": 0.8172054290771484,
"learning_rate": 0.001,
"loss": 2.1411,
"step": 516
},
{
"epoch": 0.8707368421052631,
"grad_norm": 0.7713819742202759,
"learning_rate": 0.001,
"loss": 1.6631,
"step": 517
},
{
"epoch": 0.872421052631579,
"grad_norm": 1.0675171613693237,
"learning_rate": 0.001,
"loss": 2.115,
"step": 518
},
{
"epoch": 0.8741052631578947,
"grad_norm": 0.9892959594726562,
"learning_rate": 0.001,
"loss": 2.0591,
"step": 519
},
{
"epoch": 0.8757894736842106,
"grad_norm": 1.017602562904358,
"learning_rate": 0.001,
"loss": 1.8098,
"step": 520
},
{
"epoch": 0.8774736842105263,
"grad_norm": 1.0160664319992065,
"learning_rate": 0.001,
"loss": 2.0635,
"step": 521
},
{
"epoch": 0.8791578947368421,
"grad_norm": 0.7594957947731018,
"learning_rate": 0.001,
"loss": 1.9662,
"step": 522
},
{
"epoch": 0.8808421052631579,
"grad_norm": 0.9666638374328613,
"learning_rate": 0.001,
"loss": 2.3076,
"step": 523
},
{
"epoch": 0.8825263157894737,
"grad_norm": 1.0217084884643555,
"learning_rate": 0.001,
"loss": 2.1043,
"step": 524
},
{
"epoch": 0.8842105263157894,
"grad_norm": 0.9620029330253601,
"learning_rate": 0.001,
"loss": 1.9836,
"step": 525
},
{
"epoch": 0.8858947368421053,
"grad_norm": 0.9031502604484558,
"learning_rate": 0.001,
"loss": 1.8648,
"step": 526
},
{
"epoch": 0.887578947368421,
"grad_norm": 1.0853564739227295,
"learning_rate": 0.001,
"loss": 2.2491,
"step": 527
},
{
"epoch": 0.8892631578947369,
"grad_norm": 0.850796639919281,
"learning_rate": 0.001,
"loss": 1.8988,
"step": 528
},
{
"epoch": 0.8909473684210526,
"grad_norm": 0.9813094735145569,
"learning_rate": 0.001,
"loss": 2.0652,
"step": 529
},
{
"epoch": 0.8926315789473684,
"grad_norm": 1.1805915832519531,
"learning_rate": 0.001,
"loss": 1.9761,
"step": 530
},
{
"epoch": 0.8943157894736842,
"grad_norm": 0.91289222240448,
"learning_rate": 0.001,
"loss": 2.2265,
"step": 531
},
{
"epoch": 0.896,
"grad_norm": 0.8116694092750549,
"learning_rate": 0.001,
"loss": 2.0667,
"step": 532
},
{
"epoch": 0.8976842105263158,
"grad_norm": 1.0094329118728638,
"learning_rate": 0.001,
"loss": 2.2099,
"step": 533
},
{
"epoch": 0.8993684210526316,
"grad_norm": 0.9709912538528442,
"learning_rate": 0.001,
"loss": 1.7337,
"step": 534
},
{
"epoch": 0.9010526315789473,
"grad_norm": 1.1453299522399902,
"learning_rate": 0.001,
"loss": 2.0997,
"step": 535
},
{
"epoch": 0.9027368421052632,
"grad_norm": 1.097075343132019,
"learning_rate": 0.001,
"loss": 2.1082,
"step": 536
},
{
"epoch": 0.9044210526315789,
"grad_norm": 0.9422668218612671,
"learning_rate": 0.001,
"loss": 2.2451,
"step": 537
},
{
"epoch": 0.9061052631578947,
"grad_norm": 0.8550103902816772,
"learning_rate": 0.001,
"loss": 1.8164,
"step": 538
},
{
"epoch": 0.9077894736842105,
"grad_norm": 1.0287948846817017,
"learning_rate": 0.001,
"loss": 1.8799,
"step": 539
},
{
"epoch": 0.9094736842105263,
"grad_norm": 1.0734996795654297,
"learning_rate": 0.001,
"loss": 2.1292,
"step": 540
},
{
"epoch": 0.9111578947368421,
"grad_norm": 0.9072704315185547,
"learning_rate": 0.001,
"loss": 1.9709,
"step": 541
},
{
"epoch": 0.9128421052631579,
"grad_norm": 0.9148054718971252,
"learning_rate": 0.001,
"loss": 1.7025,
"step": 542
},
{
"epoch": 0.9145263157894736,
"grad_norm": 1.094970941543579,
"learning_rate": 0.001,
"loss": 2.0636,
"step": 543
},
{
"epoch": 0.9162105263157895,
"grad_norm": 0.8233382701873779,
"learning_rate": 0.001,
"loss": 2.0036,
"step": 544
},
{
"epoch": 0.9178947368421052,
"grad_norm": 0.789847731590271,
"learning_rate": 0.001,
"loss": 1.9641,
"step": 545
},
{
"epoch": 0.919578947368421,
"grad_norm": 1.1164312362670898,
"learning_rate": 0.001,
"loss": 2.0094,
"step": 546
},
{
"epoch": 0.9212631578947369,
"grad_norm": 0.9254114031791687,
"learning_rate": 0.001,
"loss": 2.0071,
"step": 547
},
{
"epoch": 0.9229473684210526,
"grad_norm": 0.772678017616272,
"learning_rate": 0.001,
"loss": 1.6871,
"step": 548
},
{
"epoch": 0.9246315789473685,
"grad_norm": 0.8702642321586609,
"learning_rate": 0.001,
"loss": 1.86,
"step": 549
},
{
"epoch": 0.9263157894736842,
"grad_norm": 0.9259825348854065,
"learning_rate": 0.001,
"loss": 2.2161,
"step": 550
},
{
"epoch": 0.928,
"grad_norm": 1.095923662185669,
"learning_rate": 0.001,
"loss": 2.2506,
"step": 551
},
{
"epoch": 0.9296842105263158,
"grad_norm": 0.8884152770042419,
"learning_rate": 0.001,
"loss": 1.9812,
"step": 552
},
{
"epoch": 0.9313684210526316,
"grad_norm": 1.201453685760498,
"learning_rate": 0.001,
"loss": 2.3967,
"step": 553
},
{
"epoch": 0.9330526315789474,
"grad_norm": 0.8036067485809326,
"learning_rate": 0.001,
"loss": 2.0411,
"step": 554
},
{
"epoch": 0.9347368421052632,
"grad_norm": 1.1204805374145508,
"learning_rate": 0.001,
"loss": 1.9963,
"step": 555
},
{
"epoch": 0.9364210526315789,
"grad_norm": 0.9753801822662354,
"learning_rate": 0.001,
"loss": 2.0774,
"step": 556
},
{
"epoch": 0.9381052631578948,
"grad_norm": 0.9044502973556519,
"learning_rate": 0.001,
"loss": 2.2171,
"step": 557
},
{
"epoch": 0.9397894736842105,
"grad_norm": 0.872684121131897,
"learning_rate": 0.001,
"loss": 2.1116,
"step": 558
},
{
"epoch": 0.9414736842105264,
"grad_norm": 0.9896319508552551,
"learning_rate": 0.001,
"loss": 1.9894,
"step": 559
},
{
"epoch": 0.9431578947368421,
"grad_norm": 0.8896704912185669,
"learning_rate": 0.001,
"loss": 1.83,
"step": 560
},
{
"epoch": 0.9448421052631579,
"grad_norm": 1.1786295175552368,
"learning_rate": 0.001,
"loss": 1.3999,
"step": 561
},
{
"epoch": 0.9465263157894737,
"grad_norm": 1.0260367393493652,
"learning_rate": 0.001,
"loss": 1.7856,
"step": 562
},
{
"epoch": 0.9482105263157895,
"grad_norm": 1.1223475933074951,
"learning_rate": 0.001,
"loss": 1.8988,
"step": 563
},
{
"epoch": 0.9498947368421052,
"grad_norm": 1.2666854858398438,
"learning_rate": 0.001,
"loss": 2.4208,
"step": 564
},
{
"epoch": 0.9515789473684211,
"grad_norm": 0.8746899366378784,
"learning_rate": 0.001,
"loss": 2.0621,
"step": 565
},
{
"epoch": 0.9532631578947368,
"grad_norm": 1.1559736728668213,
"learning_rate": 0.001,
"loss": 1.9889,
"step": 566
},
{
"epoch": 0.9549473684210527,
"grad_norm": 0.8954889178276062,
"learning_rate": 0.001,
"loss": 2.1674,
"step": 567
},
{
"epoch": 0.9566315789473684,
"grad_norm": 0.848497748374939,
"learning_rate": 0.001,
"loss": 2.1309,
"step": 568
},
{
"epoch": 0.9583157894736842,
"grad_norm": 0.9832907319068909,
"learning_rate": 0.001,
"loss": 1.9696,
"step": 569
},
{
"epoch": 0.96,
"grad_norm": 1.004216194152832,
"learning_rate": 0.001,
"loss": 2.1556,
"step": 570
},
{
"epoch": 0.9616842105263158,
"grad_norm": 1.2567979097366333,
"learning_rate": 0.001,
"loss": 1.925,
"step": 571
},
{
"epoch": 0.9633684210526315,
"grad_norm": 1.056889533996582,
"learning_rate": 0.001,
"loss": 1.7742,
"step": 572
},
{
"epoch": 0.9650526315789474,
"grad_norm": 0.7364183664321899,
"learning_rate": 0.001,
"loss": 1.7558,
"step": 573
},
{
"epoch": 0.9667368421052631,
"grad_norm": 1.1390528678894043,
"learning_rate": 0.001,
"loss": 1.8911,
"step": 574
},
{
"epoch": 0.968421052631579,
"grad_norm": 1.0370616912841797,
"learning_rate": 0.001,
"loss": 2.091,
"step": 575
},
{
"epoch": 0.9701052631578947,
"grad_norm": 0.8506829738616943,
"learning_rate": 0.001,
"loss": 2.2052,
"step": 576
},
{
"epoch": 0.9717894736842105,
"grad_norm": 0.8810485601425171,
"learning_rate": 0.001,
"loss": 2.2049,
"step": 577
},
{
"epoch": 0.9734736842105263,
"grad_norm": 1.0657389163970947,
"learning_rate": 0.001,
"loss": 2.1982,
"step": 578
},
{
"epoch": 0.9751578947368421,
"grad_norm": 0.9130502343177795,
"learning_rate": 0.001,
"loss": 1.7093,
"step": 579
},
{
"epoch": 0.9768421052631578,
"grad_norm": 0.9334926605224609,
"learning_rate": 0.001,
"loss": 1.8507,
"step": 580
},
{
"epoch": 0.9785263157894737,
"grad_norm": 0.9801141023635864,
"learning_rate": 0.001,
"loss": 2.0824,
"step": 581
},
{
"epoch": 0.9802105263157894,
"grad_norm": 1.1863903999328613,
"learning_rate": 0.001,
"loss": 2.0316,
"step": 582
},
{
"epoch": 0.9818947368421053,
"grad_norm": 0.9224819540977478,
"learning_rate": 0.001,
"loss": 2.1538,
"step": 583
},
{
"epoch": 0.983578947368421,
"grad_norm": 0.9597621560096741,
"learning_rate": 0.001,
"loss": 2.0054,
"step": 584
},
{
"epoch": 0.9852631578947368,
"grad_norm": 1.1641483306884766,
"learning_rate": 0.001,
"loss": 1.9722,
"step": 585
},
{
"epoch": 0.9869473684210527,
"grad_norm": 0.9333254098892212,
"learning_rate": 0.001,
"loss": 2.0654,
"step": 586
},
{
"epoch": 0.9886315789473684,
"grad_norm": 1.0137170553207397,
"learning_rate": 0.001,
"loss": 2.198,
"step": 587
},
{
"epoch": 0.9903157894736843,
"grad_norm": 0.9962888360023499,
"learning_rate": 0.001,
"loss": 1.9295,
"step": 588
},
{
"epoch": 0.992,
"grad_norm": 1.1276189088821411,
"learning_rate": 0.001,
"loss": 1.9579,
"step": 589
},
{
"epoch": 0.9936842105263158,
"grad_norm": 0.9350583553314209,
"learning_rate": 0.001,
"loss": 1.9998,
"step": 590
},
{
"epoch": 0.9953684210526316,
"grad_norm": 0.97312992811203,
"learning_rate": 0.001,
"loss": 1.7338,
"step": 591
},
{
"epoch": 0.9970526315789474,
"grad_norm": 1.0777076482772827,
"learning_rate": 0.001,
"loss": 1.7413,
"step": 592
},
{
"epoch": 0.9987368421052631,
"grad_norm": 1.031515121459961,
"learning_rate": 0.001,
"loss": 2.3266,
"step": 593
},
|
{
"epoch": 0.9987368421052631,
"step": 593,
"total_flos": 3.231264518066995e+16,
"train_loss": 1.9546177544038976,
"train_runtime": 1901.8595,
"train_samples_per_second": 4.995,
"train_steps_per_second": 0.312
}
],
"logging_steps": 1,
"max_steps": 593,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 149,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 100,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.231264518066995e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}