{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 718,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002785515320334262,
      "grad_norm": 51.896563217510575,
      "learning_rate": 9.259259259259259e-08,
      "loss": 1.8505,
      "step": 1
    },
    {
      "epoch": 0.005571030640668524,
      "grad_norm": 51.25443110001578,
      "learning_rate": 1.8518518518518518e-07,
      "loss": 1.7695,
      "step": 2
    },
    {
      "epoch": 0.008356545961002786,
      "grad_norm": 50.657870592221606,
      "learning_rate": 2.7777777777777776e-07,
      "loss": 1.7422,
      "step": 3
    },
    {
      "epoch": 0.011142061281337047,
      "grad_norm": 54.50738347091111,
      "learning_rate": 3.7037037037037036e-07,
      "loss": 1.8065,
      "step": 4
    },
    {
      "epoch": 0.013927576601671309,
      "grad_norm": 48.15106951820052,
      "learning_rate": 4.6296296296296297e-07,
      "loss": 1.7352,
      "step": 5
    },
    {
      "epoch": 0.016713091922005572,
      "grad_norm": 46.87929448689062,
      "learning_rate": 5.555555555555555e-07,
      "loss": 1.7283,
      "step": 6
    },
    {
      "epoch": 0.019498607242339833,
      "grad_norm": 45.57513011413353,
      "learning_rate": 6.481481481481481e-07,
      "loss": 1.6871,
      "step": 7
    },
    {
      "epoch": 0.022284122562674095,
      "grad_norm": 44.7755092682659,
      "learning_rate": 7.407407407407407e-07,
      "loss": 1.6768,
      "step": 8
    },
    {
      "epoch": 0.025069637883008356,
      "grad_norm": 31.53147060959238,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.6429,
      "step": 9
    },
    {
      "epoch": 0.027855153203342618,
      "grad_norm": 28.844842401227453,
      "learning_rate": 9.259259259259259e-07,
      "loss": 1.5356,
      "step": 10
    },
    {
      "epoch": 0.03064066852367688,
      "grad_norm": 17.701640273093982,
      "learning_rate": 1.0185185185185185e-06,
      "loss": 1.3174,
      "step": 11
    },
    {
      "epoch": 0.033426183844011144,
      "grad_norm": 19.577762518427146,
      "learning_rate": 1.111111111111111e-06,
      "loss": 1.4815,
      "step": 12
    },
    {
      "epoch": 0.036211699164345405,
      "grad_norm": 15.661803318546298,
      "learning_rate": 1.2037037037037037e-06,
      "loss": 1.3135,
      "step": 13
    },
    {
      "epoch": 0.03899721448467967,
      "grad_norm": 14.60109094514693,
      "learning_rate": 1.2962962962962962e-06,
      "loss": 1.2715,
      "step": 14
    },
    {
      "epoch": 0.04178272980501393,
      "grad_norm": 26.123721212763172,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 1.1523,
      "step": 15
    },
    {
      "epoch": 0.04456824512534819,
      "grad_norm": 14.467035366468068,
      "learning_rate": 1.4814814814814815e-06,
      "loss": 1.0996,
      "step": 16
    },
    {
      "epoch": 0.04735376044568245,
      "grad_norm": 6.674363215000031,
      "learning_rate": 1.5740740740740742e-06,
      "loss": 0.9907,
      "step": 17
    },
    {
      "epoch": 0.05013927576601671,
      "grad_norm": 12.055832372812148,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.0198,
      "step": 18
    },
    {
      "epoch": 0.052924791086350974,
      "grad_norm": 12.945304986036527,
      "learning_rate": 1.7592592592592594e-06,
      "loss": 0.9337,
      "step": 19
    },
    {
      "epoch": 0.055710306406685235,
      "grad_norm": 10.106412813480258,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 0.919,
      "step": 20
    },
    {
      "epoch": 0.0584958217270195,
      "grad_norm": 6.781727044025352,
      "learning_rate": 1.944444444444445e-06,
      "loss": 0.8434,
      "step": 21
    },
    {
      "epoch": 0.06128133704735376,
      "grad_norm": 8.809429202473247,
      "learning_rate": 2.037037037037037e-06,
      "loss": 0.7833,
      "step": 22
    },
    {
      "epoch": 0.06406685236768803,
      "grad_norm": 6.688993460767229,
      "learning_rate": 2.1296296296296298e-06,
      "loss": 0.7698,
      "step": 23
    },
    {
      "epoch": 0.06685236768802229,
      "grad_norm": 6.161972387446697,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.7612,
      "step": 24
    },
    {
      "epoch": 0.06963788300835655,
      "grad_norm": 6.54260778754855,
      "learning_rate": 2.314814814814815e-06,
      "loss": 0.7324,
      "step": 25
    },
    {
      "epoch": 0.07242339832869081,
      "grad_norm": 5.586611958565188,
      "learning_rate": 2.4074074074074075e-06,
      "loss": 0.7458,
      "step": 26
    },
    {
      "epoch": 0.07520891364902507,
      "grad_norm": 4.617583560881293,
      "learning_rate": 2.5e-06,
      "loss": 0.7248,
      "step": 27
    },
    {
      "epoch": 0.07799442896935933,
      "grad_norm": 4.690727976770273,
      "learning_rate": 2.5925925925925925e-06,
      "loss": 0.6563,
      "step": 28
    },
    {
      "epoch": 0.0807799442896936,
      "grad_norm": 10.984940302977112,
      "learning_rate": 2.6851851851851856e-06,
      "loss": 0.6456,
      "step": 29
    },
    {
      "epoch": 0.08356545961002786,
      "grad_norm": 5.473651235472079,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 0.5949,
      "step": 30
    },
    {
      "epoch": 0.08635097493036212,
      "grad_norm": 10.2638959710858,
      "learning_rate": 2.8703703703703706e-06,
      "loss": 0.6007,
      "step": 31
    },
    {
      "epoch": 0.08913649025069638,
      "grad_norm": 7.272653062800589,
      "learning_rate": 2.962962962962963e-06,
      "loss": 0.5538,
      "step": 32
    },
    {
      "epoch": 0.09192200557103064,
      "grad_norm": 6.2308008249819755,
      "learning_rate": 3.055555555555556e-06,
      "loss": 0.6011,
      "step": 33
    },
    {
      "epoch": 0.0947075208913649,
      "grad_norm": 5.854416902792981,
      "learning_rate": 3.1481481481481483e-06,
      "loss": 0.5163,
      "step": 34
    },
    {
      "epoch": 0.09749303621169916,
      "grad_norm": 4.249502476242808,
      "learning_rate": 3.240740740740741e-06,
      "loss": 0.5134,
      "step": 35
    },
    {
      "epoch": 0.10027855153203342,
      "grad_norm": 5.200738988029909,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.5383,
      "step": 36
    },
    {
      "epoch": 0.10306406685236769,
      "grad_norm": 3.6172508277695816,
      "learning_rate": 3.4259259259259265e-06,
      "loss": 0.5522,
      "step": 37
    },
    {
      "epoch": 0.10584958217270195,
      "grad_norm": 4.995690084645839,
      "learning_rate": 3.5185185185185187e-06,
      "loss": 0.5412,
      "step": 38
    },
    {
      "epoch": 0.10863509749303621,
      "grad_norm": 3.5813358610565635,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 0.4892,
      "step": 39
    },
    {
      "epoch": 0.11142061281337047,
      "grad_norm": 11.152551178583757,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.5587,
      "step": 40
    },
    {
      "epoch": 0.11420612813370473,
      "grad_norm": 12.967773922668536,
      "learning_rate": 3.796296296296297e-06,
      "loss": 0.517,
      "step": 41
    },
    {
      "epoch": 0.116991643454039,
      "grad_norm": 4.413123280796825,
      "learning_rate": 3.88888888888889e-06,
      "loss": 0.553,
      "step": 42
    },
    {
      "epoch": 0.11977715877437325,
      "grad_norm": 11.761322647896323,
      "learning_rate": 3.9814814814814814e-06,
      "loss": 0.4786,
      "step": 43
    },
    {
      "epoch": 0.12256267409470752,
      "grad_norm": 12.499931079212883,
      "learning_rate": 4.074074074074074e-06,
      "loss": 0.5201,
      "step": 44
    },
    {
      "epoch": 0.12534818941504178,
      "grad_norm": 4.981631784638329,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.4786,
      "step": 45
    },
    {
      "epoch": 0.12813370473537605,
      "grad_norm": 9.950350243868158,
      "learning_rate": 4.2592592592592596e-06,
      "loss": 0.4972,
      "step": 46
    },
    {
      "epoch": 0.1309192200557103,
      "grad_norm": 13.38883748639295,
      "learning_rate": 4.351851851851852e-06,
      "loss": 0.4906,
      "step": 47
    },
    {
      "epoch": 0.13370473537604458,
      "grad_norm": 10.235273090749509,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.473,
      "step": 48
    },
    {
      "epoch": 0.13649025069637882,
      "grad_norm": 3.9304734391023315,
      "learning_rate": 4.537037037037038e-06,
      "loss": 0.5285,
      "step": 49
    },
    {
      "epoch": 0.1392757660167131,
      "grad_norm": 5.891406814906414,
      "learning_rate": 4.62962962962963e-06,
      "loss": 0.4576,
      "step": 50
    },
    {
      "epoch": 0.14206128133704735,
      "grad_norm": 4.473395043515243,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.4823,
      "step": 51
    },
    {
      "epoch": 0.14484679665738162,
      "grad_norm": 2.4602304929393672,
      "learning_rate": 4.814814814814815e-06,
      "loss": 0.4718,
      "step": 52
    },
    {
      "epoch": 0.14763231197771587,
      "grad_norm": 7.848743710390298,
      "learning_rate": 4.907407407407408e-06,
      "loss": 0.513,
      "step": 53
    },
    {
      "epoch": 0.15041782729805014,
      "grad_norm": 9.829579377160304,
      "learning_rate": 5e-06,
      "loss": 0.4687,
      "step": 54
    },
    {
      "epoch": 0.1532033426183844,
      "grad_norm": 4.541289727912357,
      "learning_rate": 5.092592592592593e-06,
      "loss": 0.4839,
      "step": 55
    },
    {
      "epoch": 0.15598885793871867,
      "grad_norm": 4.05707710888591,
      "learning_rate": 5.185185185185185e-06,
      "loss": 0.4746,
      "step": 56
    },
    {
      "epoch": 0.15877437325905291,
      "grad_norm": 7.681345724438966,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 0.4785,
      "step": 57
    },
    {
      "epoch": 0.1615598885793872,
      "grad_norm": 4.787986234617247,
      "learning_rate": 5.370370370370371e-06,
      "loss": 0.4886,
      "step": 58
    },
    {
      "epoch": 0.16434540389972144,
      "grad_norm": 4.318777468233646,
      "learning_rate": 5.462962962962963e-06,
      "loss": 0.4989,
      "step": 59
    },
    {
      "epoch": 0.1671309192200557,
      "grad_norm": 2.6826923246620797,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.4751,
      "step": 60
    },
    {
      "epoch": 0.16991643454038996,
      "grad_norm": 3.7803024323754495,
      "learning_rate": 5.6481481481481485e-06,
      "loss": 0.4679,
      "step": 61
    },
    {
      "epoch": 0.17270194986072424,
      "grad_norm": 3.381738320298815,
      "learning_rate": 5.740740740740741e-06,
      "loss": 0.4706,
      "step": 62
    },
    {
      "epoch": 0.17548746518105848,
      "grad_norm": 3.237855353416699,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.4414,
      "step": 63
    },
    {
      "epoch": 0.17827298050139276,
      "grad_norm": 3.957459982374311,
      "learning_rate": 5.925925925925926e-06,
      "loss": 0.4803,
      "step": 64
    },
    {
      "epoch": 0.181058495821727,
      "grad_norm": 3.7764420420882674,
      "learning_rate": 6.018518518518519e-06,
      "loss": 0.5091,
      "step": 65
    },
    {
      "epoch": 0.18384401114206128,
      "grad_norm": 5.894287288034545,
      "learning_rate": 6.111111111111112e-06,
      "loss": 0.4793,
      "step": 66
    },
    {
      "epoch": 0.18662952646239556,
      "grad_norm": 4.88479881884403,
      "learning_rate": 6.203703703703704e-06,
      "loss": 0.4839,
      "step": 67
    },
    {
      "epoch": 0.1894150417827298,
      "grad_norm": 3.054097102372765,
      "learning_rate": 6.296296296296297e-06,
      "loss": 0.4709,
      "step": 68
    },
    {
      "epoch": 0.19220055710306408,
      "grad_norm": 4.705541912254048,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 0.4663,
      "step": 69
    },
    {
      "epoch": 0.19498607242339833,
      "grad_norm": 2.895747808875905,
      "learning_rate": 6.481481481481482e-06,
      "loss": 0.4584,
      "step": 70
    },
    {
      "epoch": 0.1977715877437326,
      "grad_norm": 4.5229305928690575,
      "learning_rate": 6.574074074074075e-06,
      "loss": 0.4947,
      "step": 71
    },
    {
      "epoch": 0.20055710306406685,
      "grad_norm": 5.916813441790228,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.4712,
      "step": 72
    },
    {
      "epoch": 0.20334261838440112,
      "grad_norm": 7.122681577218259,
      "learning_rate": 6.75925925925926e-06,
      "loss": 0.4629,
      "step": 73
    },
    {
      "epoch": 0.20612813370473537,
      "grad_norm": 5.892373474625553,
      "learning_rate": 6.851851851851853e-06,
      "loss": 0.4726,
      "step": 74
    },
    {
      "epoch": 0.20891364902506965,
      "grad_norm": 4.529037806011145,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.4284,
      "step": 75
    },
    {
      "epoch": 0.2116991643454039,
      "grad_norm": 5.114165747594991,
      "learning_rate": 7.0370370370370375e-06,
      "loss": 0.4887,
      "step": 76
    },
    {
      "epoch": 0.21448467966573817,
      "grad_norm": 4.904502614917597,
      "learning_rate": 7.129629629629629e-06,
      "loss": 0.4608,
      "step": 77
    },
    {
      "epoch": 0.21727019498607242,
      "grad_norm": 4.13683133131575,
      "learning_rate": 7.222222222222223e-06,
      "loss": 0.4549,
      "step": 78
    },
    {
      "epoch": 0.2200557103064067,
      "grad_norm": 5.40517360239056,
      "learning_rate": 7.314814814814816e-06,
      "loss": 0.4902,
      "step": 79
    },
    {
      "epoch": 0.22284122562674094,
      "grad_norm": 4.02573448200883,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.4395,
      "step": 80
    },
    {
      "epoch": 0.22562674094707522,
      "grad_norm": 3.2283387729080166,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4437,
      "step": 81
    },
    {
      "epoch": 0.22841225626740946,
      "grad_norm": 3.8600453249494846,
      "learning_rate": 7.592592592592594e-06,
      "loss": 0.4231,
      "step": 82
    },
    {
      "epoch": 0.23119777158774374,
      "grad_norm": 2.156675686386554,
      "learning_rate": 7.685185185185185e-06,
      "loss": 0.461,
      "step": 83
    },
    {
      "epoch": 0.233983286908078,
      "grad_norm": 2.5840861162453277,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.4385,
      "step": 84
    },
    {
      "epoch": 0.23676880222841226,
      "grad_norm": 3.197694640437121,
      "learning_rate": 7.870370370370372e-06,
      "loss": 0.4487,
      "step": 85
    },
    {
      "epoch": 0.2395543175487465,
      "grad_norm": 4.328756980685079,
      "learning_rate": 7.962962962962963e-06,
      "loss": 0.4472,
      "step": 86
    },
    {
      "epoch": 0.24233983286908078,
      "grad_norm": 4.128057324251598,
      "learning_rate": 8.055555555555557e-06,
      "loss": 0.4536,
      "step": 87
    },
    {
      "epoch": 0.24512534818941503,
      "grad_norm": 3.2594096309245844,
      "learning_rate": 8.148148148148148e-06,
      "loss": 0.4541,
      "step": 88
    },
    {
      "epoch": 0.2479108635097493,
      "grad_norm": 3.7073660748092943,
      "learning_rate": 8.240740740740741e-06,
      "loss": 0.4206,
      "step": 89
    },
    {
      "epoch": 0.25069637883008355,
      "grad_norm": 3.1944515592118616,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.4336,
      "step": 90
    },
    {
      "epoch": 0.25348189415041783,
      "grad_norm": 2.74283755239211,
      "learning_rate": 8.425925925925926e-06,
      "loss": 0.4361,
      "step": 91
    },
    {
      "epoch": 0.2562674094707521,
      "grad_norm": 2.8007467423126773,
      "learning_rate": 8.518518518518519e-06,
      "loss": 0.4338,
      "step": 92
    },
    {
      "epoch": 0.2590529247910863,
      "grad_norm": 3.5078727104632272,
      "learning_rate": 8.611111111111112e-06,
      "loss": 0.4774,
      "step": 93
    },
    {
      "epoch": 0.2618384401114206,
      "grad_norm": 3.4570650071682283,
      "learning_rate": 8.703703703703705e-06,
      "loss": 0.4776,
      "step": 94
    },
    {
      "epoch": 0.2646239554317549,
      "grad_norm": 3.5095193067757897,
      "learning_rate": 8.796296296296297e-06,
      "loss": 0.4554,
      "step": 95
    },
    {
      "epoch": 0.26740947075208915,
      "grad_norm": 4.3716247000609325,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.4622,
      "step": 96
    },
    {
      "epoch": 0.27019498607242337,
      "grad_norm": 3.6797447867301356,
      "learning_rate": 8.981481481481483e-06,
      "loss": 0.4369,
      "step": 97
    },
    {
      "epoch": 0.27298050139275765,
      "grad_norm": 5.53680301109746,
      "learning_rate": 9.074074074074075e-06,
      "loss": 0.4457,
      "step": 98
    },
    {
      "epoch": 0.2757660167130919,
      "grad_norm": 5.9694427246661625,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.4555,
      "step": 99
    },
    {
      "epoch": 0.2785515320334262,
      "grad_norm": 2.636956037594158,
      "learning_rate": 9.25925925925926e-06,
      "loss": 0.4645,
      "step": 100
    },
    {
      "epoch": 0.28133704735376047,
      "grad_norm": 4.834060779825191,
      "learning_rate": 9.351851851851854e-06,
      "loss": 0.4451,
      "step": 101
    },
    {
      "epoch": 0.2841225626740947,
      "grad_norm": 3.6013502926306225,
      "learning_rate": 9.444444444444445e-06,
      "loss": 0.449,
      "step": 102
    },
    {
      "epoch": 0.28690807799442897,
      "grad_norm": 2.3930475552275303,
      "learning_rate": 9.537037037037037e-06,
      "loss": 0.4086,
      "step": 103
    },
    {
      "epoch": 0.28969359331476324,
      "grad_norm": 3.7667066909749876,
      "learning_rate": 9.62962962962963e-06,
      "loss": 0.451,
      "step": 104
    },
    {
      "epoch": 0.2924791086350975,
      "grad_norm": 4.786178741004431,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.4308,
      "step": 105
    },
    {
      "epoch": 0.29526462395543174,
      "grad_norm": 2.4645208382320347,
      "learning_rate": 9.814814814814815e-06,
      "loss": 0.4313,
      "step": 106
    },
    {
      "epoch": 0.298050139275766,
      "grad_norm": 4.798357978396935,
      "learning_rate": 9.907407407407408e-06,
      "loss": 0.4006,
      "step": 107
    },
    {
      "epoch": 0.3008356545961003,
      "grad_norm": 2.239558601213019,
      "learning_rate": 1e-05,
      "loss": 0.4077,
      "step": 108
    },
    {
      "epoch": 0.30362116991643456,
      "grad_norm": 6.067706007201533,
      "learning_rate": 9.999973722029575e-06,
      "loss": 0.4804,
      "step": 109
    },
    {
      "epoch": 0.3064066852367688,
      "grad_norm": 2.2508629024327163,
      "learning_rate": 9.999894888394505e-06,
      "loss": 0.4356,
      "step": 110
    },
    {
      "epoch": 0.30919220055710306,
      "grad_norm": 5.1774440744985775,
      "learning_rate": 9.999763499923432e-06,
      "loss": 0.4643,
      "step": 111
    },
    {
      "epoch": 0.31197771587743733,
      "grad_norm": 7.157771342170821,
      "learning_rate": 9.999579557997402e-06,
      "loss": 0.4481,
      "step": 112
    },
    {
      "epoch": 0.3147632311977716,
      "grad_norm": 4.17559774946349,
      "learning_rate": 9.999343064549862e-06,
      "loss": 0.4441,
      "step": 113
    },
    {
      "epoch": 0.31754874651810583,
      "grad_norm": 4.658144419836312,
      "learning_rate": 9.999054022066643e-06,
      "loss": 0.4417,
      "step": 114
    },
    {
      "epoch": 0.3203342618384401,
      "grad_norm": 4.327618564784681,
      "learning_rate": 9.998712433585919e-06,
      "loss": 0.4218,
      "step": 115
    },
    {
      "epoch": 0.3231197771587744,
      "grad_norm": 6.815079792886309,
      "learning_rate": 9.998318302698198e-06,
      "loss": 0.4406,
      "step": 116
    },
    {
      "epoch": 0.32590529247910865,
      "grad_norm": 6.094510293492715,
      "learning_rate": 9.997871633546257e-06,
      "loss": 0.4544,
      "step": 117
    },
    {
      "epoch": 0.3286908077994429,
      "grad_norm": 4.503287942396959,
      "learning_rate": 9.997372430825125e-06,
      "loss": 0.4231,
      "step": 118
    },
    {
      "epoch": 0.33147632311977715,
      "grad_norm": 4.861313404736187,
      "learning_rate": 9.99682069978201e-06,
      "loss": 0.4381,
      "step": 119
    },
    {
      "epoch": 0.3342618384401114,
      "grad_norm": 2.7683971117271526,
      "learning_rate": 9.996216446216267e-06,
      "loss": 0.4409,
      "step": 120
    },
    {
      "epoch": 0.3370473537604457,
      "grad_norm": 7.444096900392287,
      "learning_rate": 9.995559676479317e-06,
      "loss": 0.4095,
      "step": 121
    },
    {
      "epoch": 0.3398328690807799,
      "grad_norm": 7.999395347247279,
      "learning_rate": 9.994850397474588e-06,
      "loss": 0.4623,
      "step": 122
    },
    {
      "epoch": 0.3426183844011142,
      "grad_norm": 2.2946622379096158,
      "learning_rate": 9.994088616657445e-06,
      "loss": 0.4105,
      "step": 123
    },
    {
      "epoch": 0.34540389972144847,
      "grad_norm": 2.0758020467431293,
      "learning_rate": 9.993274342035111e-06,
      "loss": 0.4124,
      "step": 124
    },
    {
      "epoch": 0.34818941504178275,
      "grad_norm": 2.6058926668026134,
      "learning_rate": 9.992407582166582e-06,
      "loss": 0.4242,
      "step": 125
    },
    {
      "epoch": 0.35097493036211697,
      "grad_norm": 2.615125718538418,
      "learning_rate": 9.99148834616253e-06,
      "loss": 0.4365,
      "step": 126
    },
    {
      "epoch": 0.35376044568245124,
      "grad_norm": 5.092308170043417,
      "learning_rate": 9.990516643685222e-06,
      "loss": 0.4942,
      "step": 127
    },
    {
      "epoch": 0.3565459610027855,
      "grad_norm": 1.7401489925535345,
      "learning_rate": 9.9894924849484e-06,
      "loss": 0.4206,
      "step": 128
    },
    {
      "epoch": 0.3593314763231198,
      "grad_norm": 2.7321121823109653,
      "learning_rate": 9.988415880717195e-06,
      "loss": 0.4437,
      "step": 129
    },
    {
      "epoch": 0.362116991643454,
      "grad_norm": 5.069168456453674,
      "learning_rate": 9.987286842307991e-06,
      "loss": 0.4382,
      "step": 130
    },
    {
      "epoch": 0.3649025069637883,
      "grad_norm": 3.4693225909922383,
      "learning_rate": 9.986105381588329e-06,
      "loss": 0.4406,
      "step": 131
    },
    {
      "epoch": 0.36768802228412256,
      "grad_norm": 2.2075063476184833,
      "learning_rate": 9.98487151097676e-06,
      "loss": 0.4219,
      "step": 132
    },
    {
      "epoch": 0.37047353760445684,
      "grad_norm": 9.994530338531517,
      "learning_rate": 9.983585243442733e-06,
      "loss": 0.4341,
      "step": 133
    },
    {
      "epoch": 0.3732590529247911,
      "grad_norm": 8.08431530054485,
      "learning_rate": 9.982246592506446e-06,
      "loss": 0.436,
      "step": 134
    },
    {
      "epoch": 0.37604456824512533,
      "grad_norm": 4.240362643934863,
      "learning_rate": 9.980855572238715e-06,
      "loss": 0.4112,
      "step": 135
    },
    {
      "epoch": 0.3788300835654596,
      "grad_norm": 3.2009164344694443,
      "learning_rate": 9.979412197260811e-06,
      "loss": 0.4268,
      "step": 136
    },
    {
      "epoch": 0.3816155988857939,
      "grad_norm": 4.030087621292174,
      "learning_rate": 9.977916482744323e-06,
      "loss": 0.4537,
      "step": 137
    },
    {
      "epoch": 0.38440111420612816,
      "grad_norm": 5.3537743428136055,
      "learning_rate": 9.976368444410985e-06,
      "loss": 0.4114,
      "step": 138
    },
    {
      "epoch": 0.3871866295264624,
      "grad_norm": 2.7867069381197656,
      "learning_rate": 9.974768098532521e-06,
      "loss": 0.4193,
      "step": 139
    },
    {
      "epoch": 0.38997214484679665,
      "grad_norm": 7.703498230545111,
      "learning_rate": 9.973115461930469e-06,
      "loss": 0.42,
      "step": 140
    },
    {
      "epoch": 0.39275766016713093,
      "grad_norm": 8.077152574159665,
      "learning_rate": 9.971410551976001e-06,
      "loss": 0.4577,
      "step": 141
    },
    {
      "epoch": 0.3955431754874652,
      "grad_norm": 3.07530751230875,
      "learning_rate": 9.969653386589749e-06,
      "loss": 0.4183,
      "step": 142
    },
    {
      "epoch": 0.3983286908077994,
      "grad_norm": 2.7626250447391523,
      "learning_rate": 9.967843984241606e-06,
      "loss": 0.3896,
      "step": 143
    },
    {
      "epoch": 0.4011142061281337,
      "grad_norm": 3.975481440362994,
      "learning_rate": 9.96598236395054e-06,
      "loss": 0.4239,
      "step": 144
    },
    {
      "epoch": 0.403899721448468,
      "grad_norm": 1.965257324256198,
      "learning_rate": 9.964068545284396e-06,
      "loss": 0.4127,
      "step": 145
    },
    {
      "epoch": 0.40668523676880225,
      "grad_norm": 3.0809014250316022,
      "learning_rate": 9.96210254835968e-06,
      "loss": 0.4017,
      "step": 146
    },
    {
      "epoch": 0.40947075208913647,
      "grad_norm": 2.9104967654078124,
      "learning_rate": 9.960084393841355e-06,
      "loss": 0.4086,
      "step": 147
    },
    {
      "epoch": 0.41225626740947074,
      "grad_norm": 6.317450294607272,
      "learning_rate": 9.958014102942623e-06,
      "loss": 0.4551,
      "step": 148
    },
    {
      "epoch": 0.415041782729805,
      "grad_norm": 6.717664733426921,
      "learning_rate": 9.955891697424704e-06,
      "loss": 0.4279,
      "step": 149
    },
    {
      "epoch": 0.4178272980501393,
      "grad_norm": 3.9292137959030224,
      "learning_rate": 9.953717199596598e-06,
      "loss": 0.3874,
      "step": 150
    },
    {
      "epoch": 0.4206128133704735,
      "grad_norm": 9.649508648086734,
      "learning_rate": 9.951490632314863e-06,
      "loss": 0.4708,
      "step": 151
    },
    {
      "epoch": 0.4233983286908078,
      "grad_norm": 9.508144677662775,
      "learning_rate": 9.949212018983366e-06,
      "loss": 0.4225,
      "step": 152
    },
    {
      "epoch": 0.42618384401114207,
      "grad_norm": 5.0894520070321,
      "learning_rate": 9.94688138355304e-06,
      "loss": 0.436,
      "step": 153
    },
    {
      "epoch": 0.42896935933147634,
      "grad_norm": 6.448867991588042,
      "learning_rate": 9.944498750521634e-06,
      "loss": 0.4145,
      "step": 154
    },
    {
      "epoch": 0.43175487465181056,
      "grad_norm": 5.421064748705665,
      "learning_rate": 9.94206414493345e-06,
      "loss": 0.4222,
      "step": 155
    },
    {
      "epoch": 0.43454038997214484,
      "grad_norm": 5.438995230525324,
      "learning_rate": 9.939577592379088e-06,
      "loss": 0.4305,
      "step": 156
    },
    {
      "epoch": 0.4373259052924791,
      "grad_norm": 5.370429829876015,
      "learning_rate": 9.93703911899517e-06,
      "loss": 0.4049,
      "step": 157
    },
    {
      "epoch": 0.4401114206128134,
      "grad_norm": 2.5242952466757798,
      "learning_rate": 9.934448751464064e-06,
      "loss": 0.3938,
      "step": 158
    },
    {
      "epoch": 0.4428969359331476,
      "grad_norm": 3.3434878177442555,
      "learning_rate": 9.931806517013612e-06,
      "loss": 0.4334,
      "step": 159
    },
    {
      "epoch": 0.4456824512534819,
      "grad_norm": 2.53930414952446,
      "learning_rate": 9.92911244341684e-06,
      "loss": 0.4322,
      "step": 160
    },
    {
      "epoch": 0.44846796657381616,
      "grad_norm": 6.553033171905046,
      "learning_rate": 9.926366558991659e-06,
      "loss": 0.4425,
      "step": 161
    },
    {
      "epoch": 0.45125348189415043,
      "grad_norm": 5.669950322393376,
      "learning_rate": 9.923568892600579e-06,
      "loss": 0.4249,
      "step": 162
    },
    {
      "epoch": 0.45403899721448465,
      "grad_norm": 2.194603245303184,
      "learning_rate": 9.920719473650397e-06,
      "loss": 0.4167,
      "step": 163
    },
    {
      "epoch": 0.4568245125348189,
      "grad_norm": 2.9437930780615136,
      "learning_rate": 9.917818332091892e-06,
      "loss": 0.4167,
      "step": 164
    },
    {
      "epoch": 0.4596100278551532,
      "grad_norm": 1.7936960137918836,
      "learning_rate": 9.91486549841951e-06,
      "loss": 0.4307,
      "step": 165
    },
    {
      "epoch": 0.4623955431754875,
      "grad_norm": 3.4597191396058813,
      "learning_rate": 9.91186100367104e-06,
      "loss": 0.4253,
      "step": 166
    },
    {
      "epoch": 0.46518105849582175,
      "grad_norm": 2.5908696822259194,
      "learning_rate": 9.90880487942729e-06,
      "loss": 0.4516,
      "step": 167
    },
    {
      "epoch": 0.467966573816156,
      "grad_norm": 1.963082249210851,
      "learning_rate": 9.905697157811761e-06,
      "loss": 0.3719,
      "step": 168
    },
    {
      "epoch": 0.47075208913649025,
      "grad_norm": 1.831851357062636,
      "learning_rate": 9.902537871490297e-06,
      "loss": 0.4549,
      "step": 169
    },
    {
      "epoch": 0.4735376044568245,
      "grad_norm": 2.9888539685235758,
      "learning_rate": 9.899327053670751e-06,
      "loss": 0.3922,
      "step": 170
    },
    {
      "epoch": 0.4763231197771588,
      "grad_norm": 3.075638975463272,
      "learning_rate": 9.896064738102635e-06,
      "loss": 0.3936,
      "step": 171
    },
    {
      "epoch": 0.479108635097493,
      "grad_norm": 5.0835449170924445,
      "learning_rate": 9.89275095907676e-06,
      "loss": 0.3841,
      "step": 172
    },
    {
      "epoch": 0.4818941504178273,
      "grad_norm": 1.9661770231549989,
      "learning_rate": 9.889385751424882e-06,
      "loss": 0.4137,
      "step": 173
    },
    {
      "epoch": 0.48467966573816157,
      "grad_norm": 3.9632879066265354,
      "learning_rate": 9.885969150519332e-06,
      "loss": 0.424,
      "step": 174
    },
    {
      "epoch": 0.48746518105849584,
      "grad_norm": 1.5976755516140246,
      "learning_rate": 9.882501192272642e-06,
      "loss": 0.3802,
      "step": 175
    },
    {
      "epoch": 0.49025069637883006,
      "grad_norm": 3.1107170523974523,
      "learning_rate": 9.878981913137178e-06,
      "loss": 0.432,
      "step": 176
    },
    {
      "epoch": 0.49303621169916434,
      "grad_norm": 2.556665621671862,
      "learning_rate": 9.875411350104745e-06,
      "loss": 0.3948,
      "step": 177
    },
    {
      "epoch": 0.4958217270194986,
      "grad_norm": 2.5794455002701024,
      "learning_rate": 9.8717895407062e-06,
      "loss": 0.4502,
      "step": 178
    },
    {
      "epoch": 0.4986072423398329,
      "grad_norm": 4.677141503858308,
      "learning_rate": 9.868116523011063e-06,
      "loss": 0.4497,
      "step": 179
    },
    {
      "epoch": 0.5013927576601671,
      "grad_norm": 4.671061355481848,
      "learning_rate": 9.864392335627118e-06,
      "loss": 0.4347,
      "step": 180
    },
    {
      "epoch": 0.5041782729805014,
      "grad_norm": 3.370942228941731,
      "learning_rate": 9.860617017699993e-06,
      "loss": 0.39,
      "step": 181
    },
    {
      "epoch": 0.5069637883008357,
      "grad_norm": 3.9914824460208584,
      "learning_rate": 9.856790608912775e-06,
      "loss": 0.4118,
      "step": 182
    },
    {
      "epoch": 0.5097493036211699,
      "grad_norm": 5.849739268274225,
      "learning_rate": 9.852913149485556e-06,
      "loss": 0.4157,
      "step": 183
    },
    {
      "epoch": 0.5125348189415042,
      "grad_norm": 3.0708790936689825,
      "learning_rate": 9.848984680175049e-06,
      "loss": 0.4248,
      "step": 184
    },
    {
      "epoch": 0.5153203342618384,
      "grad_norm": 4.795336850693523,
      "learning_rate": 9.84500524227413e-06,
      "loss": 0.3962,
      "step": 185
    },
    {
      "epoch": 0.5181058495821727,
      "grad_norm": 5.255261240208525,
      "learning_rate": 9.840974877611423e-06,
      "loss": 0.4167,
      "step": 186
    },
    {
      "epoch": 0.520891364902507,
      "grad_norm": 2.707345636214946,
      "learning_rate": 9.836893628550846e-06,
      "loss": 0.4157,
      "step": 187
    },
    {
      "epoch": 0.5236768802228412,
      "grad_norm": 2.176390492101013,
      "learning_rate": 9.832761537991177e-06,
      "loss": 0.4296,
      "step": 188
    },
    {
      "epoch": 0.5264623955431755,
      "grad_norm": 3.757606670960894,
      "learning_rate": 9.8285786493656e-06,
      "loss": 0.3982,
      "step": 189
    },
    {
      "epoch": 0.5292479108635098,
      "grad_norm": 2.6063228766710878,
      "learning_rate": 9.824345006641243e-06,
      "loss": 0.4112,
      "step": 190
    },
    {
      "epoch": 0.532033426183844,
      "grad_norm": 2.1087605571728343,
      "learning_rate": 9.820060654318718e-06,
      "loss": 0.4264,
      "step": 191
    },
    {
      "epoch": 0.5348189415041783,
      "grad_norm": 3.5515430080279384,
      "learning_rate": 9.815725637431663e-06,
      "loss": 0.4155,
      "step": 192
    },
    {
      "epoch": 0.5376044568245125,
      "grad_norm": 3.807679083830799,
      "learning_rate": 9.811340001546252e-06,
      "loss": 0.3852,
      "step": 193
    },
    {
      "epoch": 0.5403899721448467,
      "grad_norm": 3.99926299961232,
      "learning_rate": 9.806903792760733e-06,
      "loss": 0.4077,
      "step": 194
    },
    {
      "epoch": 0.5431754874651811,
      "grad_norm": 5.0918302218081095,
      "learning_rate": 9.80241705770493e-06,
      "loss": 0.3899,
      "step": 195
    },
    {
      "epoch": 0.5459610027855153,
      "grad_norm": 1.9007122969036225,
      "learning_rate": 9.797879843539759e-06,
      "loss": 0.4184,
      "step": 196
    },
    {
      "epoch": 0.5487465181058496,
      "grad_norm": 3.965245191949125,
      "learning_rate": 9.793292197956732e-06,
      "loss": 0.4453,
      "step": 197
    },
    {
      "epoch": 0.5515320334261838,
      "grad_norm": 6.5943441571922605,
      "learning_rate": 9.788654169177454e-06,
      "loss": 0.4171,
      "step": 198
    },
    {
      "epoch": 0.5543175487465181,
      "grad_norm": 2.049441394869484,
      "learning_rate": 9.78396580595312e-06,
      "loss": 0.3988,
      "step": 199
    },
    {
      "epoch": 0.5571030640668524,
      "grad_norm": 3.4889331162189094,
      "learning_rate": 9.779227157563998e-06,
      "loss": 0.397,
      "step": 200
    },
    {
      "epoch": 0.5598885793871866,
      "grad_norm": 3.062679189465898,
      "learning_rate": 9.77443827381891e-06,
      "loss": 0.4051,
      "step": 201
    },
    {
      "epoch": 0.5626740947075209,
      "grad_norm": 2.490650203198882,
      "learning_rate": 9.76959920505472e-06,
      "loss": 0.3988,
      "step": 202
    },
    {
      "epoch": 0.5654596100278552,
      "grad_norm": 2.7095283036801,
      "learning_rate": 9.764710002135784e-06,
      "loss": 0.3559,
      "step": 203
    },
    {
      "epoch": 0.5682451253481894,
      "grad_norm": 3.950667422664387,
      "learning_rate": 9.759770716453436e-06,
      "loss": 0.4228,
      "step": 204
    },
    {
      "epoch": 0.5710306406685237,
      "grad_norm": 3.1925902617328425,
      "learning_rate": 9.754781399925439e-06,
      "loss": 0.4099,
      "step": 205
    },
    {
      "epoch": 0.5738161559888579,
      "grad_norm": 4.252961013390271,
      "learning_rate": 9.749742104995437e-06,
      "loss": 0.3847,
      "step": 206
    },
    {
      "epoch": 0.5766016713091922,
      "grad_norm": 4.859290271508331,
      "learning_rate": 9.744652884632406e-06,
      "loss": 0.4004,
      "step": 207
    },
    {
      "epoch": 0.5793871866295265,
      "grad_norm": 4.0375400028300445,
      "learning_rate": 9.7395137923301e-06,
      "loss": 0.3811,
      "step": 208
    },
    {
      "epoch": 0.5821727019498607,
      "grad_norm": 4.834487992128958,
      "learning_rate": 9.734324882106486e-06,
      "loss": 0.4059,
      "step": 209
    },
    {
      "epoch": 0.584958217270195,
      "grad_norm": 6.316821608798941,
      "learning_rate": 9.729086208503174e-06,
      "loss": 0.4091,
      "step": 210
    },
    {
      "epoch": 0.5877437325905293,
      "grad_norm": 4.5197566056331615,
      "learning_rate": 9.723797826584849e-06,
      "loss": 0.4639,
      "step": 211
    },
    {
      "epoch": 0.5905292479108635,
      "grad_norm": 1.8180543210195619,
      "learning_rate": 9.718459791938688e-06,
      "loss": 0.4226,
      "step": 212
    },
    {
      "epoch": 0.5933147632311978,
      "grad_norm": 4.343688512601359,
      "learning_rate": 9.713072160673778e-06,
      "loss": 0.3949,
      "step": 213
    },
    {
      "epoch": 0.596100278551532,
      "grad_norm": 4.768178257275066,
      "learning_rate": 9.707634989420525e-06,
      "loss": 0.3733,
      "step": 214
    },
    {
      "epoch": 0.5988857938718662,
      "grad_norm": 2.2485510232806503,
      "learning_rate": 9.702148335330059e-06,
      "loss": 0.397,
      "step": 215
    },
    {
      "epoch": 0.6016713091922006,
      "grad_norm": 4.76514100397773,
      "learning_rate": 9.696612256073634e-06,
      "loss": 0.4138,
      "step": 216
    },
    {
      "epoch": 0.6044568245125348,
      "grad_norm": 7.0809089001428225,
      "learning_rate": 9.691026809842021e-06,
      "loss": 0.4185,
      "step": 217
    },
    {
      "epoch": 0.6072423398328691,
      "grad_norm": 4.560160681561198,
      "learning_rate": 9.685392055344894e-06,
      "loss": 0.3874,
      "step": 218
    },
    {
      "epoch": 0.6100278551532033,
      "grad_norm": 3.275937373036597,
      "learning_rate": 9.679708051810222e-06,
      "loss": 0.4001,
      "step": 219
    },
    {
      "epoch": 0.6128133704735376,
      "grad_norm": 3.793117526085643,
      "learning_rate": 9.673974858983632e-06,
      "loss": 0.4057,
      "step": 220
    },
    {
      "epoch": 0.6155988857938719,
      "grad_norm": 1.8061140172013403,
      "learning_rate": 9.668192537127793e-06,
      "loss": 0.4095,
      "step": 221
    },
    {
      "epoch": 0.6183844011142061,
      "grad_norm": 1.8798104198190673,
      "learning_rate": 9.66236114702178e-06,
      "loss": 0.382,
      "step": 222
    },
    {
      "epoch": 0.6211699164345403,
      "grad_norm": 3.7044730506679353,
      "learning_rate": 9.65648074996043e-06,
      "loss": 0.3624,
      "step": 223
    },
    {
      "epoch": 0.6239554317548747,
      "grad_norm": 1.975687448577224,
      "learning_rate": 9.650551407753705e-06,
      "loss": 0.4659,
      "step": 224
    },
    {
      "epoch": 0.6267409470752089,
      "grad_norm": 2.6767074438671847,
      "learning_rate": 9.644573182726035e-06,
      "loss": 0.3829,
      "step": 225
    },
    {
      "epoch": 0.6295264623955432,
      "grad_norm": 2.279077436020356,
      "learning_rate": 9.638546137715668e-06,
      "loss": 0.3727,
      "step": 226
    },
    {
      "epoch": 0.6323119777158774,
      "grad_norm": 1.7774159989407174,
      "learning_rate": 9.632470336074009e-06,
      "loss": 0.3896,
      "step": 227
    },
    {
      "epoch": 0.6350974930362117,
      "grad_norm": 1.4441979240819856,
      "learning_rate": 9.626345841664953e-06,
      "loss": 0.3936,
      "step": 228
    },
    {
      "epoch": 0.637883008356546,
      "grad_norm": 3.6530381556322884,
      "learning_rate": 9.620172718864213e-06,
      "loss": 0.425,
      "step": 229
    },
    {
      "epoch": 0.6406685236768802,
      "grad_norm": 3.3791011517007132,
      "learning_rate": 9.613951032558641e-06,
      "loss": 0.391,
      "step": 230
    },
    {
      "epoch": 0.6434540389972145,
      "grad_norm": 2.057870004474059,
      "learning_rate": 9.607680848145557e-06,
      "loss": 0.3851,
      "step": 231
    },
    {
      "epoch": 0.6462395543175488,
      "grad_norm": 3.807733615581065,
      "learning_rate": 9.601362231532047e-06,
      "loss": 0.3689,
      "step": 232
    },
    {
      "epoch": 0.649025069637883,
      "grad_norm": 2.1375435240527074,
      "learning_rate": 9.59499524913428e-06,
      "loss": 0.4176,
      "step": 233
    },
    {
      "epoch": 0.6518105849582173,
      "grad_norm": 3.2392068690949123,
      "learning_rate": 9.588579967876806e-06,
      "loss": 0.4291,
      "step": 234
    },
    {
      "epoch": 0.6545961002785515,
      "grad_norm": 4.152125319902448,
      "learning_rate": 9.582116455191855e-06,
      "loss": 0.4443,
      "step": 235
    },
    {
      "epoch": 0.6573816155988857,
      "grad_norm": 1.5482496921468176,
      "learning_rate": 9.57560477901862e-06,
      "loss": 0.3865,
      "step": 236
    },
    {
      "epoch": 0.6601671309192201,
      "grad_norm": 1.614155034801863,
      "learning_rate": 9.569045007802558e-06,
      "loss": 0.4088,
      "step": 237
    },
    {
      "epoch": 0.6629526462395543,
      "grad_norm": 1.923084765098344,
      "learning_rate": 9.56243721049466e-06,
      "loss": 0.4315,
      "step": 238
    },
    {
      "epoch": 0.6657381615598886,
      "grad_norm": 1.675934603997266,
      "learning_rate": 9.555781456550725e-06,
      "loss": 0.415,
      "step": 239
    },
    {
      "epoch": 0.6685236768802229,
      "grad_norm": 3.1002823561719572,
      "learning_rate": 9.549077815930636e-06,
      "loss": 0.3925,
      "step": 240
    },
    {
      "epoch": 0.6713091922005571,
      "grad_norm": 3.2211613666558665,
      "learning_rate": 9.542326359097619e-06,
      "loss": 0.3771,
      "step": 241
    },
    {
      "epoch": 0.6740947075208914,
      "grad_norm": 2.0152027050214647,
      "learning_rate": 9.53552715701751e-06,
      "loss": 0.3917,
      "step": 242
    },
    {
      "epoch": 0.6768802228412256,
      "grad_norm": 4.4317259369834465,
      "learning_rate": 9.528680281157999e-06,
      "loss": 0.4118,
      "step": 243
    },
    {
      "epoch": 0.6796657381615598,
      "grad_norm": 4.6272199455261305,
      "learning_rate": 9.521785803487888e-06,
      "loss": 0.3952,
      "step": 244
    },
    {
      "epoch": 0.6824512534818942,
      "grad_norm": 2.0818684393524967,
      "learning_rate": 9.514843796476329e-06,
      "loss": 0.4204,
      "step": 245
    },
    {
      "epoch": 0.6852367688022284,
      "grad_norm": 5.64571883219211,
      "learning_rate": 9.507854333092064e-06,
      "loss": 0.4121,
      "step": 246
    },
    {
      "epoch": 0.6880222841225627,
      "grad_norm": 5.490200310838687,
      "learning_rate": 9.500817486802658e-06,
      "loss": 0.4202,
      "step": 247
    },
    {
      "epoch": 0.6908077994428969,
      "grad_norm": 3.7101288173296845,
      "learning_rate": 9.493733331573724e-06,
      "loss": 0.3737,
      "step": 248
    },
    {
      "epoch": 0.6935933147632312,
      "grad_norm": 2.2924331502523376,
      "learning_rate": 9.486601941868155e-06,
      "loss": 0.366,
      "step": 249
    },
    {
      "epoch": 0.6963788300835655,
      "grad_norm": 3.079156967871073,
      "learning_rate": 9.479423392645327e-06,
      "loss": 0.4322,
      "step": 250
    },
    {
      "epoch": 0.6991643454038997,
      "grad_norm": 4.679980140378487,
      "learning_rate": 9.472197759360322e-06,
      "loss": 0.4465,
      "step": 251
    },
    {
      "epoch": 0.7019498607242339,
      "grad_norm": 2.3488711223696837,
      "learning_rate": 9.464925117963133e-06,
      "loss": 0.4067,
      "step": 252
    },
    {
      "epoch": 0.7047353760445683,
      "grad_norm": 3.1815731903234097,
      "learning_rate": 9.45760554489786e-06,
      "loss": 0.3705,
      "step": 253
    },
    {
      "epoch": 0.7075208913649025,
      "grad_norm": 3.1516070888840093,
      "learning_rate": 9.450239117101913e-06,
      "loss": 0.4296,
      "step": 254
    },
    {
      "epoch": 0.7103064066852368,
      "grad_norm": 3.5185308015669516,
      "learning_rate": 9.442825912005203e-06,
      "loss": 0.4273,
      "step": 255
    },
    {
      "epoch": 0.713091922005571,
      "grad_norm": 3.4207881838131446,
      "learning_rate": 9.435366007529321e-06,
      "loss": 0.4165,
      "step": 256
    },
    {
      "epoch": 0.7158774373259053,
      "grad_norm": 3.719859632060157,
      "learning_rate": 9.427859482086728e-06,
      "loss": 0.3548,
      "step": 257
    },
    {
      "epoch": 0.7186629526462396,
      "grad_norm": 2.173471573592825,
      "learning_rate": 9.420306414579925e-06,
      "loss": 0.4084,
      "step": 258
    },
    {
      "epoch": 0.7214484679665738,
      "grad_norm": 1.6062600255026331,
      "learning_rate": 9.412706884400626e-06,
      "loss": 0.4085,
      "step": 259
    },
    {
      "epoch": 0.724233983286908,
      "grad_norm": 3.3523586691586704,
      "learning_rate": 9.405060971428924e-06,
      "loss": 0.4173,
      "step": 260
    },
    {
      "epoch": 0.7270194986072424,
      "grad_norm": 2.367900304816936,
      "learning_rate": 9.397368756032445e-06,
      "loss": 0.4199,
      "step": 261
    },
    {
      "epoch": 0.7298050139275766,
      "grad_norm": 2.9608049345255933,
      "learning_rate": 9.389630319065518e-06,
      "loss": 0.422,
      "step": 262
    },
    {
      "epoch": 0.7325905292479109,
      "grad_norm": 2.251284958564065,
      "learning_rate": 9.381845741868307e-06,
      "loss": 0.3957,
      "step": 263
    },
    {
      "epoch": 0.7353760445682451,
      "grad_norm": 1.8793308103082458,
      "learning_rate": 9.374015106265968e-06,
      "loss": 0.3808,
      "step": 264
    },
    {
      "epoch": 0.7381615598885793,
      "grad_norm": 1.649606908303967,
      "learning_rate": 9.366138494567785e-06,
      "loss": 0.3858,
      "step": 265
    },
    {
      "epoch": 0.7409470752089137,
      "grad_norm": 2.835505031419529,
      "learning_rate": 9.358215989566304e-06,
      "loss": 0.4353,
      "step": 266
    },
    {
      "epoch": 0.7437325905292479,
      "grad_norm": 2.0052918531825643,
      "learning_rate": 9.35024767453647e-06,
      "loss": 0.4006,
      "step": 267
    },
    {
      "epoch": 0.7465181058495822,
      "grad_norm": 2.345307464311897,
      "learning_rate": 9.34223363323474e-06,
      "loss": 0.3967,
      "step": 268
    },
    {
      "epoch": 0.7493036211699164,
      "grad_norm": 4.2927527987997,
      "learning_rate": 9.334173949898211e-06,
      "loss": 0.3881,
      "step": 269
    },
    {
      "epoch": 0.7520891364902507,
      "grad_norm": 3.5320138990359062,
      "learning_rate": 9.326068709243727e-06,
      "loss": 0.3928,
      "step": 270
    },
    {
      "epoch": 0.754874651810585,
      "grad_norm": 1.9851425003830176,
      "learning_rate": 9.317917996467004e-06,
      "loss": 0.3854,
      "step": 271
    },
    {
      "epoch": 0.7576601671309192,
      "grad_norm": 1.7586901772934243,
      "learning_rate": 9.309721897241712e-06,
      "loss": 0.3882,
      "step": 272
    },
    {
      "epoch": 0.7604456824512534,
      "grad_norm": 5.104894569821865,
      "learning_rate": 9.301480497718594e-06,
      "loss": 0.3793,
      "step": 273
    },
    {
      "epoch": 0.7632311977715878,
      "grad_norm": 5.948849657944124,
      "learning_rate": 9.293193884524554e-06,
      "loss": 0.3995,
      "step": 274
    },
    {
      "epoch": 0.766016713091922,
      "grad_norm": 2.153048118677158,
      "learning_rate": 9.284862144761736e-06,
      "loss": 0.369,
      "step": 275
    },
    {
      "epoch": 0.7688022284122563,
      "grad_norm": 3.575956305245711,
      "learning_rate": 9.276485366006634e-06,
      "loss": 0.3602,
      "step": 276
    },
    {
      "epoch": 0.7715877437325905,
      "grad_norm": 4.96228537672175,
      "learning_rate": 9.268063636309138e-06,
      "loss": 0.3802,
      "step": 277
    },
    {
      "epoch": 0.7743732590529248,
      "grad_norm": 3.943833121398731,
      "learning_rate": 9.259597044191635e-06,
      "loss": 0.3835,
      "step": 278
    },
    {
      "epoch": 0.7771587743732591,
      "grad_norm": 2.1867683699499896,
      "learning_rate": 9.251085678648072e-06,
      "loss": 0.3766,
      "step": 279
    },
    {
      "epoch": 0.7799442896935933,
      "grad_norm": 3.8862107953628864,
      "learning_rate": 9.24252962914301e-06,
      "loss": 0.3953,
      "step": 280
    },
    {
      "epoch": 0.7827298050139275,
      "grad_norm": 6.154506486654496,
      "learning_rate": 9.233928985610693e-06,
      "loss": 0.3933,
      "step": 281
    },
    {
      "epoch": 0.7855153203342619,
      "grad_norm": 3.575670068967989,
      "learning_rate": 9.225283838454111e-06,
      "loss": 0.4203,
      "step": 282
    },
    {
      "epoch": 0.7883008356545961,
      "grad_norm": 1.6836345336815544,
      "learning_rate": 9.216594278544026e-06,
      "loss": 0.423,
      "step": 283
    },
    {
      "epoch": 0.7910863509749304,
      "grad_norm": 3.7855697482833675,
      "learning_rate": 9.20786039721804e-06,
      "loss": 0.3754,
      "step": 284
    },
    {
      "epoch": 0.7938718662952646,
      "grad_norm": 3.346619414441463,
      "learning_rate": 9.199082286279622e-06,
      "loss": 0.3857,
      "step": 285
    },
    {
      "epoch": 0.7966573816155988,
      "grad_norm": 1.4979408442924829,
      "learning_rate": 9.190260037997149e-06,
      "loss": 0.3725,
      "step": 286
    },
    {
      "epoch": 0.7994428969359332,
      "grad_norm": 3.152356373820523,
      "learning_rate": 9.181393745102933e-06,
      "loss": 0.4187,
      "step": 287
    },
    {
      "epoch": 0.8022284122562674,
      "grad_norm": 1.9425065680899039,
      "learning_rate": 9.172483500792246e-06,
      "loss": 0.3857,
      "step": 288
    },
    {
      "epoch": 0.8050139275766016,
      "grad_norm": 3.535195720628514,
      "learning_rate": 9.163529398722341e-06,
      "loss": 0.3792,
      "step": 289
    },
    {
      "epoch": 0.807799442896936,
      "grad_norm": 3.99120710625705,
      "learning_rate": 9.154531533011474e-06,
      "loss": 0.3826,
      "step": 290
    },
    {
      "epoch": 0.8105849582172702,
      "grad_norm": 1.6121629806963718,
      "learning_rate": 9.145489998237902e-06,
      "loss": 0.4196,
      "step": 291
    },
    {
      "epoch": 0.8133704735376045,
      "grad_norm": 4.003771829068115,
      "learning_rate": 9.136404889438898e-06,
      "loss": 0.4139,
      "step": 292
    },
    {
      "epoch": 0.8161559888579387,
      "grad_norm": 4.151848502181565,
      "learning_rate": 9.127276302109751e-06,
      "loss": 0.4146,
      "step": 293
    },
    {
      "epoch": 0.8189415041782729,
      "grad_norm": 1.9767101286664026,
      "learning_rate": 9.11810433220276e-06,
      "loss": 0.3892,
      "step": 294
    },
    {
      "epoch": 0.8217270194986073,
      "grad_norm": 2.0180695788973937,
      "learning_rate": 9.108889076126226e-06,
      "loss": 0.4034,
      "step": 295
    },
    {
      "epoch": 0.8245125348189415,
      "grad_norm": 3.0380750699819496,
      "learning_rate": 9.09963063074344e-06,
      "loss": 0.3899,
      "step": 296
    },
    {
      "epoch": 0.8272980501392758,
      "grad_norm": 1.9538803956190012,
      "learning_rate": 9.090329093371667e-06,
      "loss": 0.4072,
      "step": 297
    },
    {
      "epoch": 0.83008356545961,
      "grad_norm": 1.7369977528779617,
      "learning_rate": 9.08098456178111e-06,
      "loss": 0.3763,
      "step": 298
    },
    {
      "epoch": 0.8328690807799443,
      "grad_norm": 1.664419569080736,
      "learning_rate": 9.071597134193902e-06,
      "loss": 0.3965,
      "step": 299
    },
    {
      "epoch": 0.8356545961002786,
      "grad_norm": 1.7178719513227654,
      "learning_rate": 9.062166909283062e-06,
      "loss": 0.3785,
      "step": 300
    },
    {
      "epoch": 0.8384401114206128,
      "grad_norm": 1.9981468580814143,
      "learning_rate": 9.052693986171458e-06,
      "loss": 0.4071,
      "step": 301
    },
    {
      "epoch": 0.841225626740947,
      "grad_norm": 1.5017802083058862,
      "learning_rate": 9.043178464430767e-06,
      "loss": 0.3749,
      "step": 302
    },
    {
      "epoch": 0.8440111420612814,
      "grad_norm": 1.950730015812699,
      "learning_rate": 9.033620444080427e-06,
      "loss": 0.4137,
      "step": 303
    },
    {
      "epoch": 0.8467966573816156,
      "grad_norm": 2.020168873672014,
      "learning_rate": 9.024020025586592e-06,
      "loss": 0.4007,
      "step": 304
    },
    {
      "epoch": 0.8495821727019499,
      "grad_norm": 2.7244602526778983,
      "learning_rate": 9.014377309861064e-06,
      "loss": 0.408,
      "step": 305
    },
    {
      "epoch": 0.8523676880222841,
      "grad_norm": 1.4825594237249564,
      "learning_rate": 9.004692398260243e-06,
      "loss": 0.4248,
      "step": 306
    },
    {
      "epoch": 0.8551532033426184,
      "grad_norm": 2.0220155931136556,
      "learning_rate": 8.99496539258406e-06,
      "loss": 0.3772,
      "step": 307
    },
    {
      "epoch": 0.8579387186629527,
      "grad_norm": 1.8795459442184206,
      "learning_rate": 8.985196395074899e-06,
      "loss": 0.3808,
      "step": 308
    },
    {
      "epoch": 0.8607242339832869,
      "grad_norm": 3.299701757882584,
      "learning_rate": 8.975385508416532e-06,
      "loss": 0.3924,
      "step": 309
    },
    {
      "epoch": 0.8635097493036211,
      "grad_norm": 2.6776659964289062,
      "learning_rate": 8.965532835733035e-06,
      "loss": 0.4097,
      "step": 310
    },
    {
      "epoch": 0.8662952646239555,
      "grad_norm": 2.349472946543732,
      "learning_rate": 8.955638480587705e-06,
      "loss": 0.4474,
      "step": 311
    },
    {
      "epoch": 0.8690807799442897,
      "grad_norm": 4.7125188479530005,
      "learning_rate": 8.94570254698197e-06,
      "loss": 0.4136,
      "step": 312
    },
    {
      "epoch": 0.871866295264624,
      "grad_norm": 3.953708882201026,
      "learning_rate": 8.935725139354296e-06,
      "loss": 0.4141,
      "step": 313
    },
    {
      "epoch": 0.8746518105849582,
      "grad_norm": 2.7389245473930455,
      "learning_rate": 8.925706362579097e-06,
      "loss": 0.3766,
      "step": 314
    },
    {
      "epoch": 0.8774373259052924,
      "grad_norm": 1.7758914099492815,
      "learning_rate": 8.915646321965615e-06,
      "loss": 0.4103,
      "step": 315
    },
    {
      "epoch": 0.8802228412256268,
      "grad_norm": 6.541389372920981,
      "learning_rate": 8.905545123256834e-06,
      "loss": 0.3498,
      "step": 316
    },
    {
      "epoch": 0.883008356545961,
      "grad_norm": 6.105301301912179,
      "learning_rate": 8.895402872628352e-06,
      "loss": 0.4042,
      "step": 317
    },
    {
      "epoch": 0.8857938718662952,
      "grad_norm": 2.2593965836672565,
      "learning_rate": 8.885219676687277e-06,
      "loss": 0.3682,
      "step": 318
    },
    {
      "epoch": 0.8885793871866295,
      "grad_norm": 1.9994607520188552,
      "learning_rate": 8.874995642471094e-06,
      "loss": 0.3803,
      "step": 319
    },
    {
      "epoch": 0.8913649025069638,
      "grad_norm": 2.8314364985495786,
      "learning_rate": 8.864730877446555e-06,
      "loss": 0.4113,
      "step": 320
    },
    {
      "epoch": 0.8941504178272981,
      "grad_norm": 1.531835427359627,
      "learning_rate": 8.85442548950853e-06,
      "loss": 0.3682,
      "step": 321
    },
    {
      "epoch": 0.8969359331476323,
      "grad_norm": 2.1194609618718263,
      "learning_rate": 8.844079586978897e-06,
      "loss": 0.3648,
      "step": 322
    },
    {
      "epoch": 0.8997214484679665,
      "grad_norm": 1.246221731775411,
      "learning_rate": 8.833693278605381e-06,
      "loss": 0.3869,
      "step": 323
    },
    {
      "epoch": 0.9025069637883009,
      "grad_norm": 2.010842472789246,
      "learning_rate": 8.823266673560426e-06,
      "loss": 0.4035,
      "step": 324
    },
    {
      "epoch": 0.9052924791086351,
      "grad_norm": 1.8183286552280535,
      "learning_rate": 8.812799881440039e-06,
      "loss": 0.3636,
      "step": 325
    },
    {
      "epoch": 0.9080779944289693,
      "grad_norm": 3.7071331416351243,
      "learning_rate": 8.80229301226264e-06,
      "loss": 0.3622,
      "step": 326
    },
    {
      "epoch": 0.9108635097493036,
      "grad_norm": 1.7850044298056555,
      "learning_rate": 8.791746176467908e-06,
      "loss": 0.3998,
      "step": 327
    },
    {
      "epoch": 0.9136490250696379,
      "grad_norm": 2.564832332193455,
      "learning_rate": 8.78115948491562e-06,
      "loss": 0.3925,
      "step": 328
    },
    {
      "epoch": 0.9164345403899722,
      "grad_norm": 2.857277821101004,
      "learning_rate": 8.770533048884483e-06,
      "loss": 0.4191,
      "step": 329
    },
    {
      "epoch": 0.9192200557103064,
      "grad_norm": 1.8869593568296243,
      "learning_rate": 8.759866980070963e-06,
      "loss": 0.3723,
      "step": 330
    },
    {
      "epoch": 0.9220055710306406,
      "grad_norm": 2.822749383590094,
      "learning_rate": 8.749161390588121e-06,
      "loss": 0.4373,
      "step": 331
    },
    {
      "epoch": 0.924791086350975,
      "grad_norm": 3.6016624700199125,
      "learning_rate": 8.73841639296442e-06,
      "loss": 0.42,
      "step": 332
    },
    {
      "epoch": 0.9275766016713092,
      "grad_norm": 3.2784265675392663,
      "learning_rate": 8.72763210014255e-06,
      "loss": 0.3618,
      "step": 333
    },
    {
      "epoch": 0.9303621169916435,
      "grad_norm": 1.9702489813911765,
      "learning_rate": 8.716808625478245e-06,
      "loss": 0.4411,
      "step": 334
    },
    {
      "epoch": 0.9331476323119777,
      "grad_norm": 3.417044516240844,
      "learning_rate": 8.705946082739085e-06,
      "loss": 0.3898,
      "step": 335
    },
    {
      "epoch": 0.935933147632312,
      "grad_norm": 2.981003980173934,
      "learning_rate": 8.695044586103297e-06,
      "loss": 0.3953,
      "step": 336
    },
    {
      "epoch": 0.9387186629526463,
      "grad_norm": 1.6519320885963353,
      "learning_rate": 8.684104250158565e-06,
      "loss": 0.3606,
      "step": 337
    },
    {
      "epoch": 0.9415041782729805,
      "grad_norm": 1.6990242893205028,
      "learning_rate": 8.67312518990082e-06,
      "loss": 0.3997,
      "step": 338
    },
    {
      "epoch": 0.9442896935933147,
      "grad_norm": 2.175693023942516,
      "learning_rate": 8.662107520733027e-06,
      "loss": 0.389,
      "step": 339
    },
    {
      "epoch": 0.947075208913649,
      "grad_norm": 2.0463823251330386,
      "learning_rate": 8.651051358463984e-06,
      "loss": 0.3676,
      "step": 340
    },
    {
      "epoch": 0.9498607242339833,
      "grad_norm": 1.2632102591580932,
      "learning_rate": 8.639956819307092e-06,
      "loss": 0.3518,
      "step": 341
    },
    {
      "epoch": 0.9526462395543176,
      "grad_norm": 3.645865760850702,
      "learning_rate": 8.628824019879137e-06,
      "loss": 0.3835,
      "step": 342
    },
    {
      "epoch": 0.9554317548746518,
      "grad_norm": 2.9917662559227294,
      "learning_rate": 8.617653077199073e-06,
      "loss": 0.3759,
      "step": 343
    },
    {
      "epoch": 0.958217270194986,
      "grad_norm": 1.7719688897678194,
      "learning_rate": 8.606444108686775e-06,
      "loss": 0.3614,
      "step": 344
    },
    {
      "epoch": 0.9610027855153204,
      "grad_norm": 1.8617440436286568,
      "learning_rate": 8.595197232161824e-06,
      "loss": 0.3747,
      "step": 345
    },
    {
      "epoch": 0.9637883008356546,
      "grad_norm": 3.348463613819136,
      "learning_rate": 8.583912565842258e-06,
      "loss": 0.3812,
      "step": 346
    },
    {
      "epoch": 0.9665738161559888,
      "grad_norm": 2.077751958101176,
      "learning_rate": 8.572590228343322e-06,
      "loss": 0.4058,
      "step": 347
    },
    {
      "epoch": 0.9693593314763231,
      "grad_norm": 1.7764316892092973,
      "learning_rate": 8.56123033867624e-06,
      "loss": 0.3978,
      "step": 348
    },
    {
      "epoch": 0.9721448467966574,
      "grad_norm": 4.217181655407024,
      "learning_rate": 8.549833016246948e-06,
      "loss": 0.3944,
      "step": 349
    },
    {
      "epoch": 0.9749303621169917,
      "grad_norm": 1.976847762081207,
      "learning_rate": 8.538398380854848e-06,
      "loss": 0.3512,
      "step": 350
    },
{ |
|
"epoch": 0.9777158774373259, |
|
"grad_norm": 1.6702396074908146, |
|
"learning_rate": 8.526926552691545e-06, |
|
"loss": 0.3512, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.9805013927576601, |
|
"grad_norm": 3.490239094295529, |
|
"learning_rate": 8.51541765233958e-06, |
|
"loss": 0.3693, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.9832869080779945, |
|
"grad_norm": 5.417637080744737, |
|
"learning_rate": 8.503871800771175e-06, |
|
"loss": 0.3817, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.9860724233983287, |
|
"grad_norm": 3.3491509781485878, |
|
"learning_rate": 8.492289119346944e-06, |
|
"loss": 0.3577, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.9888579387186629, |
|
"grad_norm": 2.0956347844822822, |
|
"learning_rate": 8.480669729814635e-06, |
|
"loss": 0.3782, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.9916434540389972, |
|
"grad_norm": 2.775713791665423, |
|
"learning_rate": 8.469013754307834e-06, |
|
"loss": 0.4459, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.9944289693593314, |
|
"grad_norm": 3.013967242613664, |
|
"learning_rate": 8.457321315344695e-06, |
|
"loss": 0.407, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.9972144846796658, |
|
"grad_norm": 1.650005105150807, |
|
"learning_rate": 8.445592535826643e-06, |
|
"loss": 0.4033, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 3.0015123642459156, |
|
"learning_rate": 8.433827539037088e-06, |
|
"loss": 0.395, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0027855153203342, |
|
"grad_norm": 1.932588585056181, |
|
"learning_rate": 8.422026448640124e-06, |
|
"loss": 0.339, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.0055710306406684, |
|
"grad_norm": 1.9021847931061198, |
|
"learning_rate": 8.410189388679234e-06, |
|
"loss": 0.3066, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.0083565459610029, |
|
"grad_norm": 1.9198772320448578, |
|
"learning_rate": 8.398316483575981e-06, |
|
"loss": 0.3002, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.011142061281337, |
|
"grad_norm": 2.0194026625634316, |
|
"learning_rate": 8.386407858128707e-06, |
|
"loss": 0.2953, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.0139275766016713, |
|
"grad_norm": 1.836864584909459, |
|
"learning_rate": 8.374463637511212e-06, |
|
"loss": 0.2993, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.0167130919220055, |
|
"grad_norm": 1.227739289492473, |
|
"learning_rate": 8.362483947271446e-06, |
|
"loss": 0.3166, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.0194986072423398, |
|
"grad_norm": 1.9556057354458665, |
|
"learning_rate": 8.350468913330192e-06, |
|
"loss": 0.28, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.0222841225626742, |
|
"grad_norm": 3.0797459439677852, |
|
"learning_rate": 8.338418661979729e-06, |
|
"loss": 0.324, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.0250696378830084, |
|
"grad_norm": 3.218750396222369, |
|
"learning_rate": 8.326333319882516e-06, |
|
"loss": 0.2653, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.0278551532033426, |
|
"grad_norm": 3.8648322613850903, |
|
"learning_rate": 8.31421301406986e-06, |
|
"loss": 0.3124, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.0306406685236769, |
|
"grad_norm": 4.589899452796797, |
|
"learning_rate": 8.302057871940577e-06, |
|
"loss": 0.281, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.033426183844011, |
|
"grad_norm": 2.4680711272554983, |
|
"learning_rate": 8.28986802125965e-06, |
|
"loss": 0.3153, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.0362116991643453, |
|
"grad_norm": 2.1651319374463536, |
|
"learning_rate": 8.277643590156893e-06, |
|
"loss": 0.3138, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.0389972144846797, |
|
"grad_norm": 2.59985804630678, |
|
"learning_rate": 8.265384707125607e-06, |
|
"loss": 0.3073, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.041782729805014, |
|
"grad_norm": 1.9201049130058931, |
|
"learning_rate": 8.25309150102121e-06, |
|
"loss": 0.296, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.0445682451253482, |
|
"grad_norm": 4.692595611101445, |
|
"learning_rate": 8.240764101059913e-06, |
|
"loss": 0.2989, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.0473537604456824, |
|
"grad_norm": 2.944704852002463, |
|
"learning_rate": 8.228402636817331e-06, |
|
"loss": 0.3039, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.0501392757660166, |
|
"grad_norm": 1.7972459887378816, |
|
"learning_rate": 8.216007238227142e-06, |
|
"loss": 0.3028, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.052924791086351, |
|
"grad_norm": 2.413306249405345, |
|
"learning_rate": 8.203578035579716e-06, |
|
"loss": 0.2957, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.0557103064066853, |
|
"grad_norm": 1.7731249072909891, |
|
"learning_rate": 8.191115159520735e-06, |
|
"loss": 0.2856, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.0584958217270195, |
|
"grad_norm": 1.8822637861107847, |
|
"learning_rate": 8.178618741049841e-06, |
|
"loss": 0.2839, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.0612813370473537, |
|
"grad_norm": 1.7875646461877774, |
|
"learning_rate": 8.166088911519236e-06, |
|
"loss": 0.2881, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.064066852367688, |
|
"grad_norm": 1.5221412711643412, |
|
"learning_rate": 8.153525802632314e-06, |
|
"loss": 0.2783, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.0668523676880224, |
|
"grad_norm": 2.5404181716933576, |
|
"learning_rate": 8.140929546442282e-06, |
|
"loss": 0.2757, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.0696378830083566, |
|
"grad_norm": 2.332444239333717, |
|
"learning_rate": 8.128300275350756e-06, |
|
"loss": 0.3021, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.0724233983286908, |
|
"grad_norm": 1.9841182980871097, |
|
"learning_rate": 8.115638122106382e-06, |
|
"loss": 0.2827, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.075208913649025, |
|
"grad_norm": 2.626605459372405, |
|
"learning_rate": 8.102943219803433e-06, |
|
"loss": 0.3086, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.0779944289693593, |
|
"grad_norm": 2.2898447540180498, |
|
"learning_rate": 8.090215701880418e-06, |
|
"loss": 0.2757, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.0807799442896937, |
|
"grad_norm": 2.188645908320459, |
|
"learning_rate": 8.077455702118673e-06, |
|
"loss": 0.3118, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.083565459610028, |
|
"grad_norm": 2.4030462274321582, |
|
"learning_rate": 8.064663354640956e-06, |
|
"loss": 0.3101, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.0863509749303621, |
|
"grad_norm": 2.3969511596658006, |
|
"learning_rate": 8.051838793910038e-06, |
|
"loss": 0.2764, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.0891364902506964, |
|
"grad_norm": 3.415497473735717, |
|
"learning_rate": 8.038982154727288e-06, |
|
"loss": 0.2949, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.0919220055710306, |
|
"grad_norm": 3.453096159648674, |
|
"learning_rate": 8.026093572231266e-06, |
|
"loss": 0.3208, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.0947075208913648, |
|
"grad_norm": 2.4553669597194934, |
|
"learning_rate": 8.013173181896283e-06, |
|
"loss": 0.2874, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.0974930362116992, |
|
"grad_norm": 3.0596364954155586, |
|
"learning_rate": 8.000221119530993e-06, |
|
"loss": 0.2841, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1002785515320335, |
|
"grad_norm": 1.9524008055413438, |
|
"learning_rate": 7.987237521276962e-06, |
|
"loss": 0.2742, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.1030640668523677, |
|
"grad_norm": 1.2928628243461395, |
|
"learning_rate": 7.974222523607236e-06, |
|
"loss": 0.2803, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.105849582172702, |
|
"grad_norm": 3.570554102077123, |
|
"learning_rate": 7.961176263324902e-06, |
|
"loss": 0.3177, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.1086350974930361, |
|
"grad_norm": 2.945103184270358, |
|
"learning_rate": 7.948098877561657e-06, |
|
"loss": 0.3017, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.1114206128133706, |
|
"grad_norm": 1.5680656933856998, |
|
"learning_rate": 7.934990503776363e-06, |
|
"loss": 0.3085, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.1142061281337048, |
|
"grad_norm": 1.2772044396629245, |
|
"learning_rate": 7.921851279753606e-06, |
|
"loss": 0.2896, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.116991643454039, |
|
"grad_norm": 2.0756399153018794, |
|
"learning_rate": 7.90868134360224e-06, |
|
"loss": 0.307, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.1197771587743732, |
|
"grad_norm": 3.159356183260518, |
|
"learning_rate": 7.895480833753942e-06, |
|
"loss": 0.2966, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.1225626740947074, |
|
"grad_norm": 1.4249067285729795, |
|
"learning_rate": 7.882249888961755e-06, |
|
"loss": 0.2842, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.1253481894150417, |
|
"grad_norm": 1.7020390245884693, |
|
"learning_rate": 7.868988648298632e-06, |
|
"loss": 0.285, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.128133704735376, |
|
"grad_norm": 2.104189056030107, |
|
"learning_rate": 7.855697251155967e-06, |
|
"loss": 0.305, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.1309192200557103, |
|
"grad_norm": 2.0103696002381484, |
|
"learning_rate": 7.842375837242135e-06, |
|
"loss": 0.314, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.1337047353760445, |
|
"grad_norm": 1.6058555814982336, |
|
"learning_rate": 7.829024546581028e-06, |
|
"loss": 0.292, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.1364902506963788, |
|
"grad_norm": 4.136968379736762, |
|
"learning_rate": 7.815643519510571e-06, |
|
"loss": 0.2964, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.1392757660167132, |
|
"grad_norm": 1.7488856327397928, |
|
"learning_rate": 7.802232896681259e-06, |
|
"loss": 0.2954, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.1420612813370474, |
|
"grad_norm": 3.1050927307720158, |
|
"learning_rate": 7.788792819054672e-06, |
|
"loss": 0.2912, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.1448467966573816, |
|
"grad_norm": 4.346824524754197, |
|
"learning_rate": 7.775323427901993e-06, |
|
"loss": 0.2799, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.1476323119777159, |
|
"grad_norm": 3.848544227108635, |
|
"learning_rate": 7.76182486480253e-06, |
|
"loss": 0.2965, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.15041782729805, |
|
"grad_norm": 2.355661854220899, |
|
"learning_rate": 7.748297271642218e-06, |
|
"loss": 0.2811, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.1532033426183843, |
|
"grad_norm": 3.4954469177059266, |
|
"learning_rate": 7.734740790612137e-06, |
|
"loss": 0.3054, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.1559888579387188, |
|
"grad_norm": 3.32344752269158, |
|
"learning_rate": 7.721155564207003e-06, |
|
"loss": 0.282, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.158774373259053, |
|
"grad_norm": 2.233301397741439, |
|
"learning_rate": 7.707541735223696e-06, |
|
"loss": 0.2972, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.1615598885793872, |
|
"grad_norm": 3.0128078545798784, |
|
"learning_rate": 7.693899446759727e-06, |
|
"loss": 0.3112, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.1643454038997214, |
|
"grad_norm": 2.5919284586454387, |
|
"learning_rate": 7.680228842211762e-06, |
|
"loss": 0.3077, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.1671309192200556, |
|
"grad_norm": 3.208755076162829, |
|
"learning_rate": 7.666530065274096e-06, |
|
"loss": 0.2894, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.16991643454039, |
|
"grad_norm": 2.282458795854026, |
|
"learning_rate": 7.65280325993715e-06, |
|
"loss": 0.3008, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.1727019498607243, |
|
"grad_norm": 1.9891904321584388, |
|
"learning_rate": 7.63904857048596e-06, |
|
"loss": 0.2945, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.1754874651810585, |
|
"grad_norm": 2.1584854827128575, |
|
"learning_rate": 7.625266141498653e-06, |
|
"loss": 0.2943, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.1782729805013927, |
|
"grad_norm": 3.0793408700547067, |
|
"learning_rate": 7.611456117844934e-06, |
|
"loss": 0.3074, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.181058495821727, |
|
"grad_norm": 2.4702381464869503, |
|
"learning_rate": 7.597618644684561e-06, |
|
"loss": 0.3057, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.1838440111420612, |
|
"grad_norm": 2.5527274172231698, |
|
"learning_rate": 7.583753867465819e-06, |
|
"loss": 0.2715, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.1866295264623956, |
|
"grad_norm": 2.414279200281529, |
|
"learning_rate": 7.569861931923989e-06, |
|
"loss": 0.3059, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.1894150417827298, |
|
"grad_norm": 3.077658269847486, |
|
"learning_rate": 7.5559429840798185e-06, |
|
"loss": 0.2927, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.192200557103064, |
|
"grad_norm": 2.386047851409706, |
|
"learning_rate": 7.541997170237989e-06, |
|
"loss": 0.3161, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.1949860724233983, |
|
"grad_norm": 1.7809421948138482, |
|
"learning_rate": 7.528024636985575e-06, |
|
"loss": 0.2791, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.1977715877437327, |
|
"grad_norm": 2.148807293227316, |
|
"learning_rate": 7.514025531190499e-06, |
|
"loss": 0.3061, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.200557103064067, |
|
"grad_norm": 5.468088360447406, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.2916, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.2033426183844012, |
|
"grad_norm": 2.53365208146336, |
|
"learning_rate": 7.485948190839076e-06, |
|
"loss": 0.303, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.2061281337047354, |
|
"grad_norm": 2.1003248962543632, |
|
"learning_rate": 7.4718702514089324e-06, |
|
"loss": 0.308, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.2089136490250696, |
|
"grad_norm": 4.499313662806317, |
|
"learning_rate": 7.457766329685444e-06, |
|
"loss": 0.2779, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.2116991643454038, |
|
"grad_norm": 2.8825487551060442, |
|
"learning_rate": 7.443636573917585e-06, |
|
"loss": 0.3076, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.2144846796657383, |
|
"grad_norm": 2.425610118627699, |
|
"learning_rate": 7.429481132625876e-06, |
|
"loss": 0.2733, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.2172701949860725, |
|
"grad_norm": 2.278532778557837, |
|
"learning_rate": 7.4153001546008245e-06, |
|
"loss": 0.2965, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.2200557103064067, |
|
"grad_norm": 1.513748538004442, |
|
"learning_rate": 7.40109378890136e-06, |
|
"loss": 0.2842, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.222841225626741, |
|
"grad_norm": 1.9354386361780191, |
|
"learning_rate": 7.386862184853264e-06, |
|
"loss": 0.3052, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.2256267409470751, |
|
"grad_norm": 2.486523009112946, |
|
"learning_rate": 7.372605492047605e-06, |
|
"loss": 0.3189, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.2284122562674096, |
|
"grad_norm": 1.6430436252687675, |
|
"learning_rate": 7.358323860339165e-06, |
|
"loss": 0.2723, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.2311977715877438, |
|
"grad_norm": 2.362342228157553, |
|
"learning_rate": 7.344017439844862e-06, |
|
"loss": 0.2844, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.233983286908078, |
|
"grad_norm": 1.6873610458902926, |
|
"learning_rate": 7.329686380942172e-06, |
|
"loss": 0.2874, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.2367688022284122, |
|
"grad_norm": 1.3910118700367105, |
|
"learning_rate": 7.315330834267553e-06, |
|
"loss": 0.2879, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.2395543175487465, |
|
"grad_norm": 3.3591546401457792, |
|
"learning_rate": 7.300950950714859e-06, |
|
"loss": 0.3162, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.2423398328690807, |
|
"grad_norm": 4.613302410765733, |
|
"learning_rate": 7.28654688143375e-06, |
|
"loss": 0.3084, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.2451253481894151, |
|
"grad_norm": 1.2904247598425311, |
|
"learning_rate": 7.272118777828109e-06, |
|
"loss": 0.2949, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.2479108635097493, |
|
"grad_norm": 1.8891299314016199, |
|
"learning_rate": 7.257666791554448e-06, |
|
"loss": 0.3143, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.2506963788300836, |
|
"grad_norm": 2.4734920059962664, |
|
"learning_rate": 7.243191074520314e-06, |
|
"loss": 0.3138, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.2534818941504178, |
|
"grad_norm": 3.127465106601424, |
|
"learning_rate": 7.2286917788826926e-06, |
|
"loss": 0.2812, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.2562674094707522, |
|
"grad_norm": 1.8250796415251553, |
|
"learning_rate": 7.2141690570464074e-06, |
|
"loss": 0.308, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.2590529247910864, |
|
"grad_norm": 1.8598730279661255, |
|
"learning_rate": 7.199623061662524e-06, |
|
"loss": 0.2897, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.2618384401114207, |
|
"grad_norm": 2.805778731990682, |
|
"learning_rate": 7.185053945626734e-06, |
|
"loss": 0.2706, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.2646239554317549, |
|
"grad_norm": 1.7068991011427281, |
|
"learning_rate": 7.170461862077759e-06, |
|
"loss": 0.2835, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.267409470752089, |
|
"grad_norm": 2.3430462843621616, |
|
"learning_rate": 7.155846964395734e-06, |
|
"loss": 0.2931, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.2701949860724233, |
|
"grad_norm": 1.2942651689546731, |
|
"learning_rate": 7.1412094062005985e-06, |
|
"loss": 0.275, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.2729805013927575, |
|
"grad_norm": 1.4378267784266803, |
|
"learning_rate": 7.1265493413504815e-06, |
|
"loss": 0.2791, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.275766016713092, |
|
"grad_norm": 2.3167388629118006, |
|
"learning_rate": 7.111866923940083e-06, |
|
"loss": 0.2885, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.2785515320334262, |
|
"grad_norm": 2.183214942552782, |
|
"learning_rate": 7.097162308299055e-06, |
|
"loss": 0.2828, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.2813370473537604, |
|
"grad_norm": 2.076174417334502, |
|
"learning_rate": 7.082435648990381e-06, |
|
"loss": 0.2863, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.2841225626740946, |
|
"grad_norm": 2.281228092315863, |
|
"learning_rate": 7.0676871008087465e-06, |
|
"loss": 0.3056, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.286908077994429, |
|
"grad_norm": 2.853603222087058, |
|
"learning_rate": 7.052916818778918e-06, |
|
"loss": 0.2977, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.2896935933147633, |
|
"grad_norm": 1.4487120782426728, |
|
"learning_rate": 7.038124958154108e-06, |
|
"loss": 0.2593, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.2924791086350975, |
|
"grad_norm": 1.88632252466371, |
|
"learning_rate": 7.023311674414346e-06, |
|
"loss": 0.2889, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.2952646239554317, |
|
"grad_norm": 2.126446130011026, |
|
"learning_rate": 7.008477123264849e-06, |
|
"loss": 0.3088, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.298050139275766, |
|
"grad_norm": 2.1142469698905413, |
|
"learning_rate": 6.993621460634371e-06, |
|
"loss": 0.2708, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.3008356545961002, |
|
"grad_norm": 2.0953137987354578, |
|
"learning_rate": 6.978744842673578e-06, |
|
"loss": 0.2947, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.3036211699164346, |
|
"grad_norm": 2.543231719405118, |
|
"learning_rate": 6.9638474257534025e-06, |
|
"loss": 0.3129, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.3064066852367688, |
|
"grad_norm": 3.026122054732199, |
|
"learning_rate": 6.948929366463397e-06, |
|
"loss": 0.2772, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.309192200557103, |
|
"grad_norm": 2.426688491358649, |
|
"learning_rate": 6.93399082161009e-06, |
|
"loss": 0.2814, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.3119777158774373, |
|
"grad_norm": 1.9907948634476968, |
|
"learning_rate": 6.919031948215335e-06, |
|
"loss": 0.2949, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.3147632311977717, |
|
"grad_norm": 4.374768713615575, |
|
"learning_rate": 6.904052903514668e-06, |
|
"loss": 0.2778, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.317548746518106, |
|
"grad_norm": 3.6364066286922654, |
|
"learning_rate": 6.889053844955644e-06, |
|
"loss": 0.3027, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.3203342618384402, |
|
"grad_norm": 1.560050832982693, |
|
"learning_rate": 6.874034930196191e-06, |
|
"loss": 0.3218, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.3231197771587744, |
|
"grad_norm": 1.715474138138304, |
|
"learning_rate": 6.8589963171029475e-06, |
|
"loss": 0.2882, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.3259052924791086, |
|
"grad_norm": 4.4288052115230085, |
|
"learning_rate": 6.843938163749608e-06, |
|
"loss": 0.2609, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.3286908077994428, |
|
"grad_norm": 1.9255530506625296, |
|
"learning_rate": 6.8288606284152535e-06, |
|
"loss": 0.3011, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.331476323119777, |
|
"grad_norm": 1.5313698052801556, |
|
"learning_rate": 6.813763869582694e-06, |
|
"loss": 0.2945, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.3342618384401115, |
|
"grad_norm": 1.8698269775547134, |
|
"learning_rate": 6.798648045936807e-06, |
|
"loss": 0.2927, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.3370473537604457, |
|
"grad_norm": 2.1341099394442296, |
|
"learning_rate": 6.783513316362855e-06, |
|
"loss": 0.2922, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.33983286908078, |
|
"grad_norm": 3.629974587764582, |
|
"learning_rate": 6.768359839944829e-06, |
|
"loss": 0.298, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.3426183844011141, |
|
"grad_norm": 2.248907375123776, |
|
"learning_rate": 6.753187775963773e-06, |
|
"loss": 0.2779, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.3454038997214486, |
|
"grad_norm": 3.9583732310164867, |
|
"learning_rate": 6.737997283896104e-06, |
|
"loss": 0.314, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.3481894150417828, |
|
"grad_norm": 4.395093752932164, |
|
"learning_rate": 6.722788523411945e-06, |
|
"loss": 0.2954, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.350974930362117, |
|
"grad_norm": 4.489977085052979, |
|
"learning_rate": 6.707561654373436e-06, |
|
"loss": 0.3177, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.3537604456824512, |
|
"grad_norm": 1.9126292912432845, |
|
"learning_rate": 6.692316836833066e-06, |
|
"loss": 0.271, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.3565459610027855, |
|
"grad_norm": 2.7338376331916012, |
|
"learning_rate": 6.677054231031981e-06, |
|
"loss": 0.2965, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.3593314763231197, |
|
"grad_norm": 3.8744884745496293, |
|
"learning_rate": 6.6617739973982985e-06, |
|
"loss": 0.2692, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.362116991643454, |
|
"grad_norm": 3.8751676455743365, |
|
"learning_rate": 6.646476296545434e-06, |
|
"loss": 0.2993, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.3649025069637883, |
|
"grad_norm": 2.0055237976008233, |
|
"learning_rate": 6.631161289270398e-06, |
|
"loss": 0.307, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.3676880222841226, |
|
"grad_norm": 2.2749197087276207, |
|
"learning_rate": 6.615829136552112e-06, |
|
"loss": 0.2885, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.3704735376044568, |
|
"grad_norm": 3.6039402387410004, |
|
"learning_rate": 6.600479999549721e-06, |
|
"loss": 0.2867, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.3732590529247912, |
|
"grad_norm": 1.8556431923541905, |
|
"learning_rate": 6.585114039600891e-06, |
|
"loss": 0.3028, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.3760445682451254, |
|
"grad_norm": 1.472757268153259, |
|
"learning_rate": 6.569731418220119e-06, |
|
"loss": 0.2908, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.3788300835654597, |
|
"grad_norm": 3.4669961974701042, |
|
"learning_rate": 6.554332297097032e-06, |
|
"loss": 0.2818, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.3816155988857939, |
|
"grad_norm": 1.3782636763019214, |
|
"learning_rate": 6.538916838094691e-06, |
|
"loss": 0.276, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.384401114206128, |
|
"grad_norm": 1.9455540914671525, |
|
"learning_rate": 6.523485203247886e-06, |
|
"loss": 0.3226, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.3871866295264623, |
|
"grad_norm": 2.203045420734133, |
|
"learning_rate": 6.5080375547614325e-06, |
|
"loss": 0.3045, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.3899721448467965, |
|
"grad_norm": 2.134714016183449, |
|
"learning_rate": 6.492574055008474e-06, |
|
"loss": 0.3213, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.392757660167131, |
|
"grad_norm": 1.865755404674495, |
|
"learning_rate": 6.477094866528764e-06, |
|
"loss": 0.3091, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.392757660167131, |
|
"eval_loss": 0.2814265787601471, |
|
"eval_runtime": 818.635, |
|
"eval_samples_per_second": 102.042, |
|
"eval_steps_per_second": 3.189, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.3955431754874652, |
|
"grad_norm": 1.5467011234415777, |
|
"learning_rate": 6.461600152026966e-06, |
|
"loss": 0.2916, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.3983286908077994, |
|
"grad_norm": 1.374038237297588, |
|
"learning_rate": 6.446090074370939e-06, |
|
"loss": 0.316, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.4011142061281336, |
|
"grad_norm": 2.0045925166264533, |
|
"learning_rate": 6.430564796590028e-06, |
|
"loss": 0.2805, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.403899721448468, |
|
"grad_norm": 1.7947417608551275, |
|
"learning_rate": 6.415024481873352e-06, |
|
"loss": 0.2695, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.4066852367688023, |
|
"grad_norm": 1.6193885128078487, |
|
"learning_rate": 6.399469293568079e-06, |
|
"loss": 0.3167, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.4094707520891365, |
|
"grad_norm": 2.437778686816235, |
|
"learning_rate": 6.383899395177724e-06, |
|
"loss": 0.3049, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.4122562674094707, |
|
"grad_norm": 3.3374902207902952, |
|
"learning_rate": 6.368314950360416e-06, |
|
"loss": 0.3045, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.415041782729805, |
|
"grad_norm": 2.134097683575017, |
|
"learning_rate": 6.352716122927187e-06, |
|
"loss": 0.3001, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.4178272980501392, |
|
"grad_norm": 2.1926515892092566, |
|
"learning_rate": 6.337103076840248e-06, |
|
"loss": 0.2903, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.4206128133704734, |
|
"grad_norm": 1.4719401694557852, |
|
"learning_rate": 6.321475976211267e-06, |
|
"loss": 0.3158, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.4233983286908078, |
|
"grad_norm": 1.5010130875586007, |
|
"learning_rate": 6.3058349852996345e-06, |
|
"loss": 0.2696, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.426183844011142, |
|
"grad_norm": 1.966513584939353, |
|
"learning_rate": 6.290180268510753e-06, |
|
"loss": 0.2794, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.4289693593314763, |
|
"grad_norm": 1.2909781019524853, |
|
"learning_rate": 6.274511990394294e-06, |
|
"loss": 0.2659, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.4317548746518105, |
|
"grad_norm": 1.6554197676099942, |
|
"learning_rate": 6.258830315642479e-06, |
|
"loss": 0.2862, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.434540389972145, |
|
"grad_norm": 1.3569073662333675, |
|
"learning_rate": 6.243135409088341e-06, |
|
"loss": 0.2876, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.4373259052924792, |
|
"grad_norm": 3.298515734330952, |
|
"learning_rate": 6.227427435703997e-06, |
|
"loss": 0.3145, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.4401114206128134, |
|
"grad_norm": 2.4725266359773146, |
|
"learning_rate": 6.211706560598909e-06, |
|
"loss": 0.2617, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.4428969359331476, |
|
"grad_norm": 2.4040297826720907, |
|
"learning_rate": 6.195972949018157e-06, |
|
"loss": 0.2927, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.4456824512534818, |
|
"grad_norm": 1.7350263327452882, |
|
"learning_rate": 6.180226766340688e-06, |
|
"loss": 0.2934, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.448467966573816, |
|
"grad_norm": 1.6855883791393933, |
|
"learning_rate": 6.164468178077595e-06, |
|
"loss": 0.2743, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.4512534818941505, |
|
"grad_norm": 2.4781122323281206, |
|
"learning_rate": 6.148697349870364e-06, |
|
"loss": 0.3122, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.4540389972144847, |
|
"grad_norm": 3.1010206900030837, |
|
"learning_rate": 6.132914447489137e-06, |
|
"loss": 0.2723, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.456824512534819, |
|
"grad_norm": 2.8999626577062756, |
|
"learning_rate": 6.117119636830971e-06, |
|
"loss": 0.2921, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.4596100278551531, |
|
"grad_norm": 3.467917999076453, |
|
"learning_rate": 6.1013130839180936e-06, |
|
"loss": 0.279, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.4623955431754876, |
|
"grad_norm": 3.007429645366548, |
|
"learning_rate": 6.085494954896156e-06, |
|
"loss": 0.275, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.4651810584958218, |
|
"grad_norm": 2.3663569431773457, |
|
"learning_rate": 6.0696654160324875e-06, |
|
"loss": 0.2742, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.467966573816156, |
|
"grad_norm": 2.5622119787222415, |
|
"learning_rate": 6.053824633714352e-06, |
|
"loss": 0.2802, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.4707520891364902, |
|
"grad_norm": 3.1029887172902693, |
|
"learning_rate": 6.037972774447194e-06, |
|
"loss": 0.2717, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.4735376044568245, |
|
"grad_norm": 2.2784268908678222, |
|
"learning_rate": 6.0221100048528866e-06, |
|
"loss": 0.3131, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.4763231197771587, |
|
"grad_norm": 2.7667714975728366, |
|
"learning_rate": 6.0062364916679885e-06, |
|
"loss": 0.3072, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.479108635097493, |
|
"grad_norm": 2.1954807587123932, |
|
"learning_rate": 5.990352401741981e-06, |
|
"loss": 0.2848, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.4818941504178273, |
|
"grad_norm": 3.9458402872310363, |
|
"learning_rate": 5.974457902035524e-06, |
|
"loss": 0.3323, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.4846796657381616, |
|
"grad_norm": 2.3578197356919604, |
|
"learning_rate": 5.958553159618693e-06, |
|
"loss": 0.2547, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.4874651810584958, |
|
"grad_norm": 1.9959921869115924, |
|
"learning_rate": 5.94263834166923e-06, |
|
"loss": 0.2731, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.49025069637883, |
|
"grad_norm": 1.9231676191891314, |
|
"learning_rate": 5.926713615470781e-06, |
|
"loss": 0.2911, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.4930362116991645, |
|
"grad_norm": 1.9181175145942784, |
|
"learning_rate": 5.910779148411139e-06, |
|
"loss": 0.2876, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.4958217270194987, |
|
"grad_norm": 1.26319210778614, |
|
"learning_rate": 5.8948351079804875e-06, |
|
"loss": 0.2549, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.498607242339833, |
|
"grad_norm": 2.369593083593985, |
|
"learning_rate": 5.878881661769633e-06, |
|
"loss": 0.285, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.501392757660167, |
|
"grad_norm": 2.5596514631765612, |
|
"learning_rate": 5.8629189774682524e-06, |
|
"loss": 0.3115, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.5041782729805013, |
|
"grad_norm": 2.9709467049521354, |
|
"learning_rate": 5.846947222863123e-06, |
|
"loss": 0.319, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.5069637883008355, |
|
"grad_norm": 2.655564578116425, |
|
"learning_rate": 5.830966565836365e-06, |
|
"loss": 0.2849, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.5097493036211698, |
|
"grad_norm": 2.2144213139981512, |
|
"learning_rate": 5.8149771743636675e-06, |
|
"loss": 0.2821, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.5125348189415042, |
|
"grad_norm": 1.899250024771315, |
|
"learning_rate": 5.798979216512536e-06, |
|
"loss": 0.2966, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.5153203342618384, |
|
"grad_norm": 3.4356575828432687, |
|
"learning_rate": 5.782972860440517e-06, |
|
"loss": 0.3014, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.5181058495821727, |
|
"grad_norm": 3.1095089865902015, |
|
"learning_rate": 5.766958274393428e-06, |
|
"loss": 0.2943, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.520891364902507, |
|
"grad_norm": 2.4967144349152504, |
|
"learning_rate": 5.750935626703598e-06, |
|
"loss": 0.2842, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.5236768802228413, |
|
"grad_norm": 1.3518618010552041, |
|
"learning_rate": 5.734905085788091e-06, |
|
"loss": 0.2778, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.5264623955431755, |
|
"grad_norm": 2.6550746818853046, |
|
"learning_rate": 5.71886682014694e-06, |
|
"loss": 0.2901, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.5292479108635098, |
|
"grad_norm": 4.465215821078249, |
|
"learning_rate": 5.702820998361374e-06, |
|
"loss": 0.2966, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.532033426183844, |
|
"grad_norm": 2.35326642935994, |
|
"learning_rate": 5.686767789092041e-06, |
|
"loss": 0.257, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.5348189415041782, |
|
"grad_norm": 1.7584748632421439, |
|
"learning_rate": 5.670707361077249e-06, |
|
"loss": 0.2698, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.5376044568245124, |
|
"grad_norm": 2.474855889611686, |
|
"learning_rate": 5.6546398831311774e-06, |
|
"loss": 0.2669, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.5403899721448466, |
|
"grad_norm": 2.8426424040946943, |
|
"learning_rate": 5.638565524142111e-06, |
|
"loss": 0.2935, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.543175487465181, |
|
"grad_norm": 2.029148244071259, |
|
"learning_rate": 5.622484453070659e-06, |
|
"loss": 0.3046, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.5459610027855153, |
|
"grad_norm": 2.4217369074743766, |
|
"learning_rate": 5.606396838947988e-06, |
|
"loss": 0.2857, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.5487465181058497, |
|
"grad_norm": 1.8730748125523047, |
|
"learning_rate": 5.5903028508740385e-06, |
|
"loss": 0.302, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.551532033426184, |
|
"grad_norm": 1.843895158305428, |
|
"learning_rate": 5.574202658015744e-06, |
|
"loss": 0.306, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.5543175487465182, |
|
"grad_norm": 2.951993887915063, |
|
"learning_rate": 5.558096429605263e-06, |
|
"loss": 0.2907, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.5571030640668524, |
|
"grad_norm": 3.817240711829453, |
|
"learning_rate": 5.541984334938193e-06, |
|
"loss": 0.2932, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.5598885793871866, |
|
"grad_norm": 1.4182157385577547, |
|
"learning_rate": 5.525866543371794e-06, |
|
"loss": 0.3061, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.5626740947075208, |
|
"grad_norm": 1.5928042471775785, |
|
"learning_rate": 5.509743224323203e-06, |
|
"loss": 0.2863, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.565459610027855, |
|
"grad_norm": 1.4318021592977268, |
|
"learning_rate": 5.493614547267664e-06, |
|
"loss": 0.2712, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.5682451253481893, |
|
"grad_norm": 3.233462447854743, |
|
"learning_rate": 5.477480681736734e-06, |
|
"loss": 0.2701, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.5710306406685237, |
|
"grad_norm": 3.0950400469548356, |
|
"learning_rate": 5.46134179731651e-06, |
|
"loss": 0.2766, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.573816155988858, |
|
"grad_norm": 2.827592231152963, |
|
"learning_rate": 5.445198063645844e-06, |
|
"loss": 0.2939, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.5766016713091922, |
|
"grad_norm": 1.5453508944319259, |
|
"learning_rate": 5.4290496504145595e-06, |
|
"loss": 0.2808, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.5793871866295266, |
|
"grad_norm": 2.5778763259419617, |
|
"learning_rate": 5.412896727361663e-06, |
|
"loss": 0.2602, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.5821727019498608, |
|
"grad_norm": 2.2068222399489783, |
|
"learning_rate": 5.396739464273569e-06, |
|
"loss": 0.2892, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.584958217270195, |
|
"grad_norm": 1.9474791457513625, |
|
"learning_rate": 5.380578030982313e-06, |
|
"loss": 0.2834, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.5877437325905293, |
|
"grad_norm": 1.8464291842077598, |
|
"learning_rate": 5.36441259736376e-06, |
|
"loss": 0.2914, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.5905292479108635, |
|
"grad_norm": 1.1703406678326422, |
|
"learning_rate": 5.348243333335823e-06, |
|
"loss": 0.2745, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.5933147632311977, |
|
"grad_norm": 2.4131355428147185, |
|
"learning_rate": 5.332070408856681e-06, |
|
"loss": 0.2868, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.596100278551532, |
|
"grad_norm": 2.717895784075401, |
|
"learning_rate": 5.3158939939229855e-06, |
|
"loss": 0.304, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.5988857938718661, |
|
"grad_norm": 3.259252770682078, |
|
"learning_rate": 5.299714258568077e-06, |
|
"loss": 0.2799, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.6016713091922006, |
|
"grad_norm": 3.3305440863623548, |
|
"learning_rate": 5.283531372860201e-06, |
|
"loss": 0.2706, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.6044568245125348, |
|
"grad_norm": 1.7033814390423045, |
|
"learning_rate": 5.26734550690071e-06, |
|
"loss": 0.2613, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.6072423398328692, |
|
"grad_norm": 2.963221782232955, |
|
"learning_rate": 5.251156830822293e-06, |
|
"loss": 0.2748, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.6100278551532035, |
|
"grad_norm": 4.0063659542129715, |
|
"learning_rate": 5.234965514787164e-06, |
|
"loss": 0.2557, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.6128133704735377, |
|
"grad_norm": 4.422537873040853, |
|
"learning_rate": 5.218771728985296e-06, |
|
"loss": 0.2897, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.615598885793872, |
|
"grad_norm": 3.024924217549378, |
|
"learning_rate": 5.202575643632619e-06, |
|
"loss": 0.2844, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.6183844011142061, |
|
"grad_norm": 2.138215919927983, |
|
"learning_rate": 5.186377428969232e-06, |
|
"loss": 0.2866, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.6211699164345403, |
|
"grad_norm": 1.240226840463157, |
|
"learning_rate": 5.170177255257618e-06, |
|
"loss": 0.254, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.6239554317548746, |
|
"grad_norm": 2.3154982294085173, |
|
"learning_rate": 5.153975292780852e-06, |
|
"loss": 0.297, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.6267409470752088, |
|
"grad_norm": 3.1894329504429892, |
|
"learning_rate": 5.137771711840811e-06, |
|
"loss": 0.2859, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.6295264623955432, |
|
"grad_norm": 3.55154773462531, |
|
"learning_rate": 5.12156668275638e-06, |
|
"loss": 0.2936, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.6323119777158774, |
|
"grad_norm": 2.3286918046440848, |
|
"learning_rate": 5.105360375861673e-06, |
|
"loss": 0.2703, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.6350974930362117, |
|
"grad_norm": 4.178139287722146, |
|
"learning_rate": 5.0891529615042305e-06, |
|
"loss": 0.2876, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.637883008356546, |
|
"grad_norm": 3.232229298438426, |
|
"learning_rate": 5.0729446100432326e-06, |
|
"loss": 0.2748, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.6406685236768803, |
|
"grad_norm": 2.0898391201834046, |
|
"learning_rate": 5.056735491847712e-06, |
|
"loss": 0.2887, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.6434540389972145, |
|
"grad_norm": 2.5617157098331313, |
|
"learning_rate": 5.040525777294762e-06, |
|
"loss": 0.2982, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.6462395543175488, |
|
"grad_norm": 2.8681424124256827, |
|
"learning_rate": 5.024315636767738e-06, |
|
"loss": 0.2847, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.649025069637883, |
|
"grad_norm": 3.794732733789886, |
|
"learning_rate": 5.008105240654484e-06, |
|
"loss": 0.277, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.6518105849582172, |
|
"grad_norm": 4.2614563914015005, |
|
"learning_rate": 4.991894759345519e-06, |
|
"loss": 0.3033, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.6545961002785514, |
|
"grad_norm": 3.2347184262406485, |
|
"learning_rate": 4.975684363232263e-06, |
|
"loss": 0.2845, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.6573816155988856, |
|
"grad_norm": 1.944671535185539, |
|
"learning_rate": 4.959474222705241e-06, |
|
"loss": 0.2807, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.66016713091922, |
|
"grad_norm": 3.537163005202794, |
|
"learning_rate": 4.94326450815229e-06, |
|
"loss": 0.2954, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.6629526462395543, |
|
"grad_norm": 3.0948934910556645, |
|
"learning_rate": 4.927055389956768e-06, |
|
"loss": 0.2866, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.6657381615598887, |
|
"grad_norm": 3.203020569913935, |
|
"learning_rate": 4.910847038495771e-06, |
|
"loss": 0.2727, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.668523676880223, |
|
"grad_norm": 2.994648402687771, |
|
"learning_rate": 4.894639624138327e-06, |
|
"loss": 0.2658, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.6713091922005572, |
|
"grad_norm": 1.7310114843548488, |
|
"learning_rate": 4.878433317243621e-06, |
|
"loss": 0.3046, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.6740947075208914, |
|
"grad_norm": 2.2498865998485704, |
|
"learning_rate": 4.862228288159191e-06, |
|
"loss": 0.2727, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.6768802228412256, |
|
"grad_norm": 3.103060269344239, |
|
"learning_rate": 4.846024707219149e-06, |
|
"loss": 0.2789, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.6796657381615598, |
|
"grad_norm": 4.521698118094593, |
|
"learning_rate": 4.829822744742383e-06, |
|
"loss": 0.2624, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.682451253481894, |
|
"grad_norm": 1.9031320700845489, |
|
"learning_rate": 4.81362257103077e-06, |
|
"loss": 0.2842, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.6852367688022283, |
|
"grad_norm": 2.1728398377535822, |
|
"learning_rate": 4.797424356367383e-06, |
|
"loss": 0.2863, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.6880222841225627, |
|
"grad_norm": 2.6669341880143103, |
|
"learning_rate": 4.781228271014704e-06, |
|
"loss": 0.2786, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.690807799442897, |
|
"grad_norm": 3.639052522723416, |
|
"learning_rate": 4.765034485212838e-06, |
|
"loss": 0.2812, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.6935933147632312, |
|
"grad_norm": 3.134005199063706, |
|
"learning_rate": 4.74884316917771e-06, |
|
"loss": 0.2794, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.6963788300835656, |
|
"grad_norm": 1.993968145562614, |
|
"learning_rate": 4.7326544930992905e-06, |
|
"loss": 0.3088, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.6991643454038998, |
|
"grad_norm": 1.4986296776370491, |
|
"learning_rate": 4.7164686271398005e-06, |
|
"loss": 0.2951, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.701949860724234, |
|
"grad_norm": 3.350065943328308, |
|
"learning_rate": 4.700285741431924e-06, |
|
"loss": 0.2873, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.7047353760445683, |
|
"grad_norm": 1.9755219628853173, |
|
"learning_rate": 4.684106006077015e-06, |
|
"loss": 0.3198, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.7075208913649025, |
|
"grad_norm": 1.522736504810297, |
|
"learning_rate": 4.6679295911433215e-06, |
|
"loss": 0.2866, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.7103064066852367, |
|
"grad_norm": 1.2069328740322993, |
|
"learning_rate": 4.651756666664178e-06, |
|
"loss": 0.2854, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.713091922005571, |
|
"grad_norm": 1.8255053619925121, |
|
"learning_rate": 4.635587402636241e-06, |
|
"loss": 0.2832, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.7158774373259051, |
|
"grad_norm": 1.7518422372424065, |
|
"learning_rate": 4.619421969017688e-06, |
|
"loss": 0.2722, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.7186629526462396, |
|
"grad_norm": 2.608271780568857, |
|
"learning_rate": 4.603260535726432e-06, |
|
"loss": 0.2919, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.7214484679665738, |
|
"grad_norm": 2.4133675617414045, |
|
"learning_rate": 4.587103272638339e-06, |
|
"loss": 0.279, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.724233983286908, |
|
"grad_norm": 1.374862069425547, |
|
"learning_rate": 4.570950349585442e-06, |
|
"loss": 0.2877, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.7270194986072425, |
|
"grad_norm": 2.2401917797761537, |
|
"learning_rate": 4.554801936354157e-06, |
|
"loss": 0.2916, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.7298050139275767, |
|
"grad_norm": 2.555486374304575, |
|
"learning_rate": 4.53865820268349e-06, |
|
"loss": 0.2706, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.732590529247911, |
|
"grad_norm": 1.5471616557871046, |
|
"learning_rate": 4.5225193182632675e-06, |
|
"loss": 0.2616, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.7353760445682451, |
|
"grad_norm": 1.7575174059438947, |
|
"learning_rate": 4.506385452732338e-06, |
|
"loss": 0.2976, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.7381615598885793, |
|
"grad_norm": 1.538042673072759, |
|
"learning_rate": 4.4902567756767976e-06, |
|
"loss": 0.3002, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.7409470752089136, |
|
"grad_norm": 3.233969582559621, |
|
"learning_rate": 4.474133456628208e-06, |
|
"loss": 0.3025, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.7437325905292478, |
|
"grad_norm": 1.5895671971985261, |
|
"learning_rate": 4.458015665061807e-06, |
|
"loss": 0.2929, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.7465181058495822, |
|
"grad_norm": 1.637088400218402, |
|
"learning_rate": 4.441903570394739e-06, |
|
"loss": 0.2733, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.7493036211699164, |
|
"grad_norm": 2.578153019974749, |
|
"learning_rate": 4.425797341984258e-06, |
|
"loss": 0.2952, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.7520891364902507, |
|
"grad_norm": 1.6009812753741126, |
|
"learning_rate": 4.409697149125964e-06, |
|
"loss": 0.2766, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.754874651810585, |
|
"grad_norm": 1.647918768651113, |
|
"learning_rate": 4.3936031610520126e-06, |
|
"loss": 0.2953, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.7576601671309193, |
|
"grad_norm": 3.338238305769451, |
|
"learning_rate": 4.377515546929341e-06, |
|
"loss": 0.2747, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.7604456824512535, |
|
"grad_norm": 3.07865863947678, |
|
"learning_rate": 4.361434475857891e-06, |
|
"loss": 0.2707, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.7632311977715878, |
|
"grad_norm": 3.21194223039647, |
|
"learning_rate": 4.3453601168688225e-06, |
|
"loss": 0.2742, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.766016713091922, |
|
"grad_norm": 2.018552105986319, |
|
"learning_rate": 4.329292638922753e-06, |
|
"loss": 0.254, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.7688022284122562, |
|
"grad_norm": 2.1295786602437774, |
|
"learning_rate": 4.313232210907959e-06, |
|
"loss": 0.2841, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.7715877437325904, |
|
"grad_norm": 1.4585424987298985, |
|
"learning_rate": 4.297179001638629e-06, |
|
"loss": 0.2929, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.7743732590529246, |
|
"grad_norm": 2.3433110644707567, |
|
"learning_rate": 4.281133179853061e-06, |
|
"loss": 0.2775, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.777158774373259, |
|
"grad_norm": 2.63329823393862, |
|
"learning_rate": 4.2650949142119116e-06, |
|
"loss": 0.292, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.7799442896935933, |
|
"grad_norm": 1.6745931385231543, |
|
"learning_rate": 4.249064373296403e-06, |
|
"loss": 0.231, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.7827298050139275, |
|
"grad_norm": 1.554115156478474, |
|
"learning_rate": 4.233041725606573e-06, |
|
"loss": 0.2892, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.785515320334262, |
|
"grad_norm": 2.3314227781818566, |
|
"learning_rate": 4.2170271395594855e-06, |
|
"loss": 0.2911, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.7883008356545962, |
|
"grad_norm": 1.5329121690771084, |
|
"learning_rate": 4.201020783487465e-06, |
|
"loss": 0.2689, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.7910863509749304, |
|
"grad_norm": 1.2504499985221544, |
|
"learning_rate": 4.185022825636334e-06, |
|
"loss": 0.2804, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.7938718662952646, |
|
"grad_norm": 1.7462454851135107, |
|
"learning_rate": 4.169033434163637e-06, |
|
"loss": 0.2518, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.7966573816155988, |
|
"grad_norm": 2.0268595909150484, |
|
"learning_rate": 4.153052777136879e-06, |
|
"loss": 0.283, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.799442896935933, |
|
"grad_norm": 1.6364512215335187, |
|
"learning_rate": 4.137081022531748e-06, |
|
"loss": 0.2951, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.8022284122562673, |
|
"grad_norm": 2.080242740008878, |
|
"learning_rate": 4.121118338230369e-06, |
|
"loss": 0.2845, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.8050139275766015, |
|
"grad_norm": 2.3388483649839684, |
|
"learning_rate": 4.105164892019514e-06, |
|
"loss": 0.2859, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.807799442896936, |
|
"grad_norm": 3.0511340224107393, |
|
"learning_rate": 4.089220851588861e-06, |
|
"loss": 0.285, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.8105849582172702, |
|
"grad_norm": 1.450130569619048, |
|
"learning_rate": 4.0732863845292204e-06, |
|
"loss": 0.2833, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.8133704735376046, |
|
"grad_norm": 2.5167509633345397, |
|
"learning_rate": 4.0573616583307705e-06, |
|
"loss": 0.2645, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.8161559888579388, |
|
"grad_norm": 2.400008088635949, |
|
"learning_rate": 4.041446840381309e-06, |
|
"loss": 0.268, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.818941504178273, |
|
"grad_norm": 3.7297497983648897, |
|
"learning_rate": 4.025542097964478e-06, |
|
"loss": 0.3006, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.8217270194986073, |
|
"grad_norm": 2.7706549383988706, |
|
"learning_rate": 4.009647598258022e-06, |
|
"loss": 0.2914, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.8245125348189415, |
|
"grad_norm": 2.0189831546163544, |
|
"learning_rate": 3.993763508332014e-06, |
|
"loss": 0.2983, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.8272980501392757, |
|
"grad_norm": 2.714527480715167, |
|
"learning_rate": 3.977889995147114e-06, |
|
"loss": 0.2891, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.83008356545961, |
|
"grad_norm": 4.858060299847715, |
|
"learning_rate": 3.962027225552807e-06, |
|
"loss": 0.2999, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.8328690807799441, |
|
"grad_norm": 4.020323011063082, |
|
"learning_rate": 3.946175366285647e-06, |
|
"loss": 0.2652, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.8356545961002786, |
|
"grad_norm": 3.3050835359023982, |
|
"learning_rate": 3.930334583967514e-06, |
|
"loss": 0.297, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.8384401114206128, |
|
"grad_norm": 1.905052551154398, |
|
"learning_rate": 3.914505045103845e-06, |
|
"loss": 0.2926, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.841225626740947, |
|
"grad_norm": 1.6726311395622961, |
|
"learning_rate": 3.898686916081909e-06, |
|
"loss": 0.2709, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.8440111420612815, |
|
"grad_norm": 2.065773609243907, |
|
"learning_rate": 3.88288036316903e-06, |
|
"loss": 0.2725, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.8467966573816157, |
|
"grad_norm": 1.5406341716366356, |
|
"learning_rate": 3.867085552510865e-06, |
|
"loss": 0.2742, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.84958217270195, |
|
"grad_norm": 2.66104679700073, |
|
"learning_rate": 3.851302650129637e-06, |
|
"loss": 0.2872, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.8523676880222841, |
|
"grad_norm": 1.9177803744408088, |
|
"learning_rate": 3.835531821922405e-06, |
|
"loss": 0.2824, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.8551532033426184, |
|
"grad_norm": 1.341353633110001, |
|
"learning_rate": 3.819773233659314e-06, |
|
"loss": 0.2894, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.8579387186629526, |
|
"grad_norm": 3.2217723105752927, |
|
"learning_rate": 3.8040270509818446e-06, |
|
"loss": 0.2863, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.8607242339832868, |
|
"grad_norm": 1.810952694376957, |
|
"learning_rate": 3.788293439401093e-06, |
|
"loss": 0.2929, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.863509749303621, |
|
"grad_norm": 1.7234836180040496, |
|
"learning_rate": 3.7725725642960047e-06, |
|
"loss": 0.2839, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.8662952646239555, |
|
"grad_norm": 2.382665280644848, |
|
"learning_rate": 3.7568645909116608e-06, |
|
"loss": 0.2988, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.8690807799442897, |
|
"grad_norm": 2.50784068736572, |
|
"learning_rate": 3.741169684357522e-06, |
|
"loss": 0.2694, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.8718662952646241, |
|
"grad_norm": 1.6094800032427474, |
|
"learning_rate": 3.725488009605708e-06, |
|
"loss": 0.3036, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.8746518105849583, |
|
"grad_norm": 1.2880733937082294, |
|
"learning_rate": 3.7098197314892493e-06, |
|
"loss": 0.2837, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.8774373259052926, |
|
"grad_norm": 1.6264434546927888, |
|
"learning_rate": 3.6941650147003655e-06, |
|
"loss": 0.2659, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.8802228412256268, |
|
"grad_norm": 1.6575404014019213, |
|
"learning_rate": 3.6785240237887355e-06, |
|
"loss": 0.2654, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.883008356545961, |
|
"grad_norm": 2.013031269138201, |
|
"learning_rate": 3.662896923159752e-06, |
|
"loss": 0.2834, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.8857938718662952, |
|
"grad_norm": 1.3371109973237258, |
|
"learning_rate": 3.647283877072815e-06, |
|
"loss": 0.2687, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.8885793871866294, |
|
"grad_norm": 1.3013361932750283, |
|
"learning_rate": 3.6316850496395863e-06, |
|
"loss": 0.2787, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.8913649025069637, |
|
"grad_norm": 1.724936775595971, |
|
"learning_rate": 3.616100604822279e-06, |
|
"loss": 0.2967, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.894150417827298, |
|
"grad_norm": 2.211205679203772, |
|
"learning_rate": 3.600530706431922e-06, |
|
"loss": 0.2936, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.8969359331476323, |
|
"grad_norm": 2.1187547944334173, |
|
"learning_rate": 3.584975518126648e-06, |
|
"loss": 0.2761, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.8997214484679665, |
|
"grad_norm": 1.7002511984987028, |
|
"learning_rate": 3.569435203409972e-06, |
|
"loss": 0.2721, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.902506963788301, |
|
"grad_norm": 2.3615417353432404, |
|
"learning_rate": 3.5539099256290616e-06, |
|
"loss": 0.2735, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.9052924791086352, |
|
"grad_norm": 1.6520804275200633, |
|
"learning_rate": 3.5383998479730357e-06, |
|
"loss": 0.2738, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.9080779944289694, |
|
"grad_norm": 1.5753725225222726, |
|
"learning_rate": 3.522905133471237e-06, |
|
"loss": 0.2685, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.9108635097493036, |
|
"grad_norm": 2.7152287875443535, |
|
"learning_rate": 3.507425944991529e-06, |
|
"loss": 0.2708, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.9136490250696379, |
|
"grad_norm": 1.7764603433032362, |
|
"learning_rate": 3.491962445238569e-06, |
|
"loss": 0.2858, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.916434540389972, |
|
"grad_norm": 1.5541677040584143, |
|
"learning_rate": 3.4765147967521174e-06, |
|
"loss": 0.2752, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.9192200557103063, |
|
"grad_norm": 1.720958192237286, |
|
"learning_rate": 3.461083161905311e-06, |
|
"loss": 0.2798, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.9220055710306405, |
|
"grad_norm": 1.7468203695774724, |
|
"learning_rate": 3.4456677029029687e-06, |
|
"loss": 0.2694, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.924791086350975, |
|
"grad_norm": 1.7819065057139172, |
|
"learning_rate": 3.430268581779883e-06, |
|
"loss": 0.2975, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.9275766016713092, |
|
"grad_norm": 1.5298786566316103, |
|
"learning_rate": 3.41488596039911e-06, |
|
"loss": 0.2644, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.9303621169916436, |
|
"grad_norm": 2.43834820222927, |
|
"learning_rate": 3.3995200004502814e-06, |
|
"loss": 0.2725, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.9331476323119778, |
|
"grad_norm": 2.215566933351255, |
|
"learning_rate": 3.38417086344789e-06, |
|
"loss": 0.277, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.935933147632312, |
|
"grad_norm": 2.1716863264964026, |
|
"learning_rate": 3.368838710729605e-06, |
|
"loss": 0.2718, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.9387186629526463, |
|
"grad_norm": 1.926639864976258, |
|
"learning_rate": 3.3535237034545677e-06, |
|
"loss": 0.2931, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.9415041782729805, |
|
"grad_norm": 2.6738709060793515, |
|
"learning_rate": 3.3382260026017027e-06, |
|
"loss": 0.2952, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.9442896935933147, |
|
"grad_norm": 1.2335384040113437, |
|
"learning_rate": 3.322945768968021e-06, |
|
"loss": 0.272, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.947075208913649, |
|
"grad_norm": 2.0560976799275967, |
|
"learning_rate": 3.307683163166934e-06, |
|
"loss": 0.2646, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.9498607242339832, |
|
"grad_norm": 1.8834990051364848, |
|
"learning_rate": 3.292438345626565e-06, |
|
"loss": 0.2619, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.9526462395543176, |
|
"grad_norm": 2.345253862178042, |
|
"learning_rate": 3.277211476588057e-06, |
|
"loss": 0.2714, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.9554317548746518, |
|
"grad_norm": 1.7656333697527244, |
|
"learning_rate": 3.2620027161038975e-06, |
|
"loss": 0.2612, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.958217270194986, |
|
"grad_norm": 2.4627444358737445, |
|
"learning_rate": 3.2468122240362287e-06, |
|
"loss": 0.277, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.9610027855153205, |
|
"grad_norm": 1.9619001062420491, |
|
"learning_rate": 3.231640160055172e-06, |
|
"loss": 0.2655, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.9637883008356547, |
|
"grad_norm": 1.7314224138583887, |
|
"learning_rate": 3.216486683637146e-06, |
|
"loss": 0.3032, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.966573816155989, |
|
"grad_norm": 2.1427001073420877, |
|
"learning_rate": 3.2013519540631954e-06, |
|
"loss": 0.2832, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.9693593314763231, |
|
"grad_norm": 1.463234478052995, |
|
"learning_rate": 3.186236130417306e-06, |
|
"loss": 0.2776, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.9721448467966574, |
|
"grad_norm": 2.0165756401269728, |
|
"learning_rate": 3.1711393715847477e-06, |
|
"loss": 0.2721, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.9749303621169916, |
|
"grad_norm": 1.661579629661388, |
|
"learning_rate": 3.1560618362503937e-06, |
|
"loss": 0.2571, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.9777158774373258, |
|
"grad_norm": 2.0440524192749736, |
|
"learning_rate": 3.1410036828970525e-06, |
|
"loss": 0.2748, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.98050139275766, |
|
"grad_norm": 2.6198997032617375, |
|
"learning_rate": 3.1259650698038106e-06, |
|
"loss": 0.2781, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.9832869080779945, |
|
"grad_norm": 2.8920105843149027, |
|
"learning_rate": 3.1109461550443574e-06, |
|
"loss": 0.2804, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.9860724233983287, |
|
"grad_norm": 1.7888387755847777, |
|
"learning_rate": 3.095947096485335e-06, |
|
"loss": 0.2767, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.988857938718663, |
|
"grad_norm": 2.746294053725262, |
|
"learning_rate": 3.0809680517846664e-06, |
|
"loss": 0.2701, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.9916434540389973, |
|
"grad_norm": 3.326649747180231, |
|
"learning_rate": 3.0660091783899117e-06, |
|
"loss": 0.2981, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.9944289693593316, |
|
"grad_norm": 2.348981315443004, |
|
"learning_rate": 3.0510706335366034e-06, |
|
"loss": 0.2846, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.9972144846796658, |
|
"grad_norm": 1.9850382372078157, |
|
"learning_rate": 3.0361525742465975e-06, |
|
"loss": 0.291, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 2.6767932455172887, |
|
"learning_rate": 3.0212551573264224e-06, |
|
"loss": 0.2888, |
|
"step": 718 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1077, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2267886338179072.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |