{
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 1089, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0027548209366391185, |
|
"grad_norm": 1624.0, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 18.9199, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.005509641873278237, |
|
"grad_norm": 1640.0, |
|
"learning_rate": 1.2121212121212122e-05, |
|
"loss": 18.8697, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.008264462809917356, |
|
"grad_norm": 1584.0, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 17.1905, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.011019283746556474, |
|
"grad_norm": 640.0, |
|
"learning_rate": 2.4242424242424244e-05, |
|
"loss": 12.3268, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.013774104683195593, |
|
"grad_norm": 552.0, |
|
"learning_rate": 3.0303030303030306e-05, |
|
"loss": 12.8585, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01652892561983471, |
|
"grad_norm": 374.0, |
|
"learning_rate": 3.6363636363636364e-05, |
|
"loss": 12.3047, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01928374655647383, |
|
"grad_norm": 186.0, |
|
"learning_rate": 4.242424242424243e-05, |
|
"loss": 10.8696, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02203856749311295, |
|
"grad_norm": 104.5, |
|
"learning_rate": 4.848484848484849e-05, |
|
"loss": 9.2818, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.024793388429752067, |
|
"grad_norm": 75.5, |
|
"learning_rate": 5.4545454545454546e-05, |
|
"loss": 8.1257, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.027548209366391185, |
|
"grad_norm": 33.25, |
|
"learning_rate": 6.060606060606061e-05, |
|
"loss": 7.371, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.030303030303030304, |
|
"grad_norm": 19.75, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 6.9551, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03305785123966942, |
|
"grad_norm": 15.25, |
|
"learning_rate": 7.272727272727273e-05, |
|
"loss": 6.7196, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03581267217630854, |
|
"grad_norm": 29.375, |
|
"learning_rate": 7.878787878787879e-05, |
|
"loss": 6.5858, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03856749311294766, |
|
"grad_norm": 19.625, |
|
"learning_rate": 8.484848484848486e-05, |
|
"loss": 6.5112, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04132231404958678, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 9.090909090909092e-05, |
|
"loss": 6.3153, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0440771349862259, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 9.696969696969698e-05, |
|
"loss": 6.1753, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.046831955922865015, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 0.00010303030303030303, |
|
"loss": 6.034, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.049586776859504134, |
|
"grad_norm": 22.25, |
|
"learning_rate": 0.00010909090909090909, |
|
"loss": 6.0472, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05234159779614325, |
|
"grad_norm": 8.75, |
|
"learning_rate": 0.00011515151515151516, |
|
"loss": 5.8011, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.05509641873278237, |
|
"grad_norm": 14.625, |
|
"learning_rate": 0.00012121212121212122, |
|
"loss": 5.565, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05785123966942149, |
|
"grad_norm": 60.75, |
|
"learning_rate": 0.00012727272727272728, |
|
"loss": 5.4288, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.06060606060606061, |
|
"grad_norm": 19.125, |
|
"learning_rate": 0.00013333333333333334, |
|
"loss": 4.9008, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.06336088154269973, |
|
"grad_norm": 21.625, |
|
"learning_rate": 0.0001393939393939394, |
|
"loss": 4.1849, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.06611570247933884, |
|
"grad_norm": 77.0, |
|
"learning_rate": 0.00014545454545454546, |
|
"loss": 4.1083, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.06887052341597796, |
|
"grad_norm": 19.0, |
|
"learning_rate": 0.00015151515151515152, |
|
"loss": 3.0481, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07162534435261708, |
|
"grad_norm": 17.75, |
|
"learning_rate": 0.00015757575757575757, |
|
"loss": 2.3526, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.0743801652892562, |
|
"grad_norm": 20.125, |
|
"learning_rate": 0.00016363636363636366, |
|
"loss": 1.943, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.07713498622589532, |
|
"grad_norm": 5.625, |
|
"learning_rate": 0.00016969696969696972, |
|
"loss": 1.6061, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.07988980716253444, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 0.00017575757575757578, |
|
"loss": 1.3966, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08264462809917356, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 0.00018181818181818183, |
|
"loss": 1.2738, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08539944903581267, |
|
"grad_norm": 5.1875, |
|
"learning_rate": 0.0001878787878787879, |
|
"loss": 1.2299, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0881542699724518, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 0.00019393939393939395, |
|
"loss": 1.0751, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09090909090909091, |
|
"grad_norm": 1.875, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0609, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.09366391184573003, |
|
"grad_norm": 2.125, |
|
"learning_rate": 0.00019999955747114604, |
|
"loss": 0.9572, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.09641873278236915, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 0.00019999822988850082, |
|
"loss": 0.9422, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.09917355371900827, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 0.00019999601726381413, |
|
"loss": 0.854, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.10192837465564739, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 0.00019999291961666908, |
|
"loss": 0.8462, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.1046831955922865, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 0.0001999889369744816, |
|
"loss": 0.7722, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.10743801652892562, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 0.00019998406937250034, |
|
"loss": 0.7679, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.11019283746556474, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 0.00019997831685380642, |
|
"loss": 0.7679, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11294765840220386, |
|
"grad_norm": 0.81640625, |
|
"learning_rate": 0.0001999716794693129, |
|
"loss": 0.753, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.11570247933884298, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 0.00019996415727776455, |
|
"loss": 0.7336, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1184573002754821, |
|
"grad_norm": 0.79296875, |
|
"learning_rate": 0.00019995575034573705, |
|
"loss": 0.7555, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.00019994645874763658, |
|
"loss": 0.717, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.12396694214876033, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 0.0001999362825656992, |
|
"loss": 0.7194, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.12672176308539945, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.00019992522188998994, |
|
"loss": 0.678, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.12947658402203857, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.00019991327681840218, |
|
"loss": 0.7154, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.1322314049586777, |
|
"grad_norm": 0.68359375, |
|
"learning_rate": 0.00019990044745665672, |
|
"loss": 0.6698, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.1349862258953168, |
|
"grad_norm": 0.7109375, |
|
"learning_rate": 0.0001998867339183008, |
|
"loss": 0.696, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.13774104683195593, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 0.00019987213632470717, |
|
"loss": 0.6746, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.14049586776859505, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.0001998566548050729, |
|
"loss": 0.6681, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.14325068870523416, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.0001998402894964184, |
|
"loss": 0.6078, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.14600550964187328, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 0.00019982304054358614, |
|
"loss": 0.6642, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.1487603305785124, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.00019980490809923926, |
|
"loss": 0.6543, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.15151515151515152, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00019978589232386035, |
|
"loss": 0.6227, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.15426997245179064, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.00019976599338575004, |
|
"loss": 0.644, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.15702479338842976, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.00019974521146102537, |
|
"loss": 0.6395, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.15977961432506887, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 0.0001997235467336184, |
|
"loss": 0.6027, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.162534435261708, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.0001997009993952744, |
|
"loss": 0.6463, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1652892561983471, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 0.00019967756964555045, |
|
"loss": 0.6159, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.16804407713498623, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00019965325769181325, |
|
"loss": 0.6143, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.17079889807162535, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00019962806374923764, |
|
"loss": 0.6104, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.17355371900826447, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.0001996019880408046, |
|
"loss": 0.6274, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.1763085399449036, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00019957503079729916, |
|
"loss": 0.6072, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.1790633608815427, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 0.00019954719225730847, |
|
"loss": 0.6298, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.0001995184726672197, |
|
"loss": 0.5852, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.18457300275482094, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.00019948887228121777, |
|
"loss": 0.6086, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.18732782369146006, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.0001994583913612832, |
|
"loss": 0.6177, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.19008264462809918, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00019942703017718975, |
|
"loss": 0.5989, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.1928374655647383, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00019939478900650193, |
|
"loss": 0.5716, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.19559228650137742, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00019936166813457274, |
|
"loss": 0.5617, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.19834710743801653, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.000199327667854541, |
|
"loss": 0.5655, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.20110192837465565, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.00019929278846732884, |
|
"loss": 0.6169, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.20385674931129477, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00019925703028163892, |
|
"loss": 0.5933, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.2066115702479339, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00019922039361395185, |
|
"loss": 0.5748, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.209366391184573, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.0001991828787885233, |
|
"loss": 0.5967, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.21212121212121213, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 0.00019914448613738106, |
|
"loss": 0.5905, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.21487603305785125, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00019910521600032227, |
|
"loss": 0.5392, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.21763085399449036, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.00019906506872491034, |
|
"loss": 0.5568, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.22038567493112948, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.0001990240446664718, |
|
"loss": 0.56, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2231404958677686, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.0001989821441880933, |
|
"loss": 0.578, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.22589531680440772, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00019893936766061812, |
|
"loss": 0.5757, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.22865013774104684, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019889571546264335, |
|
"loss": 0.5996, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.23140495867768596, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00019885118798051605, |
|
"loss": 0.5746, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.23415977961432508, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019880578560833016, |
|
"loss": 0.5497, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.2369146005509642, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0001987595087479229, |
|
"loss": 0.5716, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.2396694214876033, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.00019871235780887113, |
|
"loss": 0.5817, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.0001986643332084879, |
|
"loss": 0.573, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.24517906336088155, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00019861543537181867, |
|
"loss": 0.5579, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.24793388429752067, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00019856566473163746, |
|
"loss": 0.5799, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.25068870523415976, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00019851502172844317, |
|
"loss": 0.5565, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2534435261707989, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.00019846350681045568, |
|
"loss": 0.5515, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.256198347107438, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.0001984111204336116, |
|
"loss": 0.5734, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.25895316804407714, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00019835786306156072, |
|
"loss": 0.5379, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.26170798898071623, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00019830373516566146, |
|
"loss": 0.5603, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2644628099173554, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019824873722497694, |
|
"loss": 0.5444, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.26721763085399447, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00019819286972627066, |
|
"loss": 0.5374, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.2699724517906336, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00019813613316400227, |
|
"loss": 0.5585, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.2727272727272727, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00019807852804032305, |
|
"loss": 0.561, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.27548209366391185, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.0001980200548650716, |
|
"loss": 0.5279, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.27823691460055094, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019796071415576925, |
|
"loss": 0.5487, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.2809917355371901, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00019790050643761552, |
|
"loss": 0.5525, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.2837465564738292, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00019783943224348352, |
|
"loss": 0.5848, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.2865013774104683, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00019777749211391502, |
|
"loss": 0.514, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.2892561983471074, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019771468659711595, |
|
"loss": 0.5564, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.29201101928374656, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019765101624895143, |
|
"loss": 0.5158, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.29476584022038566, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0001975864816329407, |
|
"loss": 0.5225, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.2975206611570248, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.0001975210833202524, |
|
"loss": 0.552, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.3002754820936639, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0001974548218896993, |
|
"loss": 0.5314, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.30303030303030304, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00019738769792773336, |
|
"loss": 0.5422, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.30578512396694213, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019731971202844036, |
|
"loss": 0.5293, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.3085399449035813, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.0001972508647935347, |
|
"loss": 0.5217, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.31129476584022037, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00019718115683235417, |
|
"loss": 0.5319, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3140495867768595, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019711058876185447, |
|
"loss": 0.5627, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.3168044077134986, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0001970391612066037, |
|
"loss": 0.5335, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.31955922865013775, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.000196966874798777, |
|
"loss": 0.5388, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.32231404958677684, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00019689373017815073, |
|
"loss": 0.5201, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.325068870523416, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00019681972799209704, |
|
"loss": 0.5424, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.3278236914600551, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.000196744868895578, |
|
"loss": 0.5341, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.3305785123966942, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00019666915355113975, |
|
"loss": 0.5332, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00019659258262890683, |
|
"loss": 0.5247, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.33608815426997246, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00019651515680657608, |
|
"loss": 0.5323, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.33884297520661155, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019643687676941068, |
|
"loss": 0.5486, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3415977961432507, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.0001963577432102342, |
|
"loss": 0.5052, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.3443526170798898, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0001962777568294242, |
|
"loss": 0.5039, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.34710743801652894, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019619691833490643, |
|
"loss": 0.5288, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.349862258953168, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00019611522844214813, |
|
"loss": 0.5205, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.3526170798898072, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00019603268787415202, |
|
"loss": 0.542, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.35537190082644626, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00019594929736144976, |
|
"loss": 0.5236, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.3581267217630854, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.0001958650576420955, |
|
"loss": 0.5205, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.3608815426997245, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00019577996946165937, |
|
"loss": 0.5423, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001956940335732209, |
|
"loss": 0.5164, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.36639118457300274, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.00019560725073736226, |
|
"loss": 0.5386, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.3691460055096419, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00019551962172216164, |
|
"loss": 0.5246, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.371900826446281, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.0001954311473031864, |
|
"loss": 0.503, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.3746556473829201, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001953418282634862, |
|
"loss": 0.542, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.3774104683195592, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.00019525166539358606, |
|
"loss": 0.5124, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.38016528925619836, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00019516065949147943, |
|
"loss": 0.5084, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.38292011019283745, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00019506881136262112, |
|
"loss": 0.5375, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.3856749311294766, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00019497612181991998, |
|
"loss": 0.5138, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.3884297520661157, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00019488259168373197, |
|
"loss": 0.5049, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.39118457300275483, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0001947882217818528, |
|
"loss": 0.5096, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.3939393939393939, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001946930129495106, |
|
"loss": 0.4893, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.39669421487603307, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00019459696602935837, |
|
"loss": 0.5345, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.39944903581267216, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00019450008187146684, |
|
"loss": 0.5522, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.4022038567493113, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00019440236133331674, |
|
"loss": 0.5099, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.4049586776859504, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00019430380527979123, |
|
"loss": 0.509, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.40771349862258954, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.0001942044145831682, |
|
"loss": 0.5334, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.41046831955922863, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00019410419012311268, |
|
"loss": 0.5144, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.4132231404958678, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.00019400313278666902, |
|
"loss": 0.5207, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.41597796143250687, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00019390124346825295, |
|
"loss": 0.5321, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.418732782369146, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00019379852306964374, |
|
"loss": 0.5192, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.4214876033057851, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.0001936949724999762, |
|
"loss": 0.5055, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.42424242424242425, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.0001935905926757326, |
|
"loss": 0.5222, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.42699724517906334, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.0001934853845207346, |
|
"loss": 0.5161, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.4297520661157025, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00019337934896613516, |
|
"loss": 0.5383, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.4325068870523416, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.0001932724869504101, |
|
"loss": 0.5556, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.43526170798898073, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0001931647994193499, |
|
"loss": 0.5111, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.4380165289256198, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00019305628732605137, |
|
"loss": 0.4922, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.44077134986225897, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00019294695163090916, |
|
"loss": 0.5191, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.44352617079889806, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00019283679330160726, |
|
"loss": 0.5117, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.4462809917355372, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00019272581331311047, |
|
"loss": 0.536, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4490358126721763, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00019261401264765573, |
|
"loss": 0.488, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.45179063360881544, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.0001925013922947434, |
|
"loss": 0.5257, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.45454545454545453, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.0001923879532511287, |
|
"loss": 0.5125, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.4573002754820937, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.0001922736965208125, |
|
"loss": 0.5307, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.46005509641873277, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.0001921586231150329, |
|
"loss": 0.5044, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.4628099173553719, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00019204273405225587, |
|
"loss": 0.5166, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.465564738292011, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00019192603035816656, |
|
"loss": 0.521, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.46831955922865015, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 0.00019180851306566, |
|
"loss": 0.5107, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.47107438016528924, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00019169018321483198, |
|
"loss": 0.5087, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.4738292011019284, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00019157104185297004, |
|
"loss": 0.5009, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.4765840220385675, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00019145109003454396, |
|
"loss": 0.521, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.4793388429752066, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00019133032882119653, |
|
"loss": 0.536, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.4820936639118457, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00019120875928173422, |
|
"loss": 0.5197, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00019108638249211758, |
|
"loss": 0.4989, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.48760330578512395, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00019096319953545185, |
|
"loss": 0.5135, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.4903581267217631, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00019083921150197725, |
|
"loss": 0.5124, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.4931129476584022, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00019071441948905943, |
|
"loss": 0.4891, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.49586776859504134, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00019058882460117974, |
|
"loss": 0.4992, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.4986225895316804, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.00019046242794992538, |
|
"loss": 0.5117, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5013774104683195, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00019033523065397972, |
|
"loss": 0.5013, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5041322314049587, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00019020723383911215, |
|
"loss": 0.5084, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.5068870523415978, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00019007843863816842, |
|
"loss": 0.4984, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.509641873278237, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 0.00018994884619106031, |
|
"loss": 0.5314, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.512396694214876, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00018981845764475582, |
|
"loss": 0.517, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.5151515151515151, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00018968727415326884, |
|
"loss": 0.4851, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.5179063360881543, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00018955529687764893, |
|
"loss": 0.517, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.5206611570247934, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00018942252698597113, |
|
"loss": 0.5006, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5234159779614325, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00018928896565332566, |
|
"loss": 0.4963, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5261707988980716, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00018915461406180731, |
|
"loss": 0.5444, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5289256198347108, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00018901947340050528, |
|
"loss": 0.5151, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.5316804407713499, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00018888354486549237, |
|
"loss": 0.4973, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5344352617079889, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00018874682965981454, |
|
"loss": 0.4932, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5371900826446281, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00018860932899348028, |
|
"loss": 0.4949, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5399449035812672, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.0001884710440834498, |
|
"loss": 0.5052, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.5426997245179064, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.0001883319761536244, |
|
"loss": 0.5119, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.0001881921264348355, |
|
"loss": 0.4726, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.5482093663911846, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001880514961648338, |
|
"loss": 0.4884, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.5509641873278237, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.0001879100865882784, |
|
"loss": 0.4953, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5537190082644629, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00018776789895672558, |
|
"loss": 0.5015, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.5564738292011019, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00018762493452861796, |
|
"loss": 0.5116, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.559228650137741, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00018748119456927324, |
|
"loss": 0.4906, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.5619834710743802, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00018733668035087302, |
|
"loss": 0.5098, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.5647382920110193, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00018719139315245148, |
|
"loss": 0.5101, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.5674931129476584, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00018704533425988423, |
|
"loss": 0.5009, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.5702479338842975, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00018689850496587674, |
|
"loss": 0.5054, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.5730027548209367, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00018675090656995296, |
|
"loss": 0.4899, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.5757575757575758, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00018660254037844388, |
|
"loss": 0.5127, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.5785123966942148, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00018645340770447595, |
|
"loss": 0.4663, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.581267217630854, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00018630350986795934, |
|
"loss": 0.509, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.5840220385674931, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00018615284819557647, |
|
"loss": 0.489, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.5867768595041323, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00018600142402077006, |
|
"loss": 0.4974, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.5895316804407713, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00018584923868373142, |
|
"loss": 0.4829, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.5922865013774105, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00018569629353138863, |
|
"loss": 0.5064, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.5950413223140496, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00018554258991739452, |
|
"loss": 0.4886, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.5977961432506887, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0001853881292021148, |
|
"loss": 0.504, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.6005509641873278, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00018523291275261597, |
|
"loss": 0.4898, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.6033057851239669, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0001850769419426531, |
|
"loss": 0.4922, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001849202181526579, |
|
"loss": 0.4863, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6088154269972452, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00018476274276972636, |
|
"loss": 0.4867, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.6115702479338843, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.0001846045171876065, |
|
"loss": 0.4871, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6143250688705234, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00018444554280668603, |
|
"loss": 0.4823, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6170798898071626, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00018428582103397997, |
|
"loss": 0.492, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.6198347107438017, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00018412535328311814, |
|
"loss": 0.4871, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6225895316804407, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00018396414097433277, |
|
"loss": 0.5034, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.6253443526170799, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.0001838021855344458, |
|
"loss": 0.5144, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.628099173553719, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.00018363948839685636, |
|
"loss": 0.5138, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6308539944903582, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00018347605100152802, |
|
"loss": 0.4761, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.6336088154269972, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00018331187479497603, |
|
"loss": 0.4727, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6363636363636364, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.00018314696123025454, |
|
"loss": 0.5208, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.6391184573002755, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00018298131176694378, |
|
"loss": 0.5015, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.6418732782369146, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00018281492787113708, |
|
"loss": 0.4731, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.6446280991735537, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.000182647811015428, |
|
"loss": 0.4776, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.6473829201101928, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00018247996267889706, |
|
"loss": 0.5096, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.650137741046832, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00018231138434709893, |
|
"loss": 0.4797, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.6528925619834711, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00018214207751204918, |
|
"loss": 0.4824, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.6556473829201102, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00018197204367221095, |
|
"loss": 0.5107, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.6584022038567493, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00018180128433248184, |
|
"loss": 0.5057, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.6611570247933884, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.0001816298010041806, |
|
"loss": 0.4754, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.6639118457300276, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00018145759520503358, |
|
"loss": 0.4684, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00018128466845916154, |
|
"loss": 0.4781, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.6694214876033058, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.0001811110222970659, |
|
"loss": 0.4812, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.6721763085399449, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00018093665825561552, |
|
"loss": 0.5041, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.6749311294765841, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00018076157787803268, |
|
"loss": 0.5, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.6776859504132231, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001805857827138798, |
|
"loss": 0.4692, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.6804407713498623, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00018040927431904553, |
|
"loss": 0.4593, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.6831955922865014, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00018023205425573096, |
|
"loss": 0.5098, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.6859504132231405, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00018005412409243606, |
|
"loss": 0.4892, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.6887052341597796, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00017987548540394533, |
|
"loss": 0.5027, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.6914600550964187, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001796961397713143, |
|
"loss": 0.4778, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.6942148760330579, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0001795160887818553, |
|
"loss": 0.4964, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.696969696969697, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00017933533402912354, |
|
"loss": 0.5048, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.699724517906336, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0001791538771129028, |
|
"loss": 0.4973, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7024793388429752, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.0001789717196391916, |
|
"loss": 0.4947, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7052341597796143, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00017878886322018862, |
|
"loss": 0.4908, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.7079889807162535, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00017860530947427875, |
|
"loss": 0.4599, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7107438016528925, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00017842106002601856, |
|
"loss": 0.5075, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.7134986225895317, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00017823611650612194, |
|
"loss": 0.4909, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7162534435261708, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00017805048055144585, |
|
"loss": 0.4758, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.71900826446281, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00017786415380497553, |
|
"loss": 0.4698, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.721763085399449, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00017767713791581026, |
|
"loss": 0.4707, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.7245179063360881, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00017748943453914858, |
|
"loss": 0.4895, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.0001773010453362737, |
|
"loss": 0.4671, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.7300275482093664, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00017711197197453878, |
|
"loss": 0.4893, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.7327823691460055, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00017692221612735214, |
|
"loss": 0.4913, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.7355371900826446, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00017673177947416258, |
|
"loss": 0.4797, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.7382920110192838, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00017654066370044436, |
|
"loss": 0.486, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.7410468319559229, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00017634887049768237, |
|
"loss": 0.5123, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.743801652892562, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00017615640156335712, |
|
"loss": 0.4724, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.7465564738292011, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00017596325860092974, |
|
"loss": 0.485, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.7493112947658402, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00017576944331982696, |
|
"loss": 0.4708, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.7520661157024794, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00017557495743542585, |
|
"loss": 0.4909, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.7548209366391184, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.00017537980266903866, |
|
"loss": 0.4668, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.7575757575757576, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00017518398074789775, |
|
"loss": 0.4804, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.7603305785123967, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001749874934051401, |
|
"loss": 0.4815, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.7630853994490359, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.0001747903423797921, |
|
"loss": 0.4844, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.7658402203856749, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00017459252941675409, |
|
"loss": 0.4798, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.768595041322314, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00017439405626678496, |
|
"loss": 0.508, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.7713498622589532, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00017419492468648658, |
|
"loss": 0.4928, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.7741046831955923, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.0001739951364382884, |
|
"loss": 0.4841, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.7768595041322314, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00017379469329043167, |
|
"loss": 0.5048, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.7796143250688705, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00017359359701695396, |
|
"loss": 0.4564, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.7823691460055097, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.0001733918493976733, |
|
"loss": 0.4736, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.7851239669421488, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00017318945221817255, |
|
"loss": 0.4782, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.7878787878787878, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00017298640726978357, |
|
"loss": 0.4577, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.790633608815427, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.0001727827163495713, |
|
"loss": 0.4815, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.7933884297520661, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00017257838126031797, |
|
"loss": 0.4836, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.7961432506887053, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00017237340381050703, |
|
"loss": 0.4938, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.7988980716253443, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00017216778581430716, |
|
"loss": 0.4921, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.8016528925619835, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00017196152909155628, |
|
"loss": 0.4793, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.8044077134986226, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.0001717546354677455, |
|
"loss": 0.4712, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.8071625344352618, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00017154710677400265, |
|
"loss": 0.4825, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.8099173553719008, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00017133894484707655, |
|
"loss": 0.481, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.8126721763085399, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00017113015152932034, |
|
"loss": 0.5066, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.8154269972451791, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00017092072866867534, |
|
"loss": 0.473, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.8181818181818182, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00017071067811865476, |
|
"loss": 0.4743, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.8209366391184573, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00017050000173832714, |
|
"loss": 0.4692, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.8236914600550964, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001702887013923001, |
|
"loss": 0.4613, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.8264462809917356, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00017007677895070357, |
|
"loss": 0.4771, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.8292011019283747, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00016986423628917346, |
|
"loss": 0.4685, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.8319559228650137, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00016965107528883503, |
|
"loss": 0.4753, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.8347107438016529, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00016943729783628608, |
|
"loss": 0.4836, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.837465564738292, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00016922290582358044, |
|
"loss": 0.4545, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.8402203856749312, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00016900790114821122, |
|
"loss": 0.5018, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.8429752066115702, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00016879228571309377, |
|
"loss": 0.4917, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.8457300275482094, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00016857606142654918, |
|
"loss": 0.4691, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00016835923020228712, |
|
"loss": 0.4738, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.8512396694214877, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00016814179395938913, |
|
"loss": 0.4974, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.8539944903581267, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00016792375462229133, |
|
"loss": 0.4819, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.8567493112947658, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001677051141207678, |
|
"loss": 0.5076, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.859504132231405, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00016748587438991303, |
|
"loss": 0.4875, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.8622589531680441, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00016726603737012529, |
|
"loss": 0.4792, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.8650137741046832, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0001670456050070889, |
|
"loss": 0.4967, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.8677685950413223, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00016682457925175763, |
|
"loss": 0.4549, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.8705234159779615, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00016660296206033693, |
|
"loss": 0.4735, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.8732782369146006, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00016638075539426677, |
|
"loss": 0.4708, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.8760330578512396, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00016615796122020443, |
|
"loss": 0.4614, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.8787878787878788, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.00016593458151000688, |
|
"loss": 0.5232, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.8815426997245179, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 0.00016571061824071354, |
|
"loss": 0.5013, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.8842975206611571, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00016548607339452853, |
|
"loss": 0.4825, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.8870523415977961, |
|
"grad_norm": 0.84375, |
|
"learning_rate": 0.0001652609489588033, |
|
"loss": 0.5196, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.8898071625344353, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00016503524692601908, |
|
"loss": 0.4606, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.8925619834710744, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00016480896929376907, |
|
"loss": 0.4919, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.8953168044077136, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00016458211806474088, |
|
"loss": 0.4736, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.8980716253443526, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00016435469524669885, |
|
"loss": 0.478, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.9008264462809917, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.0001641267028524661, |
|
"loss": 0.4843, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.9035812672176309, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00016389814289990694, |
|
"loss": 0.4677, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.90633608815427, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00016366901741190882, |
|
"loss": 0.4844, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00016343932841636456, |
|
"loss": 0.4593, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.9118457300275482, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00016320907794615427, |
|
"loss": 0.477, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.9146005509641874, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00016297826803912755, |
|
"loss": 0.4878, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.9173553719008265, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0001627469007380852, |
|
"loss": 0.4824, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.9201101928374655, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00016251497809076143, |
|
"loss": 0.4805, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.9228650137741047, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.0001622825021498054, |
|
"loss": 0.4774, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.9256198347107438, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00016204947497276345, |
|
"loss": 0.4725, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.928374655647383, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00016181589862206052, |
|
"loss": 0.4831, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.931129476584022, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.00016158177516498214, |
|
"loss": 0.4687, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.9338842975206612, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00016134710667365596, |
|
"loss": 0.4742, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.9366391184573003, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00016111189522503359, |
|
"loss": 0.4629, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.9393939393939394, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00016087614290087208, |
|
"loss": 0.4617, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.9421487603305785, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.00016063985178771555, |
|
"loss": 0.4893, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.9449035812672176, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 0.00016040302397687665, |
|
"loss": 0.4857, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.9476584022038568, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00016016566156441827, |
|
"loss": 0.4803, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.9504132231404959, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.0001599277666511347, |
|
"loss": 0.4898, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.953168044077135, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.0001596893413425332, |
|
"loss": 0.4965, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.9559228650137741, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00015945038774881534, |
|
"loss": 0.4748, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.9586776859504132, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00015921090798485832, |
|
"loss": 0.4843, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.9614325068870524, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0001589709041701962, |
|
"loss": 0.4609, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.9641873278236914, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00015873037842900137, |
|
"loss": 0.4519, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.9669421487603306, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0001584893328900653, |
|
"loss": 0.4736, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00015824776968678024, |
|
"loss": 0.4806, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.9724517906336089, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00015800569095711982, |
|
"loss": 0.5068, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.9752066115702479, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.0001577630988436206, |
|
"loss": 0.4547, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.977961432506887, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0001575199954933627, |
|
"loss": 0.4729, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.9807162534435262, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0001572763830579511, |
|
"loss": 0.4616, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.9834710743801653, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.0001570322636934964, |
|
"loss": 0.4631, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.9862258953168044, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00015678763956059592, |
|
"loss": 0.4784, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.9889807162534435, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00015654251282431437, |
|
"loss": 0.4613, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.9917355371900827, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00015629688565416478, |
|
"loss": 0.4409, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.9944903581267218, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0001560507602240894, |
|
"loss": 0.4585, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.9972451790633609, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00015580413871244035, |
|
"loss": 0.4528, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00015555702330196023, |
|
"loss": 0.4772, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_train_loss": 0.46483051776885986, |
|
"eval_train_runtime": 8.4396, |
|
"eval_train_samples_per_second": 159.604, |
|
"eval_train_steps_per_second": 20.025, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.002754820936639, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001553094161797631, |
|
"loss": 0.4305, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.0055096418732783, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0001550613195373149, |
|
"loss": 0.431, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.0082644628099173, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.000154812735570414, |
|
"loss": 0.444, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.0110192837465566, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00015456366647917207, |
|
"loss": 0.4232, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.0137741046831956, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001543141144679942, |
|
"loss": 0.4234, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.0165289256198347, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00015406408174555976, |
|
"loss": 0.4268, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.019283746556474, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.0001538135705248026, |
|
"loss": 0.429, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.022038567493113, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00015356258302289165, |
|
"loss": 0.4307, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.024793388429752, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00015331112146121104, |
|
"loss": 0.4068, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.0275482093663912, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00015305918806534082, |
|
"loss": 0.4102, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.0303030303030303, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0001528067850650368, |
|
"loss": 0.4267, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.0330578512396693, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00015255391469421128, |
|
"loss": 0.4107, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.0358126721763086, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.000152300579190913, |
|
"loss": 0.4245, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.0385674931129476, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00015204678079730724, |
|
"loss": 0.4229, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.0413223140495869, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00015179252175965633, |
|
"loss": 0.4371, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.044077134986226, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00015153780432829945, |
|
"loss": 0.4068, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.046831955922865, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0001512826307576329, |
|
"loss": 0.4152, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.0495867768595042, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00015102700330609, |
|
"loss": 0.4284, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.0523415977961432, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00015077092423612117, |
|
"loss": 0.4076, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.0550964187327825, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00015051439581417406, |
|
"loss": 0.4264, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.0578512396694215, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00015025742031067317, |
|
"loss": 0.4253, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.0606060606060606, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00015000000000000001, |
|
"loss": 0.4233, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.0633608815426998, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00014974213716047288, |
|
"loss": 0.4097, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.0661157024793388, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00014948383407432678, |
|
"loss": 0.4143, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.0688705234159779, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00014922509302769298, |
|
"loss": 0.4124, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.0716253443526171, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00014896591631057912, |
|
"loss": 0.4227, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.0743801652892562, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00014870630621684872, |
|
"loss": 0.4449, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.0771349862258952, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00014844626504420088, |
|
"loss": 0.396, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.0798898071625345, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00014818579509415008, |
|
"loss": 0.4218, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.0826446280991735, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0001479248986720057, |
|
"loss": 0.416, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.0853994490358128, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00014766357808685158, |
|
"loss": 0.4134, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.0881542699724518, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00014740183565152575, |
|
"loss": 0.4133, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0001471396736825998, |
|
"loss": 0.4277, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.09366391184573, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00014687709450035837, |
|
"loss": 0.4151, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.0964187327823691, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.0001466141004287789, |
|
"loss": 0.4359, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.0991735537190084, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00014635069379551055, |
|
"loss": 0.4354, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.1019283746556474, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00014608687693185411, |
|
"loss": 0.4238, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.1046831955922864, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00014582265217274104, |
|
"loss": 0.428, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.1074380165289257, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.00014555802185671298, |
|
"loss": 0.4207, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.1101928374655647, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00014529298832590085, |
|
"loss": 0.4401, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.1129476584022038, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00014502755392600442, |
|
"loss": 0.4105, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.115702479338843, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00014476172100627127, |
|
"loss": 0.4231, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.118457300275482, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00014449549191947615, |
|
"loss": 0.4295, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.121212121212121, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00014422886902190014, |
|
"loss": 0.4217, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.1239669421487604, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00014396185467330973, |
|
"loss": 0.429, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.1267217630853994, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00014369445123693596, |
|
"loss": 0.4326, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.1294765840220387, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00014342666107945362, |
|
"loss": 0.4187, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.1322314049586777, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00014315848657096004, |
|
"loss": 0.4242, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.1349862258953167, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00014288993008495452, |
|
"loss": 0.4036, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.137741046831956, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00014262099399831683, |
|
"loss": 0.4045, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.140495867768595, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00014235168069128657, |
|
"loss": 0.4246, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.1432506887052343, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00014208199254744192, |
|
"loss": 0.4347, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.1460055096418733, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00014181193195367858, |
|
"loss": 0.4302, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.1487603305785123, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00014154150130018866, |
|
"loss": 0.4143, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.1515151515151516, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00014127070298043947, |
|
"loss": 0.4227, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.1542699724517906, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00014099953939115248, |
|
"loss": 0.4179, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.1570247933884297, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00014072801293228188, |
|
"loss": 0.4135, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.159779614325069, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0001404561260069935, |
|
"loss": 0.4389, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.162534435261708, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00014018388102164363, |
|
"loss": 0.4126, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.165289256198347, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00013991128038575741, |
|
"loss": 0.4252, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.1680440771349863, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00013963832651200784, |
|
"loss": 0.4259, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.1707988980716253, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.00013936502181619416, |
|
"loss": 0.4266, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.1735537190082646, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00013909136871722067, |
|
"loss": 0.4137, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.1763085399449036, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001388173696370752, |
|
"loss": 0.4187, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.1790633608815426, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00013854302700080776, |
|
"loss": 0.3995, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.1818181818181819, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.000138268343236509, |
|
"loss": 0.4146, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.184573002754821, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0001379933207752887, |
|
"loss": 0.4207, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.1873278236914602, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0001377179620512544, |
|
"loss": 0.4037, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.1900826446280992, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00013744226950148972, |
|
"loss": 0.4294, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.1928374655647382, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00013716624556603274, |
|
"loss": 0.4178, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.1955922865013775, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0001368898926878547, |
|
"loss": 0.4272, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.1983471074380165, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00013661321331283796, |
|
"loss": 0.4162, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.2011019283746556, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00013633620988975464, |
|
"loss": 0.4381, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.2038567493112948, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.000136058884870245, |
|
"loss": 0.4238, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.2066115702479339, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00013578124070879534, |
|
"loss": 0.4297, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.209366391184573, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00013550327986271686, |
|
"loss": 0.4095, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.2121212121212122, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00013522500479212337, |
|
"loss": 0.4156, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.2148760330578512, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00013494641795990986, |
|
"loss": 0.4268, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.2176308539944904, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.0001346675218317305, |
|
"loss": 0.4088, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.2203856749311295, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.0001343883188759771, |
|
"loss": 0.4092, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.2231404958677685, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00013410881156375684, |
|
"loss": 0.4108, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.2258953168044078, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00013382900236887075, |
|
"loss": 0.4333, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.2286501377410468, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00013354889376779167, |
|
"loss": 0.4267, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.231404958677686, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00013326848823964243, |
|
"loss": 0.4164, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.234159779614325, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00013298778826617365, |
|
"loss": 0.4092, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.2369146005509641, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00013270679633174218, |
|
"loss": 0.4175, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.2396694214876034, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00013242551492328875, |
|
"loss": 0.4297, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.2424242424242424, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00013214394653031616, |
|
"loss": 0.4221, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.2451790633608815, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0001318620936448672, |
|
"loss": 0.4161, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.2479338842975207, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0001315799587615025, |
|
"loss": 0.4319, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.2506887052341598, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00013129754437727863, |
|
"loss": 0.4122, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.2534435261707988, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00013101485299172588, |
|
"loss": 0.4014, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.256198347107438, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0001307318871068261, |
|
"loss": 0.426, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.258953168044077, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0001304486492269907, |
|
"loss": 0.4305, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.2617079889807163, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0001301651418590384, |
|
"loss": 0.4289, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.2644628099173554, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00012988136751217291, |
|
"loss": 0.4198, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.2672176308539944, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00012959732869796103, |
|
"loss": 0.4475, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.2699724517906337, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0001293130279303102, |
|
"loss": 0.4227, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.2727272727272727, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00012902846772544624, |
|
"loss": 0.3967, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.275482093663912, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00012874365060189117, |
|
"loss": 0.4117, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.278236914600551, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00012845857908044096, |
|
"loss": 0.408, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.28099173553719, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00012817325568414297, |
|
"loss": 0.4066, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.283746556473829, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00012788768293827402, |
|
"loss": 0.4183, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.2865013774104683, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00012760186337031762, |
|
"loss": 0.4156, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.2892561983471074, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.000127315799509942, |
|
"loss": 0.4126, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.2920110192837466, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.0001270294938889773, |
|
"loss": 0.4305, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.2947658402203857, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.0001267429490413935, |
|
"loss": 0.4245, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.2975206611570247, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.0001264561675032779, |
|
"loss": 0.4272, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.300275482093664, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0001261691518128126, |
|
"loss": 0.4057, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.303030303030303, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00012588190451025207, |
|
"loss": 0.4025, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.3057851239669422, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00012559442813790076, |
|
"loss": 0.4126, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.3085399449035813, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00012530672524009036, |
|
"loss": 0.4268, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.3112947658402203, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00012501879836315764, |
|
"loss": 0.411, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.3140495867768596, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00012473065005542155, |
|
"loss": 0.4174, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.3168044077134986, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00012444228286716095, |
|
"loss": 0.4145, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.3195592286501379, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00012415369935059184, |
|
"loss": 0.4299, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.322314049586777, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00012386490205984488, |
|
"loss": 0.4223, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.325068870523416, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00012357589355094275, |
|
"loss": 0.4076, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.327823691460055, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0001232866763817775, |
|
"loss": 0.4252, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.3305785123966942, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00012299725311208808, |
|
"loss": 0.421, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00012270762630343734, |
|
"loss": 0.4116, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.3360881542699725, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0001224177985191897, |
|
"loss": 0.4116, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.3388429752066116, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00012212777232448838, |
|
"loss": 0.4069, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.3415977961432506, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00012183755028623245, |
|
"loss": 0.4101, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.3443526170798898, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00012154713497305455, |
|
"loss": 0.4395, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.3471074380165289, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00012125652895529766, |
|
"loss": 0.4243, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.3498622589531681, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00012096573480499289, |
|
"loss": 0.4087, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.3526170798898072, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00012067475509583616, |
|
"loss": 0.4061, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.3553719008264462, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00012038359240316589, |
|
"loss": 0.4051, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.3581267217630855, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00012009224930393988, |
|
"loss": 0.3838, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.3608815426997245, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00011980072837671273, |
|
"loss": 0.3986, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.3636363636363638, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00011950903220161285, |
|
"loss": 0.4179, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.3663911845730028, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00011921716336031972, |
|
"loss": 0.3922, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.3691460055096418, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00011892512443604102, |
|
"loss": 0.4042, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.3719008264462809, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001186329180134898, |
|
"loss": 0.4099, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.3746556473829201, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00011834054667886145, |
|
"loss": 0.405, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.3774104683195592, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00011804801301981108, |
|
"loss": 0.4068, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.3801652892561984, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00011775531962543036, |
|
"loss": 0.4156, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.3829201101928374, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00011746246908622467, |
|
"loss": 0.4173, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.3856749311294765, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00011716946399409039, |
|
"loss": 0.4111, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.3884297520661157, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0001168763069422916, |
|
"loss": 0.4234, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.3911845730027548, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00011658300052543742, |
|
"loss": 0.4213, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.393939393939394, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0001162895473394589, |
|
"loss": 0.4104, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.396694214876033, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00011599594998158602, |
|
"loss": 0.4113, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.399449035812672, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.0001157022110503249, |
|
"loss": 0.4175, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.4022038567493114, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00011540833314543458, |
|
"loss": 0.402, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.4049586776859504, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00011511431886790407, |
|
"loss": 0.4215, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.4077134986225897, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00011482017081992944, |
|
"loss": 0.4189, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.4104683195592287, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00011452589160489062, |
|
"loss": 0.4025, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.4132231404958677, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00011423148382732853, |
|
"loss": 0.3975, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.4159779614325068, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00011393695009292185, |
|
"loss": 0.4094, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.418732782369146, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00011364229300846417, |
|
"loss": 0.4179, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.421487603305785, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00011334751518184061, |
|
"loss": 0.4076, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.4242424242424243, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00011305261922200519, |
|
"loss": 0.3997, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.4269972451790633, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001127576077389572, |
|
"loss": 0.41, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.4297520661157024, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001124624833437186, |
|
"loss": 0.4096, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.4325068870523416, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.00011216724864831056, |
|
"loss": 0.426, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.4352617079889807, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00011187190626573052, |
|
"loss": 0.3964, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.43801652892562, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00011157645880992902, |
|
"loss": 0.4115, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.440771349862259, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00011128090889578648, |
|
"loss": 0.4288, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.443526170798898, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00011098525913909025, |
|
"loss": 0.4084, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.4462809917355373, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00011068951215651132, |
|
"loss": 0.4067, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.4490358126721763, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00011039367056558103, |
|
"loss": 0.4235, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.4517906336088156, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00011009773698466832, |
|
"loss": 0.4003, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0001098017140329561, |
|
"loss": 0.4104, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.4573002754820936, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00010950560433041826, |
|
"loss": 0.4165, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.4600550964187327, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00010920941049779667, |
|
"loss": 0.4114, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.462809917355372, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.0001089131351565776, |
|
"loss": 0.4035, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.465564738292011, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00010861678092896881, |
|
"loss": 0.4025, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.4683195592286502, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00010832035043787625, |
|
"loss": 0.4211, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.4710743801652892, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00010802384630688078, |
|
"loss": 0.4175, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.4738292011019283, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00010772727116021513, |
|
"loss": 0.3955, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.4765840220385675, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00010743062762274046, |
|
"loss": 0.4233, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.4793388429752066, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00010713391831992323, |
|
"loss": 0.4105, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.4820936639118458, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00010683714587781209, |
|
"loss": 0.4012, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.4848484848484849, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00010654031292301432, |
|
"loss": 0.3917, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.487603305785124, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00010624342208267292, |
|
"loss": 0.4105, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.4903581267217632, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00010594647598444312, |
|
"loss": 0.4166, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.4931129476584022, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 0.0001056494772564693, |
|
"loss": 0.4642, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.4958677685950414, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00010535242852736151, |
|
"loss": 0.4127, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.4986225895316805, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00010505533242617254, |
|
"loss": 0.4035, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.5013774104683195, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00010475819158237425, |
|
"loss": 0.4136, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.5041322314049586, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00010446100862583459, |
|
"loss": 0.41, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.5068870523415978, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00010416378618679425, |
|
"loss": 0.4264, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.509641873278237, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.0001038665268958433, |
|
"loss": 0.4157, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.512396694214876, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00010356923338389806, |
|
"loss": 0.4109, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.5151515151515151, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00010327190828217763, |
|
"loss": 0.4196, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.5179063360881542, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00010297455422218074, |
|
"loss": 0.4192, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.5206611570247934, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00010267717383566246, |
|
"loss": 0.4453, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.5234159779614325, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00010237976975461075, |
|
"loss": 0.4115, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.5261707988980717, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00010208234461122343, |
|
"loss": 0.402, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.5289256198347108, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.0001017849010378846, |
|
"loss": 0.4132, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.5316804407713498, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00010148744166714159, |
|
"loss": 0.3973, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.5344352617079888, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00010118996913168144, |
|
"loss": 0.4122, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.537190082644628, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00010089248606430775, |
|
"loss": 0.4054, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.5399449035812673, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00010059499509791736, |
|
"loss": 0.4052, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.5426997245179064, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00010029749886547701, |
|
"loss": 0.3983, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.5454545454545454, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0001, |
|
"loss": 0.4236, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.5482093663911844, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 9.970250113452301e-05, |
|
"loss": 0.4274, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.5509641873278237, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 9.940500490208263e-05, |
|
"loss": 0.4133, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.553719008264463, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 9.910751393569229e-05, |
|
"loss": 0.4218, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.556473829201102, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.881003086831859e-05, |
|
"loss": 0.4155, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.559228650137741, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.851255833285843e-05, |
|
"loss": 0.4111, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.56198347107438, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 9.821509896211539e-05, |
|
"loss": 0.3969, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.5647382920110193, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 9.791765538877659e-05, |
|
"loss": 0.4197, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.5674931129476584, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 9.762023024538926e-05, |
|
"loss": 0.4182, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.5702479338842976, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 9.732282616433756e-05, |
|
"loss": 0.4203, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.5730027548209367, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 9.702544577781925e-05, |
|
"loss": 0.4083, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.5757575757575757, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 9.67280917178224e-05, |
|
"loss": 0.4044, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.5785123966942147, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 9.643076661610196e-05, |
|
"loss": 0.4112, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.581267217630854, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 9.613347310415672e-05, |
|
"loss": 0.4021, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.5840220385674932, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 9.583621381320576e-05, |
|
"loss": 0.4215, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.5867768595041323, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 9.553899137416545e-05, |
|
"loss": 0.402, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.5895316804407713, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 9.524180841762577e-05, |
|
"loss": 0.3915, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.5922865013774103, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 9.494466757382748e-05, |
|
"loss": 0.4214, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.5950413223140496, |
|
"grad_norm": 0.375, |
|
"learning_rate": 9.464757147263849e-05, |
|
"loss": 0.3978, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.5977961432506889, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 9.435052274353074e-05, |
|
"loss": 0.4008, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.600550964187328, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.405352401555691e-05, |
|
"loss": 0.3899, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.603305785123967, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 9.37565779173271e-05, |
|
"loss": 0.4085, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.606060606060606, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.345968707698569e-05, |
|
"loss": 0.3965, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.6088154269972452, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 9.316285412218795e-05, |
|
"loss": 0.4, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.6115702479338843, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 9.286608168007678e-05, |
|
"loss": 0.396, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.6143250688705235, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 9.256937237725958e-05, |
|
"loss": 0.4255, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.6170798898071626, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 9.227272883978488e-05, |
|
"loss": 0.3959, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.6198347107438016, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.197615369311925e-05, |
|
"loss": 0.4098, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.6225895316804406, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 9.167964956212378e-05, |
|
"loss": 0.4028, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.6253443526170799, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 9.13832190710312e-05, |
|
"loss": 0.4148, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.6280991735537191, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 9.108686484342241e-05, |
|
"loss": 0.3891, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.6308539944903582, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 9.079058950220336e-05, |
|
"loss": 0.4064, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.6336088154269972, |
|
"grad_norm": 0.375, |
|
"learning_rate": 9.049439566958175e-05, |
|
"loss": 0.414, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.6363636363636362, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 9.019828596704394e-05, |
|
"loss": 0.4194, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.6391184573002755, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 8.99022630153317e-05, |
|
"loss": 0.4016, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.6418732782369148, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 8.960632943441896e-05, |
|
"loss": 0.4196, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.6446280991735538, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 8.931048784348875e-05, |
|
"loss": 0.4106, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.6473829201101928, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 8.901474086090977e-05, |
|
"loss": 0.4151, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.6501377410468319, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 8.871909110421353e-05, |
|
"loss": 0.4048, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.6528925619834711, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 8.842354119007099e-05, |
|
"loss": 0.4107, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.6556473829201102, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 8.812809373426951e-05, |
|
"loss": 0.391, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.6584022038567494, |
|
"grad_norm": 0.375, |
|
"learning_rate": 8.783275135168945e-05, |
|
"loss": 0.392, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.6611570247933884, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 8.753751665628141e-05, |
|
"loss": 0.4218, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.6639118457300275, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 8.724239226104281e-05, |
|
"loss": 0.408, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 8.694738077799488e-05, |
|
"loss": 0.3977, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.6694214876033058, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 8.665248481815941e-05, |
|
"loss": 0.3919, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.672176308539945, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 8.635770699153587e-05, |
|
"loss": 0.4258, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.674931129476584, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.606304990707815e-05, |
|
"loss": 0.4039, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.677685950413223, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.57685161726715e-05, |
|
"loss": 0.3994, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.6804407713498621, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.547410839510939e-05, |
|
"loss": 0.4064, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.6831955922865014, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 8.517982918007059e-05, |
|
"loss": 0.4104, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.6859504132231407, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 8.488568113209593e-05, |
|
"loss": 0.3915, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.6887052341597797, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 8.459166685456547e-05, |
|
"loss": 0.4006, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.6914600550964187, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 8.42977889496751e-05, |
|
"loss": 0.4068, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.6942148760330578, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 8.400405001841399e-05, |
|
"loss": 0.4321, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.696969696969697, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 8.371045266054114e-05, |
|
"loss": 0.3974, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.699724517906336, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 8.34169994745626e-05, |
|
"loss": 0.3953, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.7024793388429753, |
|
"grad_norm": 0.375, |
|
"learning_rate": 8.312369305770843e-05, |
|
"loss": 0.3908, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.7052341597796143, |
|
"grad_norm": 0.375, |
|
"learning_rate": 8.283053600590962e-05, |
|
"loss": 0.3852, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.7079889807162534, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 8.253753091377533e-05, |
|
"loss": 0.4014, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.7107438016528924, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 8.224468037456969e-05, |
|
"loss": 0.392, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.7134986225895317, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 8.195198698018896e-05, |
|
"loss": 0.4187, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.716253443526171, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 8.165945332113857e-05, |
|
"loss": 0.422, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.71900826446281, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 8.136708198651021e-05, |
|
"loss": 0.3927, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.721763085399449, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 8.107487556395901e-05, |
|
"loss": 0.4004, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.724517906336088, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 8.07828366396803e-05, |
|
"loss": 0.4061, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.7272727272727273, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.049096779838719e-05, |
|
"loss": 0.3846, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.7300275482093666, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 8.01992716232873e-05, |
|
"loss": 0.406, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.7327823691460056, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 7.990775069606012e-05, |
|
"loss": 0.4048, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.7355371900826446, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 7.961640759683416e-05, |
|
"loss": 0.4172, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.7382920110192837, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 7.932524490416385e-05, |
|
"loss": 0.3989, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.741046831955923, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 7.903426519500714e-05, |
|
"loss": 0.3922, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.743801652892562, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 7.874347104470234e-05, |
|
"loss": 0.3937, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.7465564738292012, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 7.845286502694551e-05, |
|
"loss": 0.4166, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.7493112947658402, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 7.816244971376757e-05, |
|
"loss": 0.4166, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.7520661157024793, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 7.787222767551164e-05, |
|
"loss": 0.395, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.7548209366391183, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 7.758220148081028e-05, |
|
"loss": 0.3938, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.7575757575757576, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 7.729237369656269e-05, |
|
"loss": 0.4089, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.7603305785123968, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 7.700274688791196e-05, |
|
"loss": 0.3922, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.7630853994490359, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 7.671332361822249e-05, |
|
"loss": 0.402, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.765840220385675, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 7.642410644905726e-05, |
|
"loss": 0.4052, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.768595041322314, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 7.613509794015517e-05, |
|
"loss": 0.4091, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.7713498622589532, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 7.584630064940819e-05, |
|
"loss": 0.3989, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.7741046831955924, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 7.555771713283907e-05, |
|
"loss": 0.4085, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.7768595041322315, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 7.526934994457844e-05, |
|
"loss": 0.3883, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.7796143250688705, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 7.498120163684239e-05, |
|
"loss": 0.4039, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.7823691460055096, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 7.469327475990966e-05, |
|
"loss": 0.3922, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.7851239669421488, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 7.440557186209926e-05, |
|
"loss": 0.3944, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.7878787878787878, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 7.411809548974792e-05, |
|
"loss": 0.3925, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.790633608815427, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 7.383084818718741e-05, |
|
"loss": 0.4092, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.7933884297520661, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 7.354383249672212e-05, |
|
"loss": 0.3987, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.7961432506887052, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 7.325705095860652e-05, |
|
"loss": 0.4096, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.7988980716253442, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 7.297050611102272e-05, |
|
"loss": 0.3975, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.8016528925619835, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 7.268420049005807e-05, |
|
"loss": 0.4046, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.8044077134986227, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 7.239813662968237e-05, |
|
"loss": 0.3886, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.8071625344352618, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 7.211231706172601e-05, |
|
"loss": 0.4027, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.8099173553719008, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 7.182674431585704e-05, |
|
"loss": 0.3948, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.8126721763085398, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 7.15414209195591e-05, |
|
"loss": 0.404, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.815426997245179, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 7.125634939810884e-05, |
|
"loss": 0.4014, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 7.097153227455379e-05, |
|
"loss": 0.3887, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.8209366391184574, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 7.068697206968979e-05, |
|
"loss": 0.3937, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.8236914600550964, |
|
"grad_norm": 0.375, |
|
"learning_rate": 7.040267130203896e-05, |
|
"loss": 0.3945, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.8264462809917354, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 7.011863248782711e-05, |
|
"loss": 0.427, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.8292011019283747, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 6.983485814096165e-05, |
|
"loss": 0.4112, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.8319559228650137, |
|
"grad_norm": 0.375, |
|
"learning_rate": 6.955135077300931e-05, |
|
"loss": 0.3874, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.834710743801653, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 6.92681128931739e-05, |
|
"loss": 0.395, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.837465564738292, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 6.898514700827417e-05, |
|
"loss": 0.4028, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.840220385674931, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 6.870245562272138e-05, |
|
"loss": 0.4038, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.84297520661157, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 6.842004123849752e-05, |
|
"loss": 0.3949, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.8457300275482094, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 6.813790635513282e-05, |
|
"loss": 0.3951, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.8484848484848486, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 6.785605346968386e-05, |
|
"loss": 0.4097, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.8512396694214877, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 6.757448507671128e-05, |
|
"loss": 0.4104, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.8539944903581267, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 6.729320366825784e-05, |
|
"loss": 0.3977, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.8567493112947657, |
|
"grad_norm": 0.375, |
|
"learning_rate": 6.701221173382636e-05, |
|
"loss": 0.3932, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.859504132231405, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 6.673151176035762e-05, |
|
"loss": 0.4114, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.8622589531680442, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 6.645110623220834e-05, |
|
"loss": 0.4022, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.8650137741046833, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 6.617099763112929e-05, |
|
"loss": 0.4017, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.8677685950413223, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 6.589118843624315e-05, |
|
"loss": 0.4046, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.8705234159779613, |
|
"grad_norm": 0.375, |
|
"learning_rate": 6.561168112402294e-05, |
|
"loss": 0.3818, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.8732782369146006, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 6.533247816826948e-05, |
|
"loss": 0.4053, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.8760330578512396, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 6.505358204009017e-05, |
|
"loss": 0.3957, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.878787878787879, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 6.477499520787665e-05, |
|
"loss": 0.397, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.881542699724518, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 6.449672013728318e-05, |
|
"loss": 0.4106, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.884297520661157, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 6.421875929120469e-05, |
|
"loss": 0.3964, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.887052341597796, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 6.394111512975504e-05, |
|
"loss": 0.4169, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.8898071625344353, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 6.366379011024535e-05, |
|
"loss": 0.3912, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.8925619834710745, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 6.338678668716209e-05, |
|
"loss": 0.4056, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.8953168044077136, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 6.311010731214533e-05, |
|
"loss": 0.4081, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.8980716253443526, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 6.283375443396726e-05, |
|
"loss": 0.3948, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.9008264462809916, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 6.25577304985103e-05, |
|
"loss": 0.412, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.9035812672176309, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 6.228203794874562e-05, |
|
"loss": 0.407, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.9063360881542701, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 6.200667922471132e-05, |
|
"loss": 0.3945, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.9090909090909092, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 6.173165676349103e-05, |
|
"loss": 0.3955, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.9118457300275482, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 6.145697299919226e-05, |
|
"loss": 0.3903, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.9146005509641872, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 6.118263036292479e-05, |
|
"loss": 0.3874, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.9173553719008265, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 6.090863128277938e-05, |
|
"loss": 0.3856, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.9201101928374655, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 6.063497818380587e-05, |
|
"loss": 0.4032, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.9228650137741048, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 6.036167348799219e-05, |
|
"loss": 0.394, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.9256198347107438, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 6.008871961424258e-05, |
|
"loss": 0.3948, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.9283746556473829, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 5.9816118978356394e-05, |
|
"loss": 0.3953, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.931129476584022, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 5.9543873993006496e-05, |
|
"loss": 0.4042, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.9338842975206612, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 5.9271987067718125e-05, |
|
"loss": 0.3912, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.9366391184573004, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 5.900046060884753e-05, |
|
"loss": 0.4091, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.9393939393939394, |
|
"grad_norm": 0.375, |
|
"learning_rate": 5.872929701956054e-05, |
|
"loss": 0.3874, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.9421487603305785, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 5.845849869981137e-05, |
|
"loss": 0.3867, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.9449035812672175, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 5.818806804632142e-05, |
|
"loss": 0.379, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.9476584022038568, |
|
"grad_norm": 0.375, |
|
"learning_rate": 5.7918007452558085e-05, |
|
"loss": 0.3879, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.950413223140496, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 5.7648319308713464e-05, |
|
"loss": 0.3876, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.953168044077135, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 5.73790060016832e-05, |
|
"loss": 0.3841, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.955922865013774, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 5.711006991504551e-05, |
|
"loss": 0.4022, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.9586776859504131, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 5.684151342903992e-05, |
|
"loss": 0.3988, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.9614325068870524, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 5.657333892054643e-05, |
|
"loss": 0.4002, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.9641873278236914, |
|
"grad_norm": 0.375, |
|
"learning_rate": 5.630554876306407e-05, |
|
"loss": 0.388, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.9669421487603307, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 5.603814532669032e-05, |
|
"loss": 0.395, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.9696969696969697, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 5.577113097809989e-05, |
|
"loss": 0.3799, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.9724517906336088, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 5.550450808052388e-05, |
|
"loss": 0.3883, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.9752066115702478, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 5.5238278993728756e-05, |
|
"loss": 0.3942, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.977961432506887, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 5.497244607399561e-05, |
|
"loss": 0.4083, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.9807162534435263, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 5.470701167409917e-05, |
|
"loss": 0.3901, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.9834710743801653, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 5.4441978143287066e-05, |
|
"loss": 0.3921, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.9862258953168044, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 5.417734782725896e-05, |
|
"loss": 0.4082, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.9889807162534434, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 5.39131230681459e-05, |
|
"loss": 0.3827, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.9917355371900827, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 5.364930620448946e-05, |
|
"loss": 0.4046, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.994490358126722, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 5.3385899571221175e-05, |
|
"loss": 0.3847, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.997245179063361, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 5.3122905499641615e-05, |
|
"loss": 0.3954, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.286032631740023e-05, |
|
"loss": 0.3724, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_train_loss": 0.4305049180984497, |
|
"eval_train_runtime": 8.5141, |
|
"eval_train_samples_per_second": 158.207, |
|
"eval_train_steps_per_second": 19.849, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.002754820936639, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 5.259816434847426e-05, |
|
"loss": 0.3496, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.005509641873278, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 5.233642191314842e-05, |
|
"loss": 0.3539, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.0082644628099175, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 5.207510132799436e-05, |
|
"loss": 0.3534, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.0110192837465566, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 5.1814204905849906e-05, |
|
"loss": 0.3523, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.0137741046831956, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 5.155373495579911e-05, |
|
"loss": 0.3512, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.0165289256198347, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 5.1293693783151275e-05, |
|
"loss": 0.3482, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.0192837465564737, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 5.1034083689420905e-05, |
|
"loss": 0.3623, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.022038567493113, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 5.0774906972307044e-05, |
|
"loss": 0.3445, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.024793388429752, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 5.051616592567323e-05, |
|
"loss": 0.3555, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.0275482093663912, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 5.025786283952709e-05, |
|
"loss": 0.3446, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.0303030303030303, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.3495, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.0330578512396693, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.9742579689326874e-05, |
|
"loss": 0.3586, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.0358126721763083, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 4.948560418582598e-05, |
|
"loss": 0.3502, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.038567493112948, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 4.9229075763878806e-05, |
|
"loss": 0.3483, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.041322314049587, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 4.8972996693910054e-05, |
|
"loss": 0.3464, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.044077134986226, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 4.871736924236714e-05, |
|
"loss": 0.3538, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.046831955922865, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 4.8462195671700574e-05, |
|
"loss": 0.3398, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.049586776859504, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.82074782403437e-05, |
|
"loss": 0.3453, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.0523415977961434, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 4.795321920269279e-05, |
|
"loss": 0.3538, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.0550964187327825, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 4.7699420809087044e-05, |
|
"loss": 0.3464, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.0578512396694215, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 4.7446085305788725e-05, |
|
"loss": 0.3458, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.0606060606060606, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 4.7193214934963206e-05, |
|
"loss": 0.345, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.0633608815426996, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 4.694081193465921e-05, |
|
"loss": 0.3573, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 2.0661157024793386, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 4.668887853878896e-05, |
|
"loss": 0.3493, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.068870523415978, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 4.643741697710837e-05, |
|
"loss": 0.3369, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 2.071625344352617, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 4.618642947519739e-05, |
|
"loss": 0.3507, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.074380165289256, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 4.593591825444028e-05, |
|
"loss": 0.347, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 2.077134986225895, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 4.5685885532005803e-05, |
|
"loss": 0.344, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.0798898071625342, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 4.543633352082794e-05, |
|
"loss": 0.3525, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.0826446280991737, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 4.518726442958599e-05, |
|
"loss": 0.3545, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.0853994490358128, |
|
"grad_norm": 0.375, |
|
"learning_rate": 4.493868046268514e-05, |
|
"loss": 0.3466, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 2.088154269972452, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 4.4690583820236933e-05, |
|
"loss": 0.3598, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.090909090909091, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 4.444297669803981e-05, |
|
"loss": 0.359, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 2.09366391184573, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 4.419586128755967e-05, |
|
"loss": 0.3599, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.0964187327823693, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 4.394923977591059e-05, |
|
"loss": 0.3623, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 2.0991735537190084, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 4.370311434583525e-05, |
|
"loss": 0.3434, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.1019283746556474, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 4.345748717568567e-05, |
|
"loss": 0.3437, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 2.1046831955922864, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.321236043940411e-05, |
|
"loss": 0.3563, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.1074380165289255, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 4.296773630650358e-05, |
|
"loss": 0.3448, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.110192837465565, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.272361694204894e-05, |
|
"loss": 0.3412, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.112947658402204, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.248000450663734e-05, |
|
"loss": 0.3505, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.115702479338843, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 4.223690115637944e-05, |
|
"loss": 0.3565, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.118457300275482, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 4.19943090428802e-05, |
|
"loss": 0.3422, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.121212121212121, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 4.17522303132198e-05, |
|
"loss": 0.3523, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.12396694214876, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.15106671099347e-05, |
|
"loss": 0.3444, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.1267217630853996, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.1269621570998665e-05, |
|
"loss": 0.3528, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.1294765840220387, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.10290958298038e-05, |
|
"loss": 0.3409, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.1322314049586777, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 4.0789092015141714e-05, |
|
"loss": 0.3555, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.1349862258953167, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 4.054961225118469e-05, |
|
"loss": 0.3419, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.1377410468319558, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.0310658657466816e-05, |
|
"loss": 0.3552, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.1404958677685952, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 4.007223334886531e-05, |
|
"loss": 0.3433, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.1432506887052343, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.983433843558176e-05, |
|
"loss": 0.3716, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.1460055096418733, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 3.9596976023123344e-05, |
|
"loss": 0.3351, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.1487603305785123, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 3.9360148212284475e-05, |
|
"loss": 0.3482, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.1515151515151514, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 3.9123857099127936e-05, |
|
"loss": 0.3607, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.1542699724517904, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 3.888810477496645e-05, |
|
"loss": 0.3481, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.15702479338843, |
|
"grad_norm": 0.375, |
|
"learning_rate": 3.865289332634407e-05, |
|
"loss": 0.3511, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.159779614325069, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.841822483501787e-05, |
|
"loss": 0.3659, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.162534435261708, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 3.8184101377939476e-05, |
|
"loss": 0.3526, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.165289256198347, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.7950525027236585e-05, |
|
"loss": 0.349, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.168044077134986, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.7717497850194614e-05, |
|
"loss": 0.3534, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.1707988980716255, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.748502190923863e-05, |
|
"loss": 0.3456, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.1735537190082646, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 3.725309926191479e-05, |
|
"loss": 0.3515, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.1763085399449036, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.7021731960872484e-05, |
|
"loss": 0.3419, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.1790633608815426, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.679092205384574e-05, |
|
"loss": 0.3455, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.1818181818181817, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.6560671583635467e-05, |
|
"loss": 0.3436, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.184573002754821, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 3.6330982588091186e-05, |
|
"loss": 0.347, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.18732782369146, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 3.610185710009304e-05, |
|
"loss": 0.3509, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.190082644628099, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 3.5873297147533915e-05, |
|
"loss": 0.3465, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.1928374655647382, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 3.5645304753301176e-05, |
|
"loss": 0.3552, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.1955922865013773, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 3.541788193525913e-05, |
|
"loss": 0.3521, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.1983471074380168, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 3.519103070623096e-05, |
|
"loss": 0.3502, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.201101928374656, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 3.496475307398095e-05, |
|
"loss": 0.352, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.203856749311295, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.4739051041196715e-05, |
|
"loss": 0.3531, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.206611570247934, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 3.45139266054715e-05, |
|
"loss": 0.3463, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.209366391184573, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 3.428938175928648e-05, |
|
"loss": 0.3448, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.212121212121212, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 3.406541848999312e-05, |
|
"loss": 0.3358, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.2148760330578514, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 3.384203877979559e-05, |
|
"loss": 0.3404, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.2176308539944904, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 3.361924460573325e-05, |
|
"loss": 0.3488, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.2203856749311295, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 3.33970379396631e-05, |
|
"loss": 0.3549, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.2231404958677685, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 3.3175420748242406e-05, |
|
"loss": 0.3399, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.2258953168044076, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 3.2954394992911083e-05, |
|
"loss": 0.355, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.228650137741047, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 3.273396262987475e-05, |
|
"loss": 0.3513, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.231404958677686, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 3.2514125610086955e-05, |
|
"loss": 0.3545, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.234159779614325, |
|
"grad_norm": 0.375, |
|
"learning_rate": 3.229488587923225e-05, |
|
"loss": 0.3513, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.236914600550964, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 3.207624537770868e-05, |
|
"loss": 0.3367, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.239669421487603, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.185820604061088e-05, |
|
"loss": 0.3468, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.242424242424242, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 3.164076979771287e-05, |
|
"loss": 0.333, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.2451790633608817, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 3.142393857345085e-05, |
|
"loss": 0.3651, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.2479338842975207, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 3.1207714286906256e-05, |
|
"loss": 0.3501, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.2506887052341598, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 3.099209885178882e-05, |
|
"loss": 0.333, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.253443526170799, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.077709417641953e-05, |
|
"loss": 0.3457, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.256198347107438, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.056270216371395e-05, |
|
"loss": 0.3444, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.2589531680440773, |
|
"grad_norm": 0.375, |
|
"learning_rate": 3.0348924711165006e-05, |
|
"loss": 0.347, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.2617079889807163, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 3.013576371082655e-05, |
|
"loss": 0.3365, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.2644628099173554, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 2.9923221049296446e-05, |
|
"loss": 0.3408, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.2672176308539944, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.971129860769992e-05, |
|
"loss": 0.3423, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.2699724517906334, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.9499998261672855e-05, |
|
"loss": 0.3471, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.2727272727272725, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 2.9289321881345254e-05, |
|
"loss": 0.3382, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.275482093663912, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.9079271331324664e-05, |
|
"loss": 0.3403, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.278236914600551, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 2.8869848470679682e-05, |
|
"loss": 0.3394, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.28099173553719, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.8661055152923456e-05, |
|
"loss": 0.358, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.283746556473829, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 2.8452893225997346e-05, |
|
"loss": 0.3349, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.2865013774104685, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 2.8245364532254525e-05, |
|
"loss": 0.337, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.2892561983471076, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 2.8038470908443714e-05, |
|
"loss": 0.3484, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.2920110192837466, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 2.7832214185692884e-05, |
|
"loss": 0.3402, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.2947658402203857, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.7626596189492983e-05, |
|
"loss": 0.3424, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.2975206611570247, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.742161873968202e-05, |
|
"loss": 0.3502, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.3002754820936637, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 2.7217283650428683e-05, |
|
"loss": 0.3502, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.303030303030303, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.7013592730216465e-05, |
|
"loss": 0.3415, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.3057851239669422, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.681054778182748e-05, |
|
"loss": 0.3289, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.3085399449035813, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 2.6608150602326708e-05, |
|
"loss": 0.3464, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.3112947658402203, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.6406402983046053e-05, |
|
"loss": 0.353, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.3140495867768593, |
|
"grad_norm": 0.375, |
|
"learning_rate": 2.6205306709568354e-05, |
|
"loss": 0.3582, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.316804407713499, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.6004863561711635e-05, |
|
"loss": 0.3436, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.319559228650138, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.580507531351345e-05, |
|
"loss": 0.3421, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.322314049586777, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.5605943733215042e-05, |
|
"loss": 0.3429, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.325068870523416, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.540747058324593e-05, |
|
"loss": 0.3563, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.327823691460055, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.5209657620207915e-05, |
|
"loss": 0.3417, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.330578512396694, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.501250659485992e-05, |
|
"loss": 0.3553, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.3333333333333335, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 2.4816019252102273e-05, |
|
"loss": 0.344, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.3360881542699725, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.462019733096136e-05, |
|
"loss": 0.3448, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.3388429752066116, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 2.4425042564574184e-05, |
|
"loss": 0.347, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.3415977961432506, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 2.4230556680173045e-05, |
|
"loss": 0.3585, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.3443526170798896, |
|
"grad_norm": 0.375, |
|
"learning_rate": 2.403674139907025e-05, |
|
"loss": 0.3375, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.347107438016529, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.38435984366429e-05, |
|
"loss": 0.3456, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.349862258953168, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 2.3651129502317647e-05, |
|
"loss": 0.3514, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.352617079889807, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.3459336299555657e-05, |
|
"loss": 0.3505, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.355371900826446, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 2.3268220525837437e-05, |
|
"loss": 0.3349, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.3581267217630852, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 2.3077783872647896e-05, |
|
"loss": 0.3483, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.3608815426997243, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 2.288802802546124e-05, |
|
"loss": 0.3359, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.3636363636363638, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 2.26989546637263e-05, |
|
"loss": 0.3425, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.366391184573003, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 2.2510565460851418e-05, |
|
"loss": 0.3522, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.369146005509642, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.2322862084189744e-05, |
|
"loss": 0.3366, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.371900826446281, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.2135846195024513e-05, |
|
"loss": 0.3492, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.3746556473829203, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.194951944855418e-05, |
|
"loss": 0.3396, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.3774104683195594, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 2.1763883493878057e-05, |
|
"loss": 0.3376, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.3801652892561984, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 2.157893997398146e-05, |
|
"loss": 0.3447, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.3829201101928374, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.139469052572127e-05, |
|
"loss": 0.3403, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.3856749311294765, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 2.1211136779811402e-05, |
|
"loss": 0.3417, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.3884297520661155, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.1028280360808407e-05, |
|
"loss": 0.3458, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.391184573002755, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.0846122887097186e-05, |
|
"loss": 0.3493, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.393939393939394, |
|
"grad_norm": 0.375, |
|
"learning_rate": 2.0664665970876496e-05, |
|
"loss": 0.3479, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.396694214876033, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.0483911218144715e-05, |
|
"loss": 0.3508, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.399449035812672, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.0303860228685724e-05, |
|
"loss": 0.3469, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.402203856749311, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.0124514596054668e-05, |
|
"loss": 0.3447, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.4049586776859506, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 1.994587590756397e-05, |
|
"loss": 0.3457, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.4077134986225897, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 1.9767945744269034e-05, |
|
"loss": 0.3392, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.4104683195592287, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 1.9590725680954513e-05, |
|
"loss": 0.3358, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.4132231404958677, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 1.941421728612023e-05, |
|
"loss": 0.3603, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.4159779614325068, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 1.923842212196735e-05, |
|
"loss": 0.3297, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.418732782369146, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.90633417443845e-05, |
|
"loss": 0.3533, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.4214876033057853, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 1.8888977702934085e-05, |
|
"loss": 0.3393, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.4242424242424243, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.8715331540838487e-05, |
|
"loss": 0.3468, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.4269972451790633, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.854240479496643e-05, |
|
"loss": 0.3451, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.4297520661157024, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 1.837019899581943e-05, |
|
"loss": 0.354, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.4325068870523414, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.8198715667518175e-05, |
|
"loss": 0.351, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.435261707988981, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 1.8027956327789076e-05, |
|
"loss": 0.3438, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.43801652892562, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 1.7857922487950874e-05, |
|
"loss": 0.3542, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.440771349862259, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 1.7688615652901076e-05, |
|
"loss": 0.3375, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.443526170798898, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 1.7520037321102966e-05, |
|
"loss": 0.3515, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.446280991735537, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.7352188984572026e-05, |
|
"loss": 0.3366, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.449035812672176, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 1.7185072128862933e-05, |
|
"loss": 0.3351, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.4517906336088156, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 1.7018688233056258e-05, |
|
"loss": 0.358, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.4545454545454546, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 1.6853038769745467e-05, |
|
"loss": 0.3571, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.4573002754820936, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 1.6688125205023985e-05, |
|
"loss": 0.3429, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.4600550964187327, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.6523948998471973e-05, |
|
"loss": 0.3408, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.462809917355372, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.6360511603143648e-05, |
|
"loss": 0.3453, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.465564738292011, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 1.619781446555422e-05, |
|
"loss": 0.357, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.46831955922865, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.6035859025667265e-05, |
|
"loss": 0.3471, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.4710743801652892, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 1.587464671688187e-05, |
|
"loss": 0.3484, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.4738292011019283, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.5714178966020066e-05, |
|
"loss": 0.3379, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.4765840220385673, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.5554457193313987e-05, |
|
"loss": 0.3482, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.479338842975207, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 1.5395482812393514e-05, |
|
"loss": 0.3402, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.482093663911846, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.523725723027365e-05, |
|
"loss": 0.3403, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.484848484848485, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.5079781847342123e-05, |
|
"loss": 0.3519, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.487603305785124, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 1.4923058057346929e-05, |
|
"loss": 0.3507, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.490358126721763, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 1.4767087247384059e-05, |
|
"loss": 0.3421, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.4931129476584024, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 1.4611870797885197e-05, |
|
"loss": 0.3679, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.4958677685950414, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 1.4457410082605483e-05, |
|
"loss": 0.3449, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.4986225895316805, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.430370646861139e-05, |
|
"loss": 0.347, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.5013774104683195, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.415076131626859e-05, |
|
"loss": 0.356, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.5041322314049586, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 1.3998575979229944e-05, |
|
"loss": 0.343, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.5068870523415976, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.3847151804423552e-05, |
|
"loss": 0.3509, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.509641873278237, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.3696490132040652e-05, |
|
"loss": 0.3465, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.512396694214876, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 1.3546592295524074e-05, |
|
"loss": 0.3472, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.515151515151515, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 1.339745962155613e-05, |
|
"loss": 0.347, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.517906336088154, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 1.3249093430047088e-05, |
|
"loss": 0.3417, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.5206611570247937, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 1.3101495034123313e-05, |
|
"loss": 0.3546, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.5234159779614327, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 1.2954665740115779e-05, |
|
"loss": 0.342, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.5261707988980717, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 1.280860684754852e-05, |
|
"loss": 0.3425, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.5289256198347108, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.2663319649127026e-05, |
|
"loss": 0.336, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.53168044077135, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 1.2518805430726787e-05, |
|
"loss": 0.351, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.534435261707989, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 1.2375065471382064e-05, |
|
"loss": 0.3572, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.537190082644628, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.2232101043274436e-05, |
|
"loss": 0.3556, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.5399449035812673, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.2089913411721631e-05, |
|
"loss": 0.3488, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.5426997245179064, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 1.1948503835166191e-05, |
|
"loss": 0.3513, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.5454545454545454, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.1807873565164506e-05, |
|
"loss": 0.3409, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.5482093663911844, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 1.166802384637561e-05, |
|
"loss": 0.3395, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.550964187327824, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.1528955916550188e-05, |
|
"loss": 0.3489, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.553719008264463, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 1.139067100651976e-05, |
|
"loss": 0.3444, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.556473829201102, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.1253170340185493e-05, |
|
"loss": 0.3458, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.559228650137741, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.1116455134507664e-05, |
|
"loss": 0.3469, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.56198347107438, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.0980526599494733e-05, |
|
"loss": 0.3608, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.564738292011019, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.0845385938192686e-05, |
|
"loss": 0.3433, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.567493112947658, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.0711034346674364e-05, |
|
"loss": 0.3546, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.5702479338842976, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.057747301402887e-05, |
|
"loss": 0.353, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.5730027548209367, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 1.0444703122351095e-05, |
|
"loss": 0.3427, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.5757575757575757, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.0312725846731175e-05, |
|
"loss": 0.3409, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.5785123966942147, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 1.0181542355244167e-05, |
|
"loss": 0.3534, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.581267217630854, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 1.0051153808939685e-05, |
|
"loss": 0.3339, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.5840220385674932, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 9.921561361831599e-06, |
|
"loss": 0.3385, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.5867768595041323, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 9.792766160887868e-06, |
|
"loss": 0.3451, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.5895316804407713, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 9.664769346020297e-06, |
|
"loss": 0.3578, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.5922865013774103, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 9.537572050074618e-06, |
|
"loss": 0.3462, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.5950413223140494, |
|
"grad_norm": 0.375, |
|
"learning_rate": 9.411175398820271e-06, |
|
"loss": 0.3459, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.597796143250689, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 9.285580510940594e-06, |
|
"loss": 0.3479, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.600550964187328, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 9.160788498022777e-06, |
|
"loss": 0.3484, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.603305785123967, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 9.036800464548157e-06, |
|
"loss": 0.3279, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.606060606060606, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 8.91361750788241e-06, |
|
"loss": 0.3374, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.6088154269972454, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 8.791240718265792e-06, |
|
"loss": 0.3392, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.6115702479338845, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 8.669671178803485e-06, |
|
"loss": 0.3455, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.6143250688705235, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 8.548909965456065e-06, |
|
"loss": 0.3538, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.6170798898071626, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 8.428958147029965e-06, |
|
"loss": 0.3374, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.6198347107438016, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 8.309816785168034e-06, |
|
"loss": 0.3486, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.6225895316804406, |
|
"grad_norm": 0.375, |
|
"learning_rate": 8.191486934340031e-06, |
|
"loss": 0.3423, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.6253443526170797, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 8.073969641833445e-06, |
|
"loss": 0.3401, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.628099173553719, |
|
"grad_norm": 0.375, |
|
"learning_rate": 7.95726594774413e-06, |
|
"loss": 0.3359, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.630853994490358, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 7.841376884967121e-06, |
|
"loss": 0.3478, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.633608815426997, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 7.726303479187501e-06, |
|
"loss": 0.3456, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.6363636363636362, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 7.612046748871327e-06, |
|
"loss": 0.3355, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.6391184573002757, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 7.498607705256588e-06, |
|
"loss": 0.3293, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.6418732782369148, |
|
"grad_norm": 0.375, |
|
"learning_rate": 7.385987352344282e-06, |
|
"loss": 0.3516, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.644628099173554, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 7.2741866868895395e-06, |
|
"loss": 0.3428, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.647382920110193, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 7.163206698392744e-06, |
|
"loss": 0.3452, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.650137741046832, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 7.053048369090853e-06, |
|
"loss": 0.3405, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.652892561983471, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 6.943712673948644e-06, |
|
"loss": 0.3427, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.65564738292011, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 6.835200580650125e-06, |
|
"loss": 0.3546, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.6584022038567494, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 6.7275130495899175e-06, |
|
"loss": 0.3489, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.6611570247933884, |
|
"grad_norm": 0.375, |
|
"learning_rate": 6.620651033864844e-06, |
|
"loss": 0.3522, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.6639118457300275, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 6.514615479265396e-06, |
|
"loss": 0.3381, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.6666666666666665, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 6.409407324267447e-06, |
|
"loss": 0.3495, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.669421487603306, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 6.3050275000238414e-06, |
|
"loss": 0.3355, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.672176308539945, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 6.201476930356264e-06, |
|
"loss": 0.3351, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.674931129476584, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 6.098756531747041e-06, |
|
"loss": 0.3415, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.677685950413223, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.996867213330992e-06, |
|
"loss": 0.3412, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.680440771349862, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 5.895809876887326e-06, |
|
"loss": 0.3507, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.683195592286501, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 5.795585416831828e-06, |
|
"loss": 0.3415, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.6859504132231407, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 5.696194720208792e-06, |
|
"loss": 0.3489, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.6887052341597797, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.597638666683258e-06, |
|
"loss": 0.3528, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.6914600550964187, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 5.499918128533155e-06, |
|
"loss": 0.339, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.6942148760330578, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 5.403033970641647e-06, |
|
"loss": 0.3456, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.6969696969696972, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 5.306987050489442e-06, |
|
"loss": 0.3514, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.6997245179063363, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 5.211778218147201e-06, |
|
"loss": 0.3339, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.7024793388429753, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 5.1174083162680465e-06, |
|
"loss": 0.3469, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.7052341597796143, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 5.023878180080055e-06, |
|
"loss": 0.3561, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.7079889807162534, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 4.931188637378914e-06, |
|
"loss": 0.3526, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.7107438016528924, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 4.839340508520562e-06, |
|
"loss": 0.3433, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.7134986225895315, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 4.748334606413951e-06, |
|
"loss": 0.3222, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.716253443526171, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.658171736513828e-06, |
|
"loss": 0.3655, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.71900826446281, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 4.568852696813619e-06, |
|
"loss": 0.3383, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.721763085399449, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.4803782778383816e-06, |
|
"loss": 0.3581, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.724517906336088, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 4.392749262637752e-06, |
|
"loss": 0.3525, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.7272727272727275, |
|
"grad_norm": 0.375, |
|
"learning_rate": 4.305966426779118e-06, |
|
"loss": 0.3402, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.7300275482093666, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.220030538340636e-06, |
|
"loss": 0.357, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.7327823691460056, |
|
"grad_norm": 0.375, |
|
"learning_rate": 4.1349423579045145e-06, |
|
"loss": 0.3313, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.7355371900826446, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.050702638550275e-06, |
|
"loss": 0.3541, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.7382920110192837, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.967312125847999e-06, |
|
"loss": 0.3433, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.7410468319559227, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 3.884771557851885e-06, |
|
"loss": 0.3429, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.7438016528925617, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.8030816650935776e-06, |
|
"loss": 0.3543, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.746556473829201, |
|
"grad_norm": 0.375, |
|
"learning_rate": 3.722243170575801e-06, |
|
"loss": 0.3393, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.7493112947658402, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.64225678976583e-06, |
|
"loss": 0.3469, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.7520661157024793, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 3.5631232305893046e-06, |
|
"loss": 0.3415, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.7548209366391183, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 3.4848431934239255e-06, |
|
"loss": 0.3452, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.757575757575758, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 3.40741737109318e-06, |
|
"loss": 0.3447, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.760330578512397, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 3.330846448860259e-06, |
|
"loss": 0.3475, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.763085399449036, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 3.2551311044220157e-06, |
|
"loss": 0.3577, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.765840220385675, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.18027200790294e-06, |
|
"loss": 0.347, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.768595041322314, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 3.1062698218492724e-06, |
|
"loss": 0.3375, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.771349862258953, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 3.033125201223019e-06, |
|
"loss": 0.3459, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.7741046831955924, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.9608387933963035e-06, |
|
"loss": 0.338, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.7768595041322315, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 2.889411238145545e-06, |
|
"loss": 0.3471, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.7796143250688705, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 2.818843167645835e-06, |
|
"loss": 0.3461, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.7823691460055096, |
|
"grad_norm": 0.375, |
|
"learning_rate": 2.7491352064653143e-06, |
|
"loss": 0.3487, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.785123966942149, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.6802879715596585e-06, |
|
"loss": 0.3351, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.787878787878788, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 2.612302072266637e-06, |
|
"loss": 0.3424, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.790633608815427, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.5451781103006944e-06, |
|
"loss": 0.3343, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.793388429752066, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 2.4789166797476228e-06, |
|
"loss": 0.3424, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.796143250688705, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.41351836705932e-06, |
|
"loss": 0.3461, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.798898071625344, |
|
"grad_norm": 0.375, |
|
"learning_rate": 2.3489837510485946e-06, |
|
"loss": 0.3513, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.8016528925619832, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.2853134028840594e-06, |
|
"loss": 0.3559, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.8044077134986227, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.2225078860849904e-06, |
|
"loss": 0.3562, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.8071625344352618, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 2.160567756516507e-06, |
|
"loss": 0.3552, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.809917355371901, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.0994935623844692e-06, |
|
"loss": 0.3431, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.81267217630854, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.039285844230765e-06, |
|
"loss": 0.3501, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.8154269972451793, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.9799451349284183e-06, |
|
"loss": 0.3336, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.8181818181818183, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.921471959676957e-06, |
|
"loss": 0.3488, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.8209366391184574, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.8638668359977296e-06, |
|
"loss": 0.3534, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.8236914600550964, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.8071302737293295e-06, |
|
"loss": 0.3486, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.8264462809917354, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 1.751262775023077e-06, |
|
"loss": 0.3549, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.8292011019283745, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.6962648343385568e-06, |
|
"loss": 0.3465, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.8319559228650135, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.642136938439287e-06, |
|
"loss": 0.3466, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.834710743801653, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.5888795663883904e-06, |
|
"loss": 0.3452, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.837465564738292, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 1.5364931895443413e-06, |
|
"loss": 0.3332, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.840220385674931, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 1.4849782715568139e-06, |
|
"loss": 0.3395, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.84297520661157, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 1.4343352683625411e-06, |
|
"loss": 0.3416, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.8457300275482096, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.3845646281813507e-06, |
|
"loss": 0.3235, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.8484848484848486, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 1.3356667915121025e-06, |
|
"loss": 0.3409, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.8512396694214877, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 1.2876421911288905e-06, |
|
"loss": 0.351, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.8539944903581267, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.240491252077125e-06, |
|
"loss": 0.3493, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.8567493112947657, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.1942143916698457e-06, |
|
"loss": 0.3433, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.8595041322314048, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.148812019483958e-06, |
|
"loss": 0.3431, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.8622589531680442, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.104284537356659e-06, |
|
"loss": 0.3519, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.8650137741046833, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.060632339381873e-06, |
|
"loss": 0.3411, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.8677685950413223, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 1.0178558119067315e-06, |
|
"loss": 0.3469, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.8705234159779613, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 9.759553335281891e-07, |
|
"loss": 0.3529, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.873278236914601, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 9.349312750896455e-07, |
|
"loss": 0.3446, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.87603305785124, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 8.947839996777285e-07, |
|
"loss": 0.3469, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.878787878787879, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 8.555138626189618e-07, |
|
"loss": 0.3447, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.881542699724518, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 8.171212114767345e-07, |
|
"loss": 0.336, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.884297520661157, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 7.796063860481595e-07, |
|
"loss": 0.343, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.887052341597796, |
|
"grad_norm": 0.375, |
|
"learning_rate": 7.429697183610862e-07, |
|
"loss": 0.354, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.889807162534435, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 7.072115326711704e-07, |
|
"loss": 0.3513, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.8925619834710745, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 6.723321454590092e-07, |
|
"loss": 0.3429, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.8953168044077136, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 6.383318654272774e-07, |
|
"loss": 0.3525, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.8980716253443526, |
|
"grad_norm": 0.375, |
|
"learning_rate": 6.052109934980843e-07, |
|
"loss": 0.3558, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.9008264462809916, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 5.729698228102653e-07, |
|
"loss": 0.3477, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.903581267217631, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 5.416086387167951e-07, |
|
"loss": 0.3367, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.90633608815427, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.111277187822339e-07, |
|
"loss": 0.3476, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.909090909090909, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 4.815273327803182e-07, |
|
"loss": 0.3449, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.911845730027548, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 4.5280774269154115e-07, |
|
"loss": 0.3314, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.9146005509641872, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 4.2496920270085337e-07, |
|
"loss": 0.3634, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.9173553719008263, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 3.9801195919541014e-07, |
|
"loss": 0.3503, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.9201101928374653, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 3.719362507623614e-07, |
|
"loss": 0.3404, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.922865013774105, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 3.467423081867649e-07, |
|
"loss": 0.3349, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.925619834710744, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.224303544495766e-07, |
|
"loss": 0.3448, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.928374655647383, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 2.990006047255967e-07, |
|
"loss": 0.3464, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.931129476584022, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 2.764532663816266e-07, |
|
"loss": 0.3602, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.9338842975206614, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.547885389746485e-07, |
|
"loss": 0.3501, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.9366391184573004, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 2.3400661424998194e-07, |
|
"loss": 0.3363, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.9393939393939394, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.141076761396521e-07, |
|
"loss": 0.3449, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.9421487603305785, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 1.9509190076074657e-07, |
|
"loss": 0.3475, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.9449035812672175, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 1.7695945641386102e-07, |
|
"loss": 0.3474, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.9476584022038566, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.5971050358158936e-07, |
|
"loss": 0.3409, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.950413223140496, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.4334519492711362e-07, |
|
"loss": 0.3497, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.953168044077135, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 1.278636752928497e-07, |
|
"loss": 0.3364, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.955922865013774, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 1.1326608169920372e-07, |
|
"loss": 0.3427, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.958677685950413, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 9.955254334328423e-08, |
|
"loss": 0.3459, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.9614325068870526, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 8.672318159782533e-08, |
|
"loss": 0.3483, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.9641873278236917, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 7.47781100100875e-08, |
|
"loss": 0.441, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.9669421487603307, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 6.37174343008251e-08, |
|
"loss": 0.3425, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.9696969696969697, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 5.354125236343155e-08, |
|
"loss": 0.3562, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.9724517906336088, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 4.424965426298444e-08, |
|
"loss": 0.3551, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.975206611570248, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 3.584272223546847e-08, |
|
"loss": 0.3579, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.977961432506887, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.8320530687098166e-08, |
|
"loss": 0.3478, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.9807162534435263, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 2.168314619359624e-08, |
|
"loss": 0.3547, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.9834710743801653, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 1.593062749967178e-08, |
|
"loss": 0.3494, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.9862258953168044, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.1063025518409653e-08, |
|
"loss": 0.3469, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.9889807162534434, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 7.080383330915208e-09, |
|
"loss": 0.3602, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.991735537190083, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 3.982736185859093e-09, |
|
"loss": 0.345, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.994490358126722, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 1.7701114991997003e-09, |
|
"loss": 0.3427, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.997245179063361, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.4252885396112163e-10, |
|
"loss": 0.3469, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.0, |
|
"loss": 0.3415, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_train_loss": 0.4371190667152405, |
|
"eval_train_runtime": 8.4038, |
|
"eval_train_samples_per_second": 160.284, |
|
"eval_train_steps_per_second": 20.11, |
|
"step": 1089 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1089, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.4213817871368192e+17, |
|
"train_batch_size": 60, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|