{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.991428360304441,
  "eval_steps": 500,
  "global_step": 1688,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 3.921568627450981e-07,
      "loss": 2.8563,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 7.843137254901962e-07,
      "loss": 3.0208,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 4.0636,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.5686274509803923e-06,
      "loss": 3.3144,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.96078431372549e-06,
      "loss": 3.9967,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 9.4344,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.7450980392156867e-06,
      "loss": 3.5008,
      "step": 7
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.1372549019607846e-06,
      "loss": 3.6467,
      "step": 8
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.529411764705883e-06,
      "loss": 2.9515,
      "step": 9
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.92156862745098e-06,
      "loss": 3.2709,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.313725490196079e-06,
      "loss": 4.4522,
      "step": 11
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.705882352941177e-06,
      "loss": 3.2108,
      "step": 12
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.098039215686274e-06,
      "loss": 2.8653,
      "step": 13
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.4901960784313735e-06,
      "loss": 2.9295,
      "step": 14
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.882352941176471e-06,
      "loss": 3.234,
      "step": 15
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.274509803921569e-06,
      "loss": 3.3388,
      "step": 16
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.666666666666667e-06,
      "loss": 2.8298,
      "step": 17
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.058823529411766e-06,
      "loss": 6.0088,
      "step": 18
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.450980392156863e-06,
      "loss": 2.7609,
      "step": 19
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.84313725490196e-06,
      "loss": 2.3344,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.23529411764706e-06,
      "loss": 2.1906,
      "step": 21
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.627450980392157e-06,
      "loss": 1.7675,
      "step": 22
    },
    {
      "epoch": 0.05,
      "learning_rate": 9.019607843137256e-06,
      "loss": 1.6679,
      "step": 23
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.411764705882354e-06,
      "loss": 1.7167,
      "step": 24
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.803921568627451e-06,
      "loss": 1.7548,
      "step": 25
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.0196078431372549e-05,
      "loss": 1.4444,
      "step": 26
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.0588235294117648e-05,
      "loss": 3.8395,
      "step": 27
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.0980392156862747e-05,
      "loss": 1.5312,
      "step": 28
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.1372549019607844e-05,
      "loss": 1.4848,
      "step": 29
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 1.2994,
      "step": 30
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.215686274509804e-05,
      "loss": 1.1449,
      "step": 31
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.2549019607843138e-05,
      "loss": 1.1011,
      "step": 32
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.2941176470588238e-05,
      "loss": 1.2697,
      "step": 33
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.7739,
      "step": 34
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.3725490196078432e-05,
      "loss": 1.4039,
      "step": 35
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.4117647058823532e-05,
      "loss": 1.3035,
      "step": 36
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.4509803921568629e-05,
      "loss": 1.1648,
      "step": 37
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.4901960784313726e-05,
      "loss": 1.0714,
      "step": 38
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5294117647058822e-05,
      "loss": 1.0585,
      "step": 39
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.568627450980392e-05,
      "loss": 1.3108,
      "step": 40
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.607843137254902e-05,
      "loss": 1.4937,
      "step": 41
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.647058823529412e-05,
      "loss": 0.9606,
      "step": 42
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.686274509803922e-05,
      "loss": 1.1518,
      "step": 43
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.7254901960784314e-05,
      "loss": 0.9404,
      "step": 44
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.7647058823529414e-05,
      "loss": 1.1609,
      "step": 45
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.8039215686274513e-05,
      "loss": 0.9896,
      "step": 46
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.843137254901961e-05,
      "loss": 1.0884,
      "step": 47
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.8823529411764708e-05,
      "loss": 1.0914,
      "step": 48
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9215686274509807e-05,
      "loss": 1.2738,
      "step": 49
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9607843137254903e-05,
      "loss": 1.0804,
      "step": 50
    },
    {
      "epoch": 0.12,
      "learning_rate": 2e-05,
      "loss": 1.0388,
      "step": 51
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9987782529016497e-05,
      "loss": 1.2748,
      "step": 52
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.997556505803299e-05,
      "loss": 1.128,
      "step": 53
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9963347587049484e-05,
      "loss": 1.082,
      "step": 54
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9951130116065975e-05,
      "loss": 1.084,
      "step": 55
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.993891264508247e-05,
      "loss": 0.9747,
      "step": 56
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9926695174098962e-05,
      "loss": 0.9725,
      "step": 57
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9914477703115457e-05,
      "loss": 1.0974,
      "step": 58
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9902260232131952e-05,
      "loss": 1.2239,
      "step": 59
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9890042761148444e-05,
      "loss": 1.2422,
      "step": 60
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.987782529016494e-05,
      "loss": 1.0937,
      "step": 61
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.986560781918143e-05,
      "loss": 1.2242,
      "step": 62
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9853390348197926e-05,
      "loss": 1.0892,
      "step": 63
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9841172877214418e-05,
      "loss": 1.3148,
      "step": 64
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9828955406230913e-05,
      "loss": 1.3904,
      "step": 65
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9816737935247404e-05,
      "loss": 1.1273,
      "step": 66
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.98045204642639e-05,
      "loss": 1.0521,
      "step": 67
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.979230299328039e-05,
      "loss": 1.0674,
      "step": 68
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9780085522296886e-05,
      "loss": 1.3415,
      "step": 69
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9767868051313378e-05,
      "loss": 1.9016,
      "step": 70
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9755650580329873e-05,
      "loss": 1.0203,
      "step": 71
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9743433109346365e-05,
      "loss": 1.1761,
      "step": 72
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.973121563836286e-05,
      "loss": 1.0342,
      "step": 73
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9718998167379355e-05,
      "loss": 1.275,
      "step": 74
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.970678069639585e-05,
      "loss": 0.9881,
      "step": 75
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9694563225412342e-05,
      "loss": 1.242,
      "step": 76
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9682345754428837e-05,
      "loss": 1.2509,
      "step": 77
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.967012828344533e-05,
      "loss": 0.9321,
      "step": 78
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9657910812461824e-05,
      "loss": 1.1749,
      "step": 79
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9645693341478315e-05,
      "loss": 1.0389,
      "step": 80
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.963347587049481e-05,
      "loss": 0.979,
      "step": 81
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9621258399511302e-05,
      "loss": 1.0309,
      "step": 82
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9609040928527797e-05,
      "loss": 0.8478,
      "step": 83
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.959682345754429e-05,
      "loss": 1.1466,
      "step": 84
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9584605986560784e-05,
      "loss": 1.0715,
      "step": 85
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9572388515577276e-05,
      "loss": 1.0457,
      "step": 86
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.956017104459377e-05,
      "loss": 0.9272,
      "step": 87
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9547953573610263e-05,
      "loss": 1.7554,
      "step": 88
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9535736102626758e-05,
      "loss": 1.3094,
      "step": 89
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9523518631643253e-05,
      "loss": 0.8661,
      "step": 90
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9511301160659744e-05,
      "loss": 1.1597,
      "step": 91
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.949908368967624e-05,
      "loss": 1.1494,
      "step": 92
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.948686621869273e-05,
      "loss": 1.0246,
      "step": 93
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9474648747709226e-05,
      "loss": 1.0623,
      "step": 94
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9462431276725718e-05,
      "loss": 1.016,
      "step": 95
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9450213805742213e-05,
      "loss": 1.3593,
      "step": 96
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9437996334758705e-05,
      "loss": 1.1301,
      "step": 97
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.94257788637752e-05,
      "loss": 1.188,
      "step": 98
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.941356139279169e-05,
      "loss": 0.8702,
      "step": 99
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9401343921808187e-05,
      "loss": 1.1883,
      "step": 100
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.938912645082468e-05,
      "loss": 0.9031,
      "step": 101
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9376908979841174e-05,
      "loss": 1.238,
      "step": 102
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.936469150885767e-05,
      "loss": 0.9101,
      "step": 103
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9352474037874164e-05,
      "loss": 0.9643,
      "step": 104
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9340256566890655e-05,
      "loss": 1.081,
      "step": 105
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.932803909590715e-05,
      "loss": 1.1242,
      "step": 106
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9315821624923642e-05,
      "loss": 0.9434,
      "step": 107
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9303604153940137e-05,
      "loss": 1.0686,
      "step": 108
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.929138668295663e-05,
      "loss": 1.1023,
      "step": 109
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9279169211973124e-05,
      "loss": 1.0204,
      "step": 110
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9266951740989616e-05,
      "loss": 1.0261,
      "step": 111
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.925473427000611e-05,
      "loss": 0.905,
      "step": 112
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9242516799022603e-05,
      "loss": 1.2208,
      "step": 113
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9230299328039098e-05,
      "loss": 1.0668,
      "step": 114
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.921808185705559e-05,
      "loss": 0.9994,
      "step": 115
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9205864386072085e-05,
      "loss": 1.0346,
      "step": 116
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9193646915088576e-05,
      "loss": 1.4384,
      "step": 117
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.918142944410507e-05,
      "loss": 1.1412,
      "step": 118
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9169211973121567e-05,
      "loss": 1.1746,
      "step": 119
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9156994502138058e-05,
      "loss": 0.9614,
      "step": 120
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9144777031154553e-05,
      "loss": 1.0198,
      "step": 121
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.913255956017105e-05,
      "loss": 0.9384,
      "step": 122
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.912034208918754e-05,
      "loss": 1.1136,
      "step": 123
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9108124618204035e-05,
      "loss": 1.2213,
      "step": 124
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9095907147220527e-05,
      "loss": 0.9353,
      "step": 125
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9083689676237022e-05,
      "loss": 1.0858,
      "step": 126
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9071472205253514e-05,
      "loss": 1.209,
      "step": 127
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.905925473427001e-05,
      "loss": 1.006,
      "step": 128
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.90470372632865e-05,
      "loss": 1.1164,
      "step": 129
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9034819792302996e-05,
      "loss": 1.0949,
      "step": 130
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9022602321319487e-05,
      "loss": 0.9299,
      "step": 131
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9010384850335982e-05,
      "loss": 0.7959,
      "step": 132
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8998167379352474e-05,
      "loss": 0.8739,
      "step": 133
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.898594990836897e-05,
      "loss": 0.9052,
      "step": 134
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8973732437385464e-05,
      "loss": 0.7539,
      "step": 135
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8961514966401956e-05,
      "loss": 1.2152,
      "step": 136
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.894929749541845e-05,
      "loss": 0.9702,
      "step": 137
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8937080024434943e-05,
      "loss": 1.0243,
      "step": 138
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8924862553451438e-05,
      "loss": 0.9604,
      "step": 139
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.891264508246793e-05,
      "loss": 0.8724,
      "step": 140
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8900427611484425e-05,
      "loss": 1.0891,
      "step": 141
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8888210140500916e-05,
      "loss": 0.8072,
      "step": 142
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.887599266951741e-05,
      "loss": 0.9814,
      "step": 143
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8863775198533903e-05,
      "loss": 1.8261,
      "step": 144
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.88515577275504e-05,
      "loss": 0.9005,
      "step": 145
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.883934025656689e-05,
      "loss": 0.9833,
      "step": 146
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8827122785583385e-05,
      "loss": 0.7715,
      "step": 147
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8814905314599877e-05,
      "loss": 1.0014,
      "step": 148
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8802687843616375e-05,
      "loss": 0.9512,
      "step": 149
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8790470372632867e-05,
      "loss": 0.9101,
      "step": 150
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8778252901649362e-05,
      "loss": 0.8743,
      "step": 151
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8766035430665854e-05,
      "loss": 1.1239,
      "step": 152
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.875381795968235e-05,
      "loss": 0.7849,
      "step": 153
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.874160048869884e-05,
      "loss": 0.9635,
      "step": 154
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8729383017715336e-05,
      "loss": 1.0813,
      "step": 155
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8717165546731827e-05,
      "loss": 1.216,
      "step": 156
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8704948075748323e-05,
      "loss": 1.4687,
      "step": 157
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8692730604764814e-05,
      "loss": 1.2118,
      "step": 158
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.868051313378131e-05,
      "loss": 0.9839,
      "step": 159
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.86682956627978e-05,
      "loss": 1.1509,
      "step": 160
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8656078191814296e-05,
      "loss": 1.1231,
      "step": 161
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8643860720830788e-05,
      "loss": 1.1005,
      "step": 162
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8631643249847283e-05,
      "loss": 1.0311,
      "step": 163
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8619425778863778e-05,
      "loss": 1.007,
      "step": 164
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.860720830788027e-05,
      "loss": 1.0144,
      "step": 165
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8594990836896765e-05,
      "loss": 1.025,
      "step": 166
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8582773365913257e-05,
      "loss": 1.079,
      "step": 167
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.857055589492975e-05,
      "loss": 0.9763,
      "step": 168
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8558338423946243e-05,
      "loss": 1.0278,
      "step": 169
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.854612095296274e-05,
      "loss": 1.1604,
      "step": 170
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.853390348197923e-05,
      "loss": 0.9794,
      "step": 171
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8521686010995725e-05,
      "loss": 1.1152,
      "step": 172
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.850946854001222e-05,
      "loss": 1.0736,
      "step": 173
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8497251069028712e-05,
      "loss": 1.1069,
      "step": 174
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8485033598045207e-05,
      "loss": 1.0072,
      "step": 175
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.84728161270617e-05,
      "loss": 1.0369,
      "step": 176
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8460598656078194e-05,
      "loss": 0.8805,
      "step": 177
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8448381185094686e-05,
      "loss": 1.014,
      "step": 178
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.843616371411118e-05,
      "loss": 1.072,
      "step": 179
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8423946243127676e-05,
      "loss": 0.9177,
      "step": 180
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8411728772144168e-05,
      "loss": 0.8555,
      "step": 181
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8399511301160663e-05,
      "loss": 1.0342,
      "step": 182
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8387293830177154e-05,
      "loss": 0.9845,
      "step": 183
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.837507635919365e-05,
      "loss": 0.8063,
      "step": 184
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.836285888821014e-05,
      "loss": 1.2795,
      "step": 185
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8350641417226636e-05,
      "loss": 1.0151,
      "step": 186
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8338423946243128e-05,
      "loss": 0.8541,
      "step": 187
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8326206475259623e-05,
      "loss": 0.9803,
      "step": 188
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8313989004276115e-05,
      "loss": 0.7711,
      "step": 189
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.830177153329261e-05,
      "loss": 2.1584,
      "step": 190
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.82895540623091e-05,
      "loss": 0.7788,
      "step": 191
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8277336591325597e-05,
      "loss": 1.1427,
      "step": 192
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.826511912034209e-05,
      "loss": 1.0952,
      "step": 193
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8252901649358587e-05,
      "loss": 0.8876,
      "step": 194
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.824068417837508e-05,
      "loss": 0.9227,
      "step": 195
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8228466707391574e-05,
      "loss": 0.8085,
      "step": 196
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8216249236408065e-05,
      "loss": 0.7312,
      "step": 197
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.820403176542456e-05,
      "loss": 1.1112,
      "step": 198
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8191814294441052e-05,
      "loss": 1.5682,
      "step": 199
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8179596823457547e-05,
      "loss": 1.2667,
      "step": 200
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.816737935247404e-05,
      "loss": 1.1243,
      "step": 201
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8155161881490534e-05,
      "loss": 0.8359,
      "step": 202
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8142944410507026e-05,
      "loss": 1.1438,
      "step": 203
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.813072693952352e-05,
      "loss": 0.7886,
      "step": 204
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8118509468540013e-05,
      "loss": 0.9352,
      "step": 205
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.8106291997556508e-05,
      "loss": 0.8506,
      "step": 206
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.8094074526573e-05,
      "loss": 1.0453,
      "step": 207
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.8081857055589494e-05,
      "loss": 1.0867,
      "step": 208
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.806963958460599e-05,
      "loss": 0.9081,
      "step": 209
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.805742211362248e-05,
      "loss": 0.913,
      "step": 210
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8045204642638976e-05,
      "loss": 1.0962,
      "step": 211
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8032987171655468e-05,
      "loss": 0.9777,
      "step": 212
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8020769700671963e-05,
      "loss": 1.0239,
      "step": 213
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8008552229688455e-05,
      "loss": 1.203,
      "step": 214
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.799633475870495e-05,
      "loss": 0.9004,
      "step": 215
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7984117287721442e-05,
      "loss": 0.8988,
      "step": 216
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7971899816737937e-05,
      "loss": 1.143,
      "step": 217
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.795968234575443e-05,
      "loss": 1.0774,
      "step": 218
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7947464874770924e-05,
      "loss": 1.053,
      "step": 219
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7935247403787415e-05,
      "loss": 1.0897,
      "step": 220
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.792302993280391e-05,
      "loss": 1.0885,
      "step": 221
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7910812461820402e-05,
      "loss": 1.1508,
      "step": 222
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7898594990836897e-05,
      "loss": 1.0426,
      "step": 223
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7886377519853392e-05,
      "loss": 1.0117,
      "step": 224
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7874160048869887e-05,
      "loss": 0.8381,
      "step": 225
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.786194257788638e-05,
      "loss": 0.8952,
      "step": 226
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7849725106902874e-05,
      "loss": 0.8793,
      "step": 227
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7837507635919366e-05,
      "loss": 1.0481,
      "step": 228
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.782529016493586e-05,
      "loss": 1.0114,
      "step": 229
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7813072693952353e-05,
      "loss": 1.1688,
      "step": 230
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7800855222968848e-05,
      "loss": 1.1468,
      "step": 231
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.778863775198534e-05,
      "loss": 1.208,
      "step": 232
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7776420281001835e-05,
      "loss": 1.111,
      "step": 233
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7764202810018326e-05,
      "loss": 0.9851,
      "step": 234
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.775198533903482e-05,
      "loss": 0.8821,
      "step": 235
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7739767868051313e-05,
      "loss": 1.0106,
      "step": 236
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7727550397067808e-05,
      "loss": 0.9257,
      "step": 237
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.77153329260843e-05,
      "loss": 0.9688,
      "step": 238
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7703115455100795e-05,
      "loss": 1.1994,
      "step": 239
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.769089798411729e-05,
      "loss": 1.0435,
      "step": 240
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7678680513133785e-05,
      "loss": 0.9776,
      "step": 241
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7666463042150277e-05,
      "loss": 1.0205,
      "step": 242
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7654245571166772e-05,
      "loss": 1.0264,
      "step": 243
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7642028100183264e-05,
      "loss": 0.8581,
      "step": 244
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.762981062919976e-05,
      "loss": 0.8968,
      "step": 245
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.761759315821625e-05,
      "loss": 0.9426,
      "step": 246
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7605375687232746e-05,
      "loss": 1.1221,
      "step": 247
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7593158216249237e-05,
      "loss": 1.4892,
      "step": 248
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7580940745265732e-05,
      "loss": 1.0382,
      "step": 249
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7568723274282224e-05,
      "loss": 0.9486,
      "step": 250
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.755650580329872e-05,
      "loss": 1.1362,
      "step": 251
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.754428833231521e-05,
      "loss": 1.0047,
      "step": 252
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7532070861331706e-05,
      "loss": 1.156,
      "step": 253
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.75198533903482e-05,
      "loss": 1.0773,
      "step": 254
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7507635919364693e-05,
      "loss": 0.8051,
      "step": 255
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7495418448381188e-05,
      "loss": 1.0409,
      "step": 256
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.748320097739768e-05,
      "loss": 0.7593,
      "step": 257
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7470983506414175e-05,
      "loss": 0.8239,
      "step": 258
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7458766035430666e-05,
      "loss": 1.7364,
      "step": 259
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.744654856444716e-05,
      "loss": 0.9746,
      "step": 260
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.7434331093463653e-05,
      "loss": 0.8927,
      "step": 261
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.742211362248015e-05,
      "loss": 0.8916,
      "step": 262
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.740989615149664e-05,
      "loss": 1.0186,
      "step": 263
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.7397678680513135e-05,
      "loss": 1.0606,
      "step": 264
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7385461209529627e-05,
      "loss": 0.9529,
      "step": 265
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7373243738546122e-05,
      "loss": 0.795,
      "step": 266
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7361026267562614e-05,
      "loss": 1.0289,
      "step": 267
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.734880879657911e-05,
      "loss": 0.8401,
      "step": 268
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7336591325595604e-05,
      "loss": 1.2338,
      "step": 269
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.73243738546121e-05,
      "loss": 0.9777,
      "step": 270
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.731215638362859e-05,
      "loss": 1.1214,
      "step": 271
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7299938912645086e-05,
      "loss": 1.1373,
      "step": 272
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7287721441661577e-05,
      "loss": 0.8721,
      "step": 273
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7275503970678073e-05,
      "loss": 0.9177,
      "step": 274
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7263286499694564e-05,
      "loss": 1.0082,
      "step": 275
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.725106902871106e-05,
      "loss": 1.0337,
      "step": 276
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.723885155772755e-05,
      "loss": 0.9424,
      "step": 277
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7226634086744046e-05,
      "loss": 0.8977,
      "step": 278
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7214416615760538e-05,
      "loss": 1.3503,
      "step": 279
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7202199144777033e-05,
      "loss": 0.9975,
      "step": 280
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7189981673793525e-05,
      "loss": 0.958,
      "step": 281
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.717776420281002e-05,
      "loss": 0.9042,
      "step": 282
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.716554673182651e-05,
      "loss": 0.9128,
      "step": 283
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7153329260843007e-05,
      "loss": 0.9833,
      "step": 284
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.71411117898595e-05,
      "loss": 0.9175,
      "step": 285
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7128894318875993e-05,
      "loss": 1.053,
      "step": 286
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.711667684789249e-05,
      "loss": 1.0271,
      "step": 287
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.710445937690898e-05,
      "loss": 0.8472,
      "step": 288
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7092241905925475e-05,
      "loss": 0.937,
      "step": 289
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7080024434941967e-05,
      "loss": 0.8678,
      "step": 290
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7067806963958462e-05,
      "loss": 1.1119,
      "step": 291
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7055589492974954e-05,
      "loss": 0.8468,
      "step": 292
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.704337202199145e-05,
      "loss": 1.135,
      "step": 293
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7031154551007944e-05,
      "loss": 0.9174,
      "step": 294
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7018937080024436e-05,
      "loss": 1.0927,
      "step": 295
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.700671960904093e-05,
      "loss": 0.9993,
      "step": 296
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.6994502138057422e-05,
      "loss": 0.7849,
      "step": 297
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.6982284667073918e-05,
      "loss": 0.88,
      "step": 298
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.6970067196090413e-05,
      "loss": 0.9667,
      "step": 299
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.6957849725106904e-05,
      "loss": 0.9032,
      "step": 300
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.69456322541234e-05,
      "loss": 0.9171,
      "step": 301
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.693341478313989e-05,
      "loss": 1.0589,
      "step": 302
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.6921197312156386e-05,
      "loss": 1.1506,
      "step": 303
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.6908979841172878e-05,
      "loss": 1.0669,
      "step": 304
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.6896762370189373e-05,
      "loss": 0.8916,
      "step": 305
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.6884544899205865e-05,
      "loss": 0.701,
      "step": 306
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.687232742822236e-05,
      "loss": 0.9326,
      "step": 307
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.686010995723885e-05,
      "loss": 0.975,
      "step": 308
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.6847892486255347e-05,
      "loss": 0.8561,
      "step": 309
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.683567501527184e-05,
      "loss": 0.8815,
      "step": 310
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.6823457544288334e-05,
      "loss": 0.8951,
      "step": 311
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.6811240073304825e-05,
      "loss": 0.9312,
      "step": 312
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.679902260232132e-05,
      "loss": 0.9607,
      "step": 313
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.6786805131337815e-05,
      "loss": 1.0822,
      "step": 314
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.677458766035431e-05,
      "loss": 0.9301,
      "step": 315
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.6762370189370802e-05,
      "loss": 0.7862,
      "step": 316
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.6750152718387297e-05,
      "loss": 0.8397,
      "step": 317
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.673793524740379e-05,
      "loss": 1.2027,
      "step": 318
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.6725717776420284e-05,
      "loss": 0.9506,
      "step": 319
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6713500305436776e-05,
      "loss": 0.7935,
      "step": 320
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.670128283445327e-05,
      "loss": 0.9729,
      "step": 321
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6689065363469763e-05,
      "loss": 1.023,
      "step": 322
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6676847892486258e-05,
      "loss": 1.1224,
      "step": 323
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.666463042150275e-05,
      "loss": 0.9656,
      "step": 324
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6652412950519245e-05,
      "loss": 0.9113,
      "step": 325
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6640195479535736e-05,
      "loss": 0.9026,
      "step": 326
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.662797800855223e-05,
      "loss": 0.8922,
      "step": 327
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6615760537568723e-05,
      "loss": 0.9155,
      "step": 328
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6603543066585218e-05,
      "loss": 0.9721,
      "step": 329
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6591325595601713e-05,
      "loss": 1.2436,
      "step": 330
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6579108124618205e-05,
      "loss": 0.818,
      "step": 331
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.65668906536347e-05,
      "loss": 1.1218,
      "step": 332
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6554673182651192e-05,
      "loss": 1.0335,
      "step": 333
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6542455711667687e-05,
      "loss": 1.0174,
      "step": 334
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.653023824068418e-05,
      "loss": 1.0147,
      "step": 335
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6518020769700674e-05,
      "loss": 0.8565,
      "step": 336
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.6505803298717165e-05,
      "loss": 1.2244,
      "step": 337
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.649358582773366e-05,
      "loss": 0.9974,
      "step": 338
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.6481368356750152e-05,
      "loss": 1.2834,
      "step": 339
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.6469150885766647e-05,
      "loss": 1.2875,
      "step": 340
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.645693341478314e-05,
      "loss": 0.7614,
      "step": 341
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6444715943799634e-05,
      "loss": 1.072,
      "step": 342
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6432498472816126e-05,
      "loss": 1.1406,
      "step": 343
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6420281001832624e-05,
      "loss": 0.919,
      "step": 344
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6408063530849116e-05,
      "loss": 1.7385,
      "step": 345
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.639584605986561e-05,
      "loss": 0.7223,
      "step": 346
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6383628588882103e-05,
      "loss": 0.9326,
      "step": 347
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6371411117898598e-05,
      "loss": 0.7997,
      "step": 348
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.635919364691509e-05,
      "loss": 0.8375,
      "step": 349
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6346976175931585e-05,
      "loss": 0.781,
      "step": 350
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6334758704948076e-05,
      "loss": 1.1172,
      "step": 351
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.632254123396457e-05,
      "loss": 0.9348,
      "step": 352
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6310323762981063e-05,
      "loss": 1.0187,
      "step": 353
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6298106291997558e-05,
      "loss": 1.0208,
      "step": 354
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.628588882101405e-05,
      "loss": 0.7895,
      "step": 355
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6273671350030545e-05,
      "loss": 0.9787,
      "step": 356
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6261453879047037e-05,
      "loss": 0.7512,
      "step": 357
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6249236408063532e-05,
      "loss": 0.8968,
      "step": 358
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6237018937080027e-05,
      "loss": 0.8869,
      "step": 359
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.622480146609652e-05,
      "loss": 0.8698,
      "step": 360
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6212583995113014e-05,
      "loss": 0.9319,
      "step": 361
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.620036652412951e-05,
      "loss": 0.9345,
      "step": 362
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6188149053146e-05,
      "loss": 0.9955,
      "step": 363
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6175931582162496e-05,
      "loss": 0.7471,
      "step": 364
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6163714111178987e-05,
      "loss": 1.7239,
      "step": 365
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6151496640195482e-05,
      "loss": 1.0179,
      "step": 366
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6139279169211974e-05,
      "loss": 0.8774,
      "step": 367
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.612706169822847e-05,
      "loss": 1.031,
      "step": 368
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.611484422724496e-05,
      "loss": 0.9079,
      "step": 369
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6102626756261456e-05,
      "loss": 0.7725,
      "step": 370
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6090409285277948e-05,
      "loss": 0.8754,
      "step": 371
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6078191814294443e-05,
      "loss": 1.0597,
      "step": 372
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6065974343310935e-05,
      "loss": 0.8674,
      "step": 373
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.605375687232743e-05,
      "loss": 0.8516,
      "step": 374
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6041539401343925e-05,
      "loss": 0.9218,
      "step": 375
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6029321930360416e-05,
      "loss": 0.921,
      "step": 376
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.601710445937691e-05,
      "loss": 0.8996,
      "step": 377
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6004886988393403e-05,
      "loss": 1.0742,
      "step": 378
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.59926695174099e-05,
      "loss": 0.9436,
      "step": 379
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.598045204642639e-05,
      "loss": 1.1556,
      "step": 380
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.5968234575442885e-05,
      "loss": 1.0067,
      "step": 381
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.5956017104459377e-05,
      "loss": 1.0641,
      "step": 382
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.5943799633475872e-05,
      "loss": 0.9416,
      "step": 383
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.5931582162492364e-05,
      "loss": 1.1844,
      "step": 384
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.591936469150886e-05,
      "loss": 1.0093,
      "step": 385
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.590714722052535e-05,
      "loss": 1.6791,
      "step": 386
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.5894929749541846e-05,
      "loss": 1.0131,
      "step": 387
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.5882712278558337e-05,
      "loss": 0.9257,
      "step": 388
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.5870494807574836e-05,
      "loss": 0.9159,
      "step": 389
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.5858277336591327e-05,
      "loss": 0.9818,
      "step": 390
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.5846059865607823e-05,
      "loss": 0.7884,
      "step": 391
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.5833842394624314e-05,
      "loss": 0.996,
      "step": 392
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.582162492364081e-05,
      "loss": 1.0875,
      "step": 393
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.58094074526573e-05,
      "loss": 0.9321,
      "step": 394
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.5797189981673796e-05,
      "loss": 1.249,
      "step": 395
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.5784972510690288e-05,
      "loss": 1.0016,
      "step": 396
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.5772755039706783e-05,
      "loss": 0.6872,
      "step": 397
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.5760537568723275e-05,
      "loss": 0.9661,
      "step": 398
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.574832009773977e-05,
      "loss": 0.8453,
      "step": 399
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.573610262675626e-05,
      "loss": 1.1277,
      "step": 400
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5723885155772757e-05,
      "loss": 1.1576,
      "step": 401
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5711667684789248e-05,
      "loss": 0.7893,
      "step": 402
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5699450213805743e-05,
      "loss": 0.9481,
      "step": 403
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.568723274282224e-05,
      "loss": 0.8554,
      "step": 404
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.567501527183873e-05,
      "loss": 0.8359,
      "step": 405
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5662797800855225e-05,
      "loss": 1.0039,
      "step": 406
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5650580329871717e-05,
      "loss": 0.9273,
      "step": 407
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5638362858888212e-05,
      "loss": 0.7658,
      "step": 408
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5626145387904704e-05,
      "loss": 0.9904,
      "step": 409
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.56139279169212e-05,
      "loss": 0.8476,
      "step": 410
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.560171044593769e-05,
      "loss": 0.9513,
      "step": 411
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5589492974954186e-05,
      "loss": 0.9458,
      "step": 412
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.557727550397068e-05,
      "loss": 0.9033,
      "step": 413
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5565058032987173e-05,
      "loss": 1.0079,
      "step": 414
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5552840562003668e-05,
      "loss": 0.7969,
      "step": 415
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.554062309102016e-05,
      "loss": 0.7405,
      "step": 416
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5528405620036654e-05,
      "loss": 1.0899,
      "step": 417
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5516188149053146e-05,
      "loss": 0.8311,
      "step": 418
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.550397067806964e-05,
      "loss": 0.6977,
      "step": 419
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5491753207086136e-05,
      "loss": 0.8898,
      "step": 420
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5479535736102628e-05,
      "loss": 0.9926,
      "step": 421
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5467318265119123e-05,
      "loss": 1.066,
      "step": 422
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5455100794135615e-05,
      "loss": 1.269,
      "step": 423
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.544288332315211e-05,
      "loss": 0.8965,
      "step": 424
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.54306658521686e-05,
      "loss": 0.8865,
      "step": 425
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5418448381185097e-05,
      "loss": 0.8984,
      "step": 426
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.540623091020159e-05,
      "loss": 0.7663,
      "step": 427
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5394013439218084e-05,
      "loss": 0.7365,
      "step": 428
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5381795968234575e-05,
      "loss": 0.7419,
      "step": 429
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.536957849725107e-05,
      "loss": 1.0062,
      "step": 430
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5357361026267562e-05,
      "loss": 0.9579,
      "step": 431
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5345143555284057e-05,
      "loss": 0.7584,
      "step": 432
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.533292608430055e-05,
      "loss": 1.0455,
      "step": 433
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5320708613317047e-05,
      "loss": 1.0083,
      "step": 434
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.530849114233354e-05,
      "loss": 1.0071,
      "step": 435
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5296273671350034e-05,
      "loss": 1.0326,
      "step": 436
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5284056200366526e-05,
      "loss": 1.0268,
      "step": 437
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.527183872938302e-05,
      "loss": 1.0069,
      "step": 438
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5259621258399513e-05,
      "loss": 0.8719,
      "step": 439
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5247403787416006e-05,
      "loss": 0.8618,
      "step": 440
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.52351863164325e-05,
      "loss": 0.9551,
      "step": 441
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5222968845448993e-05,
      "loss": 0.8953,
      "step": 442
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5210751374465486e-05,
      "loss": 1.0501,
      "step": 443
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.519853390348198e-05,
      "loss": 0.9688,
      "step": 444
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5186316432498473e-05,
      "loss": 0.6816,
      "step": 445
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5174098961514966e-05,
      "loss": 1.1863,
      "step": 446
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.516188149053146e-05,
      "loss": 0.9933,
      "step": 447
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5149664019547953e-05,
      "loss": 0.8878,
      "step": 448
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.513744654856445e-05,
      "loss": 0.5381,
      "step": 449
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5125229077580943e-05,
      "loss": 1.7055,
      "step": 450
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5113011606597437e-05,
      "loss": 0.9094,
      "step": 451
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.510079413561393e-05,
      "loss": 1.187,
      "step": 452
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5088576664630424e-05,
      "loss": 0.9681,
      "step": 453
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5076359193646917e-05,
      "loss": 0.8043,
      "step": 454
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.506414172266341e-05,
      "loss": 0.9815,
      "step": 455
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5051924251679904e-05,
      "loss": 0.9994,
      "step": 456
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5039706780696397e-05,
      "loss": 0.9199,
      "step": 457
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.502748930971289e-05,
      "loss": 0.7911,
      "step": 458
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.5015271838729384e-05,
      "loss": 0.6465,
      "step": 459
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.5003054367745877e-05,
      "loss": 0.8896,
      "step": 460
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4990836896762371e-05,
      "loss": 0.6867,
      "step": 461
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4978619425778864e-05,
      "loss": 1.0182,
      "step": 462
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4966401954795358e-05,
      "loss": 0.6785,
      "step": 463
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4954184483811851e-05,
      "loss": 0.8665,
      "step": 464
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4941967012828346e-05,
      "loss": 0.8603,
      "step": 465
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.492974954184484e-05,
      "loss": 0.8616,
      "step": 466
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4917532070861333e-05,
      "loss": 1.0062,
      "step": 467
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4905314599877826e-05,
      "loss": 0.8041,
      "step": 468
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.489309712889432e-05,
      "loss": 0.9556,
      "step": 469
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4880879657910813e-05,
      "loss": 0.9868,
      "step": 470
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4868662186927307e-05,
      "loss": 0.7873,
      "step": 471
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.48564447159438e-05,
      "loss": 1.095,
      "step": 472
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4844227244960293e-05,
      "loss": 0.9997,
      "step": 473
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4832009773976788e-05,
      "loss": 0.6657,
      "step": 474
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4819792302993282e-05,
      "loss": 0.7473,
      "step": 475
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4807574832009775e-05,
      "loss": 0.6901,
      "step": 476
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4795357361026269e-05,
      "loss": 0.7391,
      "step": 477
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4783139890042762e-05,
      "loss": 0.8638,
      "step": 478
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4770922419059255e-05,
      "loss": 1.1089,
      "step": 479
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.475870494807575e-05,
      "loss": 0.9425,
      "step": 480
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4746487477092244e-05,
      "loss": 0.9829,
      "step": 481
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4734270006108737e-05,
      "loss": 0.8924,
      "step": 482
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.472205253512523e-05,
      "loss": 0.748,
      "step": 483
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4709835064141724e-05,
      "loss": 0.8788,
      "step": 484
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4697617593158218e-05,
      "loss": 0.9783,
      "step": 485
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4685400122174711e-05,
      "loss": 0.983,
      "step": 486
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4673182651191204e-05,
      "loss": 0.8222,
      "step": 487
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4660965180207698e-05,
      "loss": 0.6696,
      "step": 488
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4648747709224191e-05,
      "loss": 0.8329,
      "step": 489
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4636530238240685e-05,
      "loss": 0.9915,
      "step": 490
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4624312767257178e-05,
      "loss": 0.9667,
      "step": 491
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4612095296273671e-05,
      "loss": 0.9371,
      "step": 492
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4599877825290165e-05,
      "loss": 0.9471,
      "step": 493
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4587660354306658e-05,
      "loss": 0.7271,
      "step": 494
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4575442883323155e-05,
      "loss": 0.7886,
      "step": 495
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4563225412339648e-05,
      "loss": 0.9027,
      "step": 496
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4551007941356142e-05,
      "loss": 1.0519,
      "step": 497
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4538790470372635e-05,
      "loss": 0.7029,
      "step": 498
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4526572999389129e-05,
      "loss": 0.7978,
      "step": 499
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4514355528405622e-05,
      "loss": 0.7465,
      "step": 500
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4502138057422115e-05,
      "loss": 1.0307,
      "step": 501
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.4489920586438609e-05,
      "loss": 0.9182,
      "step": 502
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.4477703115455102e-05,
      "loss": 0.8945,
      "step": 503
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.4465485644471596e-05,
      "loss": 0.9725,
      "step": 504
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.4453268173488089e-05,
      "loss": 0.8708,
      "step": 505
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.4441050702504582e-05,
      "loss": 0.8558,
      "step": 506
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.4428833231521076e-05,
      "loss": 0.7542,
      "step": 507
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.441661576053757e-05,
      "loss": 0.7065,
      "step": 508
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.4404398289554063e-05,
      "loss": 0.8901,
      "step": 509
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.4392180818570558e-05,
      "loss": 0.9281,
      "step": 510
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.4379963347587051e-05,
      "loss": 0.6954,
      "step": 511
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.4367745876603545e-05,
      "loss": 1.0345,
      "step": 512
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.4355528405620038e-05,
      "loss": 0.8923,
      "step": 513
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.4343310934636531e-05,
      "loss": 1.9401,
      "step": 514
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.4331093463653025e-05,
      "loss": 0.9246,
      "step": 515
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.4318875992669518e-05,
      "loss": 0.9386,
      "step": 516
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.4306658521686012e-05,
      "loss": 1.0143,
      "step": 517
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.4294441050702505e-05,
      "loss": 1.0519,
      "step": 518
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.4282223579718998e-05,
      "loss": 0.8793,
      "step": 519
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.4270006108735492e-05,
      "loss": 0.953,
      "step": 520
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.4257788637751985e-05,
      "loss": 0.8635,
      "step": 521
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.4245571166768479e-05,
      "loss": 0.6856,
      "step": 522
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.4233353695784972e-05,
      "loss": 1.0121,
      "step": 523
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.4221136224801465e-05,
      "loss": 0.9451,
      "step": 524
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.4208918753817962e-05,
      "loss": 0.8199,
      "step": 525
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.4196701282834456e-05,
      "loss": 0.8782,
      "step": 526
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.4184483811850949e-05,
      "loss": 1.3716,
      "step": 527
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.4172266340867442e-05,
      "loss": 1.0054,
      "step": 528
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.4160048869883936e-05,
      "loss": 0.9208,
      "step": 529
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.4147831398900429e-05,
      "loss": 0.9424,
      "step": 530
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.4135613927916923e-05,
      "loss": 0.872,
      "step": 531
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.4123396456933416e-05,
      "loss": 0.9398,
      "step": 532
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.411117898594991e-05,
      "loss": 0.8352,
      "step": 533
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.4098961514966403e-05,
      "loss": 0.7677,
      "step": 534
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.4086744043982896e-05,
      "loss": 0.812,
      "step": 535
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.407452657299939e-05,
      "loss": 0.7955,
      "step": 536
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.4062309102015883e-05,
|
"loss": 0.8821, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.4050091631032376e-05, |
|
"loss": 0.8505, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.403787416004887e-05, |
|
"loss": 0.9238, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.4025656689065365e-05, |
|
"loss": 0.8186, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.4013439218081858e-05, |
|
"loss": 0.7696, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.4001221747098352e-05, |
|
"loss": 0.9364, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.3989004276114847e-05, |
|
"loss": 0.9359, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.397678680513134e-05, |
|
"loss": 1.3465, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.3964569334147834e-05, |
|
"loss": 0.7101, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.3952351863164327e-05, |
|
"loss": 0.8432, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.394013439218082e-05, |
|
"loss": 0.8811, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3927916921197314e-05, |
|
"loss": 0.98, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3915699450213807e-05, |
|
"loss": 0.9535, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.39034819792303e-05, |
|
"loss": 0.8524, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3891264508246794e-05, |
|
"loss": 0.8698, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3879047037263287e-05, |
|
"loss": 0.8076, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.386682956627978e-05, |
|
"loss": 0.7953, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3854612095296274e-05, |
|
"loss": 0.8003, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.384239462431277e-05, |
|
"loss": 0.9767, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3830177153329263e-05, |
|
"loss": 0.9623, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3817959682345756e-05, |
|
"loss": 0.8226, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.380574221136225e-05, |
|
"loss": 0.7341, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3793524740378743e-05, |
|
"loss": 0.956, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3781307269395236e-05, |
|
"loss": 0.9852, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.376908979841173e-05, |
|
"loss": 0.8391, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3756872327428223e-05, |
|
"loss": 0.8168, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3744654856444716e-05, |
|
"loss": 0.9037, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.373243738546121e-05, |
|
"loss": 0.783, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3720219914477703e-05, |
|
"loss": 0.9026, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3708002443494197e-05, |
|
"loss": 0.9929, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.369578497251069e-05, |
|
"loss": 0.7848, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3683567501527183e-05, |
|
"loss": 0.8353, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3671350030543677e-05, |
|
"loss": 0.7222, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3659132559560174e-05, |
|
"loss": 0.8788, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3646915088576667e-05, |
|
"loss": 0.8452, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.363469761759316e-05, |
|
"loss": 1.1741, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3622480146609654e-05, |
|
"loss": 0.8651, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3610262675626147e-05, |
|
"loss": 0.7235, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.359804520464264e-05, |
|
"loss": 0.7979, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3585827733659134e-05, |
|
"loss": 1.0103, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3573610262675627e-05, |
|
"loss": 0.8078, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3561392791692121e-05, |
|
"loss": 0.8504, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3549175320708614e-05, |
|
"loss": 0.9012, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3536957849725108e-05, |
|
"loss": 0.8944, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3524740378741601e-05, |
|
"loss": 0.6727, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3512522907758094e-05, |
|
"loss": 0.9575, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3500305436774588e-05, |
|
"loss": 0.9692, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3488087965791081e-05, |
|
"loss": 0.8398, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3475870494807576e-05, |
|
"loss": 0.9793, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.346365302382407e-05, |
|
"loss": 0.864, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.3451435552840563e-05, |
|
"loss": 0.7917, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.3439218081857057e-05, |
|
"loss": 0.9555, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.342700061087355e-05, |
|
"loss": 0.7762, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.3414783139890043e-05, |
|
"loss": 1.0284, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.3402565668906537e-05, |
|
"loss": 0.7863, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.339034819792303e-05, |
|
"loss": 0.8598, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.3378130726939524e-05, |
|
"loss": 1.0224, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.3365913255956019e-05, |
|
"loss": 1.0629, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.3353695784972512e-05, |
|
"loss": 0.7989, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.3341478313989005e-05, |
|
"loss": 0.8779, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.3329260843005499e-05, |
|
"loss": 0.9438, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.3317043372021992e-05, |
|
"loss": 0.7229, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.3304825901038486e-05, |
|
"loss": 1.3111, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.329260843005498e-05, |
|
"loss": 0.7954, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.3280390959071474e-05, |
|
"loss": 0.9515, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.3268173488087968e-05, |
|
"loss": 1.0521, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.3255956017104461e-05, |
|
"loss": 1.4883, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.3243738546120954e-05, |
|
"loss": 1.431, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.3231521075137448e-05, |
|
"loss": 0.7516, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.3219303604153941e-05, |
|
"loss": 0.7319, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.3207086133170435e-05, |
|
"loss": 0.908, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.3194868662186928e-05, |
|
"loss": 0.6689, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.3182651191203421e-05, |
|
"loss": 0.8322, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.3170433720219915e-05, |
|
"loss": 0.7078, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.3158216249236408e-05, |
|
"loss": 0.9021, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.3145998778252902e-05, |
|
"loss": 0.7606, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.3133781307269395e-05, |
|
"loss": 0.9277, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.3121563836285888e-05, |
|
"loss": 1.0257, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.3109346365302385e-05, |
|
"loss": 0.8763, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.3097128894318879e-05, |
|
"loss": 0.7545, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.3084911423335372e-05, |
|
"loss": 0.9252, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.3072693952351865e-05, |
|
"loss": 0.8209, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.3060476481368359e-05, |
|
"loss": 0.8351, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.3048259010384852e-05, |
|
"loss": 0.9211, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.3036041539401346e-05, |
|
"loss": 0.8233, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.3023824068417839e-05, |
|
"loss": 0.9121, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.3011606597434332e-05, |
|
"loss": 1.0651, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.2999389126450826e-05, |
|
"loss": 0.8624, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.298717165546732e-05, |
|
"loss": 0.972, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.2974954184483813e-05, |
|
"loss": 0.7719, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.2962736713500306e-05, |
|
"loss": 0.8996, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.29505192425168e-05, |
|
"loss": 0.7956, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.2938301771533293e-05, |
|
"loss": 0.7981, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.2926084300549788e-05, |
|
"loss": 0.7689, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.2913866829566281e-05, |
|
"loss": 1.0973, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.2901649358582775e-05, |
|
"loss": 1.0437, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.2889431887599268e-05, |
|
"loss": 0.7753, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.2877214416615762e-05, |
|
"loss": 0.9648, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.2864996945632255e-05, |
|
"loss": 0.7858, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.2852779474648748e-05, |
|
"loss": 0.8735, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.2840562003665242e-05, |
|
"loss": 0.8917, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.2828344532681735e-05, |
|
"loss": 0.9024, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.2816127061698229e-05, |
|
"loss": 0.8578, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.2803909590714722e-05, |
|
"loss": 1.0562, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.2791692119731215e-05, |
|
"loss": 1.0963, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.2779474648747709e-05, |
|
"loss": 1.0202, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.2767257177764202e-05, |
|
"loss": 0.8426, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.2755039706780696e-05, |
|
"loss": 0.8889, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.2742822235797192e-05, |
|
"loss": 0.7992, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.2730604764813686e-05, |
|
"loss": 0.7759, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.2718387293830179e-05, |
|
"loss": 0.8782, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.2706169822846673e-05, |
|
"loss": 0.8086, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.2693952351863166e-05, |
|
"loss": 0.8115, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.268173488087966e-05, |
|
"loss": 0.7768, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.2669517409896153e-05, |
|
"loss": 0.9156, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.2657299938912646e-05, |
|
"loss": 0.7294, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.264508246792914e-05, |
|
"loss": 0.8086, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.2632864996945633e-05, |
|
"loss": 0.7654, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.2620647525962126e-05, |
|
"loss": 0.7887, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.260843005497862e-05, |
|
"loss": 0.7976, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.2596212583995113e-05, |
|
"loss": 0.9478, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.2583995113011607e-05, |
|
"loss": 1.0769, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.25717776420281e-05, |
|
"loss": 0.8891, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.2559560171044595e-05, |
|
"loss": 0.9067, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.2547342700061088e-05, |
|
"loss": 0.9056, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.2535125229077582e-05, |
|
"loss": 0.8696, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.2522907758094075e-05, |
|
"loss": 1.0335, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.251069028711057e-05, |
|
"loss": 0.8562, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.2498472816127064e-05, |
|
"loss": 0.9686, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.2486255345143557e-05, |
|
"loss": 0.8547, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.247403787416005e-05, |
|
"loss": 0.8656, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.2461820403176544e-05, |
|
"loss": 0.9283, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.2449602932193037e-05, |
|
"loss": 0.6445, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.243738546120953e-05, |
|
"loss": 0.7627, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.2425167990226024e-05, |
|
"loss": 0.7629, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.2412950519242518e-05, |
|
"loss": 0.7638, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.2400733048259011e-05, |
|
"loss": 1.0806, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.2388515577275504e-05, |
|
"loss": 0.9898, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.2376298106292e-05, |
|
"loss": 0.827, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.2364080635308493e-05, |
|
"loss": 0.8702, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.2351863164324986e-05, |
|
"loss": 1.1027, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.233964569334148e-05, |
|
"loss": 0.8754, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.2327428222357973e-05, |
|
"loss": 0.9453, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.2315210751374466e-05, |
|
"loss": 0.8387, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.230299328039096e-05, |
|
"loss": 0.8396, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.2290775809407453e-05, |
|
"loss": 0.9751, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.2278558338423947e-05, |
|
"loss": 0.9615, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.226634086744044e-05, |
|
"loss": 0.6734, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.2254123396456933e-05, |
|
"loss": 0.8172, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.2241905925473427e-05, |
|
"loss": 0.8595, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.222968845448992e-05, |
|
"loss": 0.9492, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.2217470983506414e-05, |
|
"loss": 0.7598, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.2205253512522907e-05, |
|
"loss": 0.739, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.2193036041539404e-05, |
|
"loss": 1.0339, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.2180818570555897e-05, |
|
"loss": 0.8684, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.216860109957239e-05, |
|
"loss": 1.0383, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.2156383628588884e-05, |
|
"loss": 0.6612, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.2144166157605377e-05, |
|
"loss": 0.8846, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.2131948686621871e-05, |
|
"loss": 0.9956, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.2119731215638364e-05, |
|
"loss": 0.9889, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.2107513744654858e-05, |
|
"loss": 0.7723, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.2095296273671351e-05, |
|
"loss": 1.857, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.2083078802687844e-05, |
|
"loss": 1.1582, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.2070861331704338e-05, |
|
"loss": 0.765, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.2058643860720831e-05, |
|
"loss": 1.0211, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.2046426389737325e-05, |
|
"loss": 0.9328, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.2034208918753818e-05, |
|
"loss": 0.9722, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.2021991447770312e-05, |
|
"loss": 0.7764, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.2009773976786807e-05, |
|
"loss": 1.3868, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.19975565058033e-05, |
|
"loss": 0.9356, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.1985339034819793e-05, |
|
"loss": 0.7488, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.1973121563836287e-05, |
|
"loss": 0.9903, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.196090409285278e-05, |
|
"loss": 0.8998, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.1948686621869274e-05, |
|
"loss": 1.3415, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.1936469150885767e-05, |
|
"loss": 0.9781, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.192425167990226e-05, |
|
"loss": 0.9349, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1912034208918754e-05, |
|
"loss": 1.4325, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1899816737935247e-05, |
|
"loss": 0.8335, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1887599266951742e-05, |
|
"loss": 0.8713, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1875381795968236e-05, |
|
"loss": 0.8416, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1863164324984729e-05, |
|
"loss": 0.7541, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1850946854001223e-05, |
|
"loss": 0.9488, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1838729383017716e-05, |
|
"loss": 1.3618, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1826511912034211e-05, |
|
"loss": 0.7833, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1814294441050704e-05, |
|
"loss": 0.8593, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.1802076970067198e-05, |
|
"loss": 1.5206, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.1789859499083691e-05, |
|
"loss": 0.9144, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.1777642028100185e-05, |
|
"loss": 0.8742, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.1765424557116678e-05, |
|
"loss": 0.8102, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.1753207086133171e-05, |
|
"loss": 0.8155, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.1740989615149665e-05, |
|
"loss": 1.3882, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.1728772144166158e-05, |
|
"loss": 0.7262, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.1716554673182652e-05, |
|
"loss": 0.8328, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.1704337202199145e-05, |
|
"loss": 1.0218, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.1692119731215638e-05, |
|
"loss": 0.8341, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.1679902260232132e-05, |
|
"loss": 0.9432, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.1667684789248625e-05, |
|
"loss": 0.6762, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.1655467318265119e-05, |
|
"loss": 1.0357, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.1643249847281615e-05, |
|
"loss": 1.127, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.1631032376298109e-05, |
|
"loss": 0.9397, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.1618814905314602e-05, |
|
"loss": 0.804, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.1606597434331096e-05, |
|
"loss": 0.9135, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.1594379963347589e-05, |
|
"loss": 0.8265, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.1582162492364082e-05, |
|
"loss": 0.8712, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.1569945021380576e-05, |
|
"loss": 0.8534, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.155772755039707e-05, |
|
"loss": 0.9936, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.1545510079413563e-05, |
|
"loss": 0.948, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.1533292608430056e-05, |
|
"loss": 0.8655, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.152107513744655e-05, |
|
"loss": 0.8543, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.1508857666463043e-05, |
|
"loss": 0.9635, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.1496640195479536e-05, |
|
"loss": 1.0263, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.148442272449603e-05, |
|
"loss": 1.0705, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.1472205253512523e-05, |
|
"loss": 0.8773, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.1459987782529018e-05, |
|
"loss": 0.946, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.1447770311545512e-05, |
|
"loss": 1.0058, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.1435552840562005e-05, |
|
"loss": 0.7454, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.1423335369578498e-05, |
|
"loss": 0.7572, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.1411117898594992e-05, |
|
"loss": 0.8239, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.1398900427611485e-05, |
|
"loss": 0.8484, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.1386682956627979e-05, |
|
"loss": 0.9367, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.1374465485644472e-05, |
|
"loss": 0.8643, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.1362248014660965e-05, |
|
"loss": 0.8459, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.1350030543677459e-05, |
|
"loss": 0.8101, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.1337813072693952e-05, |
|
"loss": 0.7751, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.1325595601710446e-05, |
|
"loss": 0.9773, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.1313378130726939e-05, |
|
"loss": 0.8384, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.1301160659743432e-05, |
|
"loss": 0.7405, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1288943188759926e-05, |
|
"loss": 0.8987, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1276725717776423e-05, |
|
"loss": 0.7701, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1264508246792916e-05, |
|
"loss": 1.0532, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.125229077580941e-05, |
|
"loss": 0.596, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.1240073304825903e-05, |
|
"loss": 0.9213, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.1227855833842396e-05, |
|
"loss": 1.0111, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.121563836285889e-05, |
|
"loss": 0.9192, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.1203420891875383e-05, |
|
"loss": 0.8301, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.1191203420891876e-05, |
|
"loss": 0.9083, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.117898594990837e-05, |
|
"loss": 0.9174, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.1166768478924863e-05, |
|
"loss": 0.7942, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.1154551007941357e-05, |
|
"loss": 0.8123, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.114233353695785e-05, |
|
"loss": 1.4707, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.1130116065974343e-05, |
|
"loss": 1.1259, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.1117898594990837e-05, |
|
"loss": 0.9025, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.110568112400733e-05, |
|
"loss": 0.7853, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.1093463653023825e-05, |
|
"loss": 0.844, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.1081246182040319e-05, |
|
"loss": 0.8999, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.1069028711056812e-05, |
|
"loss": 0.9179, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.1056811240073305e-05, |
|
"loss": 0.8639, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.10445937690898e-05, |
|
"loss": 0.7318, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.1032376298106294e-05, |
|
"loss": 0.9066, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.1020158827122787e-05, |
|
"loss": 0.7056, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.100794135613928e-05, |
|
"loss": 0.7939, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.0995723885155774e-05, |
|
"loss": 1.0263, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.0983506414172268e-05, |
|
"loss": 0.7399, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.0971288943188761e-05, |
|
"loss": 0.9089, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.0959071472205254e-05, |
|
"loss": 1.7133, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.0946854001221748e-05, |
|
"loss": 0.8887, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.0934636530238241e-05, |
|
"loss": 0.8566, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.0922419059254735e-05, |
|
"loss": 0.7976, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.091020158827123e-05, |
|
"loss": 0.9515, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.0897984117287723e-05, |
|
"loss": 0.7716, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.0885766646304216e-05, |
|
"loss": 1.0556, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.087354917532071e-05, |
|
"loss": 0.823, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.0861331704337203e-05, |
|
"loss": 0.7892, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.0849114233353697e-05, |
|
"loss": 0.7573, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.083689676237019e-05, |
|
"loss": 0.9268, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.0824679291386684e-05, |
|
"loss": 0.991, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.0812461820403177e-05, |
|
"loss": 0.8278, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.080024434941967e-05, |
|
"loss": 0.7928, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.0788026878436164e-05, |
|
"loss": 1.2381, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0775809407452657e-05, |
|
"loss": 0.7207, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.076359193646915e-05, |
|
"loss": 1.0762, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0751374465485644e-05, |
|
"loss": 0.6081, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0739156994502137e-05, |
|
"loss": 0.7581, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.0726939523518634e-05, |
|
"loss": 0.7063, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.0714722052535128e-05, |
|
"loss": 0.7691, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.0702504581551621e-05, |
|
"loss": 0.6792, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.0690287110568114e-05, |
|
"loss": 1.0439, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.0678069639584608e-05, |
|
"loss": 0.8147, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.0665852168601101e-05, |
|
"loss": 0.8393, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.0653634697617595e-05, |
|
"loss": 1.0408, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.0641417226634088e-05, |
|
"loss": 0.6781, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.0629199755650581e-05, |
|
"loss": 0.8998, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.0616982284667075e-05, |
|
"loss": 0.7384, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.0604764813683568e-05, |
|
"loss": 0.6089, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.0592547342700062e-05, |
|
"loss": 0.8509, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.0580329871716555e-05, |
|
"loss": 0.6579, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.0568112400733048e-05, |
|
"loss": 0.7701, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.0555894929749542e-05, |
|
"loss": 0.835, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.0543677458766037e-05, |
|
"loss": 1.524, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.053145998778253e-05, |
|
"loss": 0.8504, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0519242516799024e-05, |
|
"loss": 0.7722, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0507025045815517e-05, |
|
"loss": 0.7515, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.049480757483201e-05, |
|
"loss": 0.9103, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0482590103848504e-05, |
|
"loss": 0.8075, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0470372632864997e-05, |
|
"loss": 0.8557, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.045815516188149e-05, |
|
"loss": 1.0766, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0445937690897984e-05, |
|
"loss": 0.7036, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0433720219914477e-05, |
|
"loss": 1.0543, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0421502748930973e-05, |
|
"loss": 0.9389, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0409285277947466e-05, |
|
"loss": 0.8924, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.039706780696396e-05, |
|
"loss": 0.9157, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0384850335980453e-05, |
|
"loss": 0.8804, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0372632864996946e-05, |
|
"loss": 0.6333, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0360415394013441e-05, |
|
"loss": 0.7308, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0348197923029935e-05, |
|
"loss": 1.0287, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0335980452046428e-05, |
|
"loss": 0.8002, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0323762981062921e-05, |
|
"loss": 0.8761, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.0311545510079415e-05, |
|
"loss": 0.7511, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.0299328039095908e-05, |
|
"loss": 0.9387, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.0287110568112402e-05, |
|
"loss": 0.7782, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.0274893097128895e-05, |
|
"loss": 0.7705, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.0262675626145388e-05, |
|
"loss": 0.8385, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.0250458155161882e-05, |
|
"loss": 0.9434, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.0238240684178375e-05, |
|
"loss": 0.6204, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.0226023213194869e-05, |
|
"loss": 0.8088, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.0213805742211362e-05, |
|
"loss": 0.8011, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.0201588271227855e-05, |
|
"loss": 0.7251, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.0189370800244349e-05, |
|
"loss": 0.7744, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.0177153329260846e-05, |
|
"loss": 0.8424, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.0164935858277339e-05, |
|
"loss": 0.9567, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.0152718387293832e-05, |
|
"loss": 0.7057, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.0140500916310326e-05, |
|
"loss": 1.4146, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.012828344532682e-05, |
|
"loss": 0.8529, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.0116065974343313e-05, |
|
"loss": 1.2629, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.0103848503359806e-05, |
|
"loss": 0.8524, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.00916310323763e-05, |
|
"loss": 0.7813, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.0079413561392793e-05, |
|
"loss": 0.7327, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.0067196090409286e-05, |
|
"loss": 0.7983, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.005497861942578e-05, |
|
"loss": 1.5205, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.0042761148442273e-05, |
|
"loss": 1.5179, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.0030543677458766e-05, |
|
"loss": 0.6791, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.001832620647526e-05, |
|
"loss": 0.7707, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.0006108735491753e-05, |
|
"loss": 0.6349, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.993891264508248e-06, |
|
"loss": 0.6476, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.981673793524742e-06, |
|
"loss": 0.8216, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.969456322541235e-06, |
|
"loss": 1.0033, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.957238851557729e-06, |
|
"loss": 0.9262, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.945021380574222e-06, |
|
"loss": 1.0576, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.932803909590715e-06, |
|
"loss": 0.7989, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.920586438607209e-06, |
|
"loss": 0.8197, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.908368967623702e-06, |
|
"loss": 0.8522, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.896151496640196e-06, |
|
"loss": 0.7668, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.883934025656689e-06, |
|
"loss": 0.7285, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.871716554673182e-06, |
|
"loss": 0.7221, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.859499083689677e-06, |
|
"loss": 0.7452, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.847281612706171e-06, |
|
"loss": 1.016, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.835064141722664e-06, |
|
"loss": 0.6658, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.822846670739158e-06, |
|
"loss": 0.7427, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.810629199755651e-06, |
|
"loss": 0.7529, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.798411728772144e-06, |
|
"loss": 0.5825, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.786194257788638e-06, |
|
"loss": 1.1049, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.773976786805131e-06, |
|
"loss": 0.8419, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.761759315821626e-06, |
|
"loss": 0.6157, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.74954184483812e-06, |
|
"loss": 0.8446, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.737324373854613e-06, |
|
"loss": 0.9333, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.725106902871107e-06, |
|
"loss": 0.7765, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.7128894318876e-06, |
|
"loss": 0.7462, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.700671960904093e-06, |
|
"loss": 0.747, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.688454489920587e-06, |
|
"loss": 0.7388, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.676237018937082e-06, |
|
"loss": 0.6297, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.664019547953575e-06, |
|
"loss": 0.7654, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.651802076970069e-06, |
|
"loss": 0.6369, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.639584605986562e-06, |
|
"loss": 0.8589, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.627367135003055e-06, |
|
"loss": 0.6971, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.615149664019549e-06, |
|
"loss": 0.8982, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.602932193036042e-06, |
|
"loss": 0.9026, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.590714722052536e-06, |
|
"loss": 0.7071, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.578497251069029e-06, |
|
"loss": 0.5788, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.566279780085524e-06, |
|
"loss": 0.7693, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.554062309102018e-06, |
|
"loss": 0.5877, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.541844838118511e-06, |
|
"loss": 1.001, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.529627367135004e-06, |
|
"loss": 1.071, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.517409896151498e-06, |
|
"loss": 0.8251, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.505192425167991e-06, |
|
"loss": 0.7853, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.492974954184485e-06, |
|
"loss": 1.0731, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.480757483200978e-06, |
|
"loss": 0.7628, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.468540012217471e-06, |
|
"loss": 0.6823, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.456322541233965e-06, |
|
"loss": 0.9685, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.444105070250458e-06, |
|
"loss": 0.5667, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.431887599266952e-06, |
|
"loss": 0.8079, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.419670128283445e-06, |
|
"loss": 0.7269, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.407452657299938e-06, |
|
"loss": 0.8599, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.395235186316434e-06, |
|
"loss": 0.9517, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.383017715332927e-06, |
|
"loss": 0.9149, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.37080024434942e-06, |
|
"loss": 0.6137, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.358582773365914e-06, |
|
"loss": 0.5516, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.346365302382407e-06, |
|
"loss": 0.6649, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.3341478313989e-06, |
|
"loss": 0.5883, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.321930360415394e-06, |
|
"loss": 0.7139, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.309712889431889e-06, |
|
"loss": 0.9205, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.297495418448382e-06, |
|
"loss": 0.85, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.285277947464876e-06, |
|
"loss": 0.7718, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.27306047648137e-06, |
|
"loss": 0.825, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.260843005497863e-06, |
|
"loss": 0.8732, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.248625534514356e-06, |
|
"loss": 0.6703, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.23640806353085e-06, |
|
"loss": 0.6713, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 9.224190592547343e-06, |
|
"loss": 0.9596, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 9.211973121563838e-06, |
|
"loss": 0.8786, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 9.199755650580331e-06, |
|
"loss": 1.0151, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 9.187538179596825e-06, |
|
"loss": 0.9265, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 9.175320708613318e-06, |
|
"loss": 0.9583, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 9.163103237629812e-06, |
|
"loss": 0.7636, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 9.150885766646305e-06, |
|
"loss": 0.6883, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 9.138668295662798e-06, |
|
"loss": 0.9835, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.126450824679293e-06, |
|
"loss": 0.7913, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.114233353695787e-06, |
|
"loss": 0.6743, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.10201588271228e-06, |
|
"loss": 0.7571, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.089798411728774e-06, |
|
"loss": 0.8837, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.077580940745267e-06, |
|
"loss": 0.667, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.06536346976176e-06, |
|
"loss": 0.6893, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.053145998778254e-06, |
|
"loss": 0.7925, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.040928527794747e-06, |
|
"loss": 1.0578, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.02871105681124e-06, |
|
"loss": 0.7607, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 9.016493585827734e-06, |
|
"loss": 0.699, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 9.004276114844227e-06, |
|
"loss": 0.917, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 8.992058643860721e-06, |
|
"loss": 0.8916, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 8.979841172877214e-06, |
|
"loss": 0.7743, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.967623701893708e-06, |
|
"loss": 0.7081, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.955406230910201e-06, |
|
"loss": 0.7895, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.943188759926696e-06, |
|
"loss": 0.6802, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.93097128894319e-06, |
|
"loss": 0.7985, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.918753817959683e-06, |
|
"loss": 0.6591, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.906536346976176e-06, |
|
"loss": 0.9058, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.89431887599267e-06, |
|
"loss": 0.7653, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.882101405009163e-06, |
|
"loss": 0.6808, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.869883934025657e-06, |
|
"loss": 0.5374, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.85766646304215e-06, |
|
"loss": 0.7644, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.845448992058645e-06, |
|
"loss": 0.7714, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.833231521075138e-06, |
|
"loss": 0.8063, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.821014050091632e-06, |
|
"loss": 0.8461, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 8.808796579108125e-06, |
|
"loss": 0.7472, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 8.796579108124619e-06, |
|
"loss": 0.8145, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 8.784361637141112e-06, |
|
"loss": 0.8239, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 8.772144166157605e-06, |
|
"loss": 0.7163, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.7599266951741e-06, |
|
"loss": 0.8923, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.747709224190594e-06, |
|
"loss": 0.7993, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.735491753207087e-06, |
|
"loss": 0.6699, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.72327428222358e-06, |
|
"loss": 0.8984, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.711056811240074e-06, |
|
"loss": 0.6659, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.698839340256568e-06, |
|
"loss": 0.7883, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.686621869273061e-06, |
|
"loss": 0.7224, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.674404398289554e-06, |
|
"loss": 1.1198, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.66218692730605e-06, |
|
"loss": 0.8655, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 8.649969456322543e-06, |
|
"loss": 0.7088, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 8.637751985339036e-06, |
|
"loss": 0.9763, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 8.62553451435553e-06, |
|
"loss": 0.9184, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 8.613317043372023e-06, |
|
"loss": 0.8544, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 8.601099572388516e-06, |
|
"loss": 1.6064, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 8.58888210140501e-06, |
|
"loss": 0.728, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 8.576664630421503e-06, |
|
"loss": 0.7868, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 8.564447159437997e-06, |
|
"loss": 0.8528, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 8.55222968845449e-06, |
|
"loss": 0.6271, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 8.540012217470983e-06, |
|
"loss": 0.6622, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 8.527794746487477e-06, |
|
"loss": 0.8103, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 8.515577275503972e-06, |
|
"loss": 0.9601, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 8.503359804520465e-06, |
|
"loss": 0.9447, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 8.491142333536959e-06, |
|
"loss": 0.8028, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 8.478924862553452e-06, |
|
"loss": 0.8105, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 8.466707391569946e-06, |
|
"loss": 0.85, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 8.454489920586439e-06, |
|
"loss": 1.0883, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 8.442272449602932e-06, |
|
"loss": 0.87, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 8.430054978619426e-06, |
|
"loss": 1.6745, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 8.41783750763592e-06, |
|
"loss": 0.694, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 8.405620036652413e-06, |
|
"loss": 1.0145, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 8.393402565668908e-06, |
|
"loss": 0.637, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 8.381185094685401e-06, |
|
"loss": 0.5879, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 8.368967623701895e-06, |
|
"loss": 0.8478, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 8.356750152718388e-06, |
|
"loss": 0.7297, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 8.344532681734881e-06, |
|
"loss": 0.9499, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 8.332315210751375e-06, |
|
"loss": 1.1121, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 8.320097739767868e-06, |
|
"loss": 0.7404, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 8.307880268784362e-06, |
|
"loss": 0.8251, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 8.295662797800857e-06, |
|
"loss": 0.7374, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 8.28344532681735e-06, |
|
"loss": 0.6983, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 8.271227855833843e-06, |
|
"loss": 0.6778, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 8.259010384850337e-06, |
|
"loss": 0.7844, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 8.24679291386683e-06, |
|
"loss": 0.7389, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 8.234575442883324e-06, |
|
"loss": 0.6656, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 8.222357971899817e-06, |
|
"loss": 0.7048, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 8.210140500916312e-06, |
|
"loss": 0.8796, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 8.197923029932806e-06, |
|
"loss": 0.9691, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 8.185705558949299e-06, |
|
"loss": 1.071, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 8.173488087965792e-06, |
|
"loss": 0.7763, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 8.161270616982286e-06, |
|
"loss": 0.9646, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 8.149053145998779e-06, |
|
"loss": 0.7653, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 8.136835675015273e-06, |
|
"loss": 0.6659, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 8.124618204031766e-06, |
|
"loss": 0.8843, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 8.11240073304826e-06, |
|
"loss": 0.9721, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 8.100183262064754e-06, |
|
"loss": 0.8309, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 8.087965791081248e-06, |
|
"loss": 0.8223, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 8.075748320097741e-06, |
|
"loss": 0.705, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 8.063530849114235e-06, |
|
"loss": 0.7979, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 8.051313378130728e-06, |
|
"loss": 0.7743, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 8.039095907147221e-06, |
|
"loss": 0.9493, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 8.026878436163715e-06, |
|
"loss": 0.6721, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 8.014660965180208e-06, |
|
"loss": 0.6902, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 8.002443494196702e-06, |
|
"loss": 1.0082, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 7.990226023213195e-06, |
|
"loss": 0.7571, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 7.978008552229688e-06, |
|
"loss": 0.9917, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 7.965791081246182e-06, |
|
"loss": 0.8345, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 7.953573610262675e-06, |
|
"loss": 0.9306, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 7.941356139279169e-06, |
|
"loss": 0.7211, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 7.929138668295664e-06, |
|
"loss": 0.8456, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 7.916921197312157e-06, |
|
"loss": 0.5964, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 7.90470372632865e-06, |
|
"loss": 0.6603, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 7.892486255345144e-06, |
|
"loss": 1.0281, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 7.880268784361637e-06, |
|
"loss": 0.7166, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 7.86805131337813e-06, |
|
"loss": 0.8704, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 7.855833842394624e-06, |
|
"loss": 0.9504, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 7.84361637141112e-06, |
|
"loss": 0.9384, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 7.831398900427613e-06, |
|
"loss": 0.8495, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 7.819181429444106e-06, |
|
"loss": 0.7269, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 7.8069639584606e-06, |
|
"loss": 0.69, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 7.794746487477093e-06, |
|
"loss": 0.6685, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 7.782529016493586e-06, |
|
"loss": 1.1363, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 7.77031154551008e-06, |
|
"loss": 0.8077, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 7.758094074526573e-06, |
|
"loss": 0.8978, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 7.745876603543068e-06, |
|
"loss": 0.8733, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 7.733659132559562e-06, |
|
"loss": 0.775, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 7.721441661576055e-06, |
|
"loss": 0.6595, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 7.709224190592548e-06, |
|
"loss": 0.8059, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 7.697006719609042e-06, |
|
"loss": 0.6729, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 7.684789248625535e-06, |
|
"loss": 0.8428, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 7.672571777642029e-06, |
|
"loss": 0.7757, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 7.660354306658524e-06, |
|
"loss": 0.7084, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 7.648136835675017e-06, |
|
"loss": 1.0824, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 7.63591936469151e-06, |
|
"loss": 0.8404, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.623701893708003e-06, |
|
"loss": 0.6158, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.611484422724496e-06, |
|
"loss": 0.6988, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.59926695174099e-06, |
|
"loss": 0.9286, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.587049480757483e-06, |
|
"loss": 0.7154, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.574832009773977e-06, |
|
"loss": 1.5328, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.562614538790472e-06, |
|
"loss": 0.657, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.550397067806965e-06, |
|
"loss": 0.9424, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.5381795968234585e-06, |
|
"loss": 0.6289, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.525962125839952e-06, |
|
"loss": 0.5821, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.513744654856445e-06, |
|
"loss": 0.6804, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.501527183872939e-06, |
|
"loss": 0.8109, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.489309712889432e-06, |
|
"loss": 0.8101, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.4770922419059255e-06, |
|
"loss": 0.7429, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.46487477092242e-06, |
|
"loss": 0.9075, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.452657299938913e-06, |
|
"loss": 1.0113, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.440439828955407e-06, |
|
"loss": 0.861, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.4282223579719e-06, |
|
"loss": 0.7815, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.416004886988394e-06, |
|
"loss": 0.8278, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.403787416004888e-06, |
|
"loss": 0.8701, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.391569945021381e-06, |
|
"loss": 0.9023, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.379352474037875e-06, |
|
"loss": 0.9644, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.367135003054369e-06, |
|
"loss": 0.94, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.354917532070862e-06, |
|
"loss": 1.2196, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.3427000610873555e-06, |
|
"loss": 0.9333, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.330482590103849e-06, |
|
"loss": 1.4036, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.318265119120342e-06, |
|
"loss": 0.6719, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.306047648136836e-06, |
|
"loss": 0.8958, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.293830177153329e-06, |
|
"loss": 0.9586, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.281612706169824e-06, |
|
"loss": 1.1638, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.269395235186318e-06, |
|
"loss": 0.7935, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 7.257177764202811e-06, |
|
"loss": 0.7792, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 7.244960293219304e-06, |
|
"loss": 0.7481, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 7.232742822235798e-06, |
|
"loss": 0.7824, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 7.220525351252291e-06, |
|
"loss": 0.7779, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 7.208307880268785e-06, |
|
"loss": 0.8583, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 7.196090409285279e-06, |
|
"loss": 0.9604, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 7.183872938301772e-06, |
|
"loss": 0.9028, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 7.171655467318266e-06, |
|
"loss": 0.974, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 7.159437996334759e-06, |
|
"loss": 0.8217, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 7.1472205253512525e-06, |
|
"loss": 0.9163, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 7.135003054367746e-06, |
|
"loss": 0.9282, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 7.122785583384239e-06, |
|
"loss": 0.8591, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.110568112400733e-06, |
|
"loss": 1.0307, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.098350641417228e-06, |
|
"loss": 0.743, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.086133170433721e-06, |
|
"loss": 0.6567, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.0739156994502146e-06, |
|
"loss": 1.1268, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.061698228466708e-06, |
|
"loss": 0.783, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.049480757483201e-06, |
|
"loss": 0.7813, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.037263286499695e-06, |
|
"loss": 0.8186, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.025045815516188e-06, |
|
"loss": 0.635, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.012828344532682e-06, |
|
"loss": 0.6522, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.000610873549176e-06, |
|
"loss": 0.7262, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.98839340256567e-06, |
|
"loss": 0.7929, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.9761759315821635e-06, |
|
"loss": 0.7749, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.963958460598657e-06, |
|
"loss": 0.7596, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.95174098961515e-06, |
|
"loss": 0.922, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.939523518631644e-06, |
|
"loss": 0.5855, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.927306047648137e-06, |
|
"loss": 0.7604, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.915088576664631e-06, |
|
"loss": 0.8295, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.902871105681125e-06, |
|
"loss": 0.8448, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.890653634697618e-06, |
|
"loss": 0.7351, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.8784361637141115e-06, |
|
"loss": 0.942, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.866218692730605e-06, |
|
"loss": 0.7727, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.854001221747098e-06, |
|
"loss": 0.6829, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.841783750763592e-06, |
|
"loss": 0.6964, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.829566279780087e-06, |
|
"loss": 0.8976, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.81734880879658e-06, |
|
"loss": 0.8288, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.805131337813074e-06, |
|
"loss": 1.0624, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.792913866829567e-06, |
|
"loss": 0.8177, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.7806963958460604e-06, |
|
"loss": 0.6992, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.768478924862554e-06, |
|
"loss": 0.7451, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.756261453879047e-06, |
|
"loss": 0.6802, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.744043982895541e-06, |
|
"loss": 0.7435, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.731826511912035e-06, |
|
"loss": 0.8013, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.719609040928528e-06, |
|
"loss": 0.8073, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.707391569945022e-06, |
|
"loss": 0.888, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.695174098961515e-06, |
|
"loss": 0.6691, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.682956627978009e-06, |
|
"loss": 0.8261, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.670739156994503e-06, |
|
"loss": 0.6651, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.658521686010996e-06, |
|
"loss": 0.7034, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.64630421502749e-06, |
|
"loss": 0.8504, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.634086744043984e-06, |
|
"loss": 0.8357, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.621869273060477e-06, |
|
"loss": 0.8512, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.609651802076971e-06, |
|
"loss": 1.2667, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.597434331093464e-06, |
|
"loss": 0.7462, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.585216860109957e-06, |
|
"loss": 0.839, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.572999389126451e-06, |
|
"loss": 0.8414, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.560781918142944e-06, |
|
"loss": 0.7237, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.548564447159439e-06, |
|
"loss": 0.9485, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 6.536346976175933e-06, |
|
"loss": 0.833, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 6.524129505192426e-06, |
|
"loss": 0.7267, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 6.5119120342089195e-06, |
|
"loss": 0.8648, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 6.499694563225413e-06, |
|
"loss": 0.8828, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 6.487477092241906e-06, |
|
"loss": 0.7236, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 6.4752596212584e-06, |
|
"loss": 1.5567, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 6.463042150274894e-06, |
|
"loss": 0.9622, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 6.450824679291387e-06, |
|
"loss": 0.9306, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 6.438607208307881e-06, |
|
"loss": 0.8828, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 6.426389737324374e-06, |
|
"loss": 0.9214, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 6.4141722663408676e-06, |
|
"loss": 0.7418, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 6.401954795357361e-06, |
|
"loss": 0.7288, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 6.389737324373854e-06, |
|
"loss": 0.7104, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.377519853390348e-06, |
|
"loss": 0.7611, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.365302382406843e-06, |
|
"loss": 0.7826, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.353084911423336e-06, |
|
"loss": 0.8067, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.34086744043983e-06, |
|
"loss": 0.9108, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.328649969456323e-06, |
|
"loss": 0.7216, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.3164324984728165e-06, |
|
"loss": 0.661, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.30421502748931e-06, |
|
"loss": 1.8794, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.291997556505803e-06, |
|
"loss": 0.863, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.2797800855222975e-06, |
|
"loss": 0.7436, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.267562614538791e-06, |
|
"loss": 0.7005, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.255345143555285e-06, |
|
"loss": 0.858, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.243127672571779e-06, |
|
"loss": 0.9276, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.230910201588272e-06, |
|
"loss": 0.8155, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.218692730604765e-06, |
|
"loss": 0.9534, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.206475259621259e-06, |
|
"loss": 0.9115, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.194257788637752e-06, |
|
"loss": 0.8754, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.1820403176542464e-06, |
|
"loss": 0.8979, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.16982284667074e-06, |
|
"loss": 0.6235, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.157605375687233e-06, |
|
"loss": 0.8532, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.145387904703727e-06, |
|
"loss": 0.5879, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.13317043372022e-06, |
|
"loss": 1.1309, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.1209529627367134e-06, |
|
"loss": 0.7165, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.108735491753207e-06, |
|
"loss": 0.7236, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.096518020769702e-06, |
|
"loss": 0.6449, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.084300549786195e-06, |
|
"loss": 0.7674, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.072083078802689e-06, |
|
"loss": 0.7555, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.059865607819182e-06, |
|
"loss": 0.9268, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.0476481368356755e-06, |
|
"loss": 0.859, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.035430665852169e-06, |
|
"loss": 0.9497, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 6.023213194868662e-06, |
|
"loss": 0.7941, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 6.010995723885156e-06, |
|
"loss": 0.7504, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.99877825290165e-06, |
|
"loss": 0.9285, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.986560781918143e-06, |
|
"loss": 0.6759, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.974343310934637e-06, |
|
"loss": 0.8182, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.96212583995113e-06, |
|
"loss": 0.7187, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.949908368967624e-06, |
|
"loss": 0.7205, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.937690897984118e-06, |
|
"loss": 0.8731, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.925473427000611e-06, |
|
"loss": 0.7688, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.9132559560171055e-06, |
|
"loss": 0.997, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.901038485033599e-06, |
|
"loss": 0.8691, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.888821014050092e-06, |
|
"loss": 0.8828, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.876603543066586e-06, |
|
"loss": 0.7225, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 5.864386072083079e-06, |
|
"loss": 0.7871, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 5.8521686010995725e-06, |
|
"loss": 0.7691, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 5.839951130116066e-06, |
|
"loss": 0.5135, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 5.827733659132559e-06, |
|
"loss": 0.7871, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 5.815516188149054e-06, |
|
"loss": 0.6679, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 5.803298717165548e-06, |
|
"loss": 1.031, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 5.791081246182041e-06, |
|
"loss": 0.7916, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 5.778863775198535e-06, |
|
"loss": 0.8295, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 5.766646304215028e-06, |
|
"loss": 0.7844, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 5.754428833231521e-06, |
|
"loss": 0.9051, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 5.742211362248015e-06, |
|
"loss": 0.8259, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 5.729993891264509e-06, |
|
"loss": 0.9582, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 5.7177764202810025e-06, |
|
"loss": 0.642, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 5.705558949297496e-06, |
|
"loss": 0.991, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 5.693341478313989e-06, |
|
"loss": 0.6479, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 5.681124007330483e-06, |
|
"loss": 0.7287, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 5.668906536346976e-06, |
|
"loss": 0.8404, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.6566890653634695e-06, |
|
"loss": 0.6642, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.644471594379963e-06, |
|
"loss": 0.7542, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.632254123396458e-06, |
|
"loss": 0.725, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.620036652412951e-06, |
|
"loss": 0.9232, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.607819181429445e-06, |
|
"loss": 0.8276, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.595601710445938e-06, |
|
"loss": 0.8061, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.583384239462432e-06, |
|
"loss": 0.7899, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.571166768478925e-06, |
|
"loss": 0.6796, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.558949297495418e-06, |
|
"loss": 1.2144, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.546731826511913e-06, |
|
"loss": 0.9161, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.534514355528406e-06, |
|
"loss": 0.703, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.5222968845449e-06, |
|
"loss": 0.7895, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.510079413561394e-06, |
|
"loss": 0.8698, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.497861942577887e-06, |
|
"loss": 0.8151, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.4856444715943805e-06, |
|
"loss": 0.8188, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.473427000610874e-06, |
|
"loss": 0.7921, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.461209529627367e-06, |
|
"loss": 0.9679, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 5.4489920586438615e-06, |
|
"loss": 0.9382, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 5.436774587660355e-06, |
|
"loss": 0.8972, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 5.424557116676848e-06, |
|
"loss": 0.9547, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 5.412339645693342e-06, |
|
"loss": 1.0031, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 5.400122174709835e-06, |
|
"loss": 0.8118, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 5.3879047037263286e-06, |
|
"loss": 1.0146, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 5.375687232742822e-06, |
|
"loss": 0.823, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 5.363469761759317e-06, |
|
"loss": 0.612, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.3512522907758105e-06, |
|
"loss": 0.5458, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.339034819792304e-06, |
|
"loss": 0.7479, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.326817348808797e-06, |
|
"loss": 0.7901, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.314599877825291e-06, |
|
"loss": 0.8698, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.302382406841784e-06, |
|
"loss": 0.878, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.2901649358582775e-06, |
|
"loss": 0.9557, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.277947464874771e-06, |
|
"loss": 0.7904, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.265729993891265e-06, |
|
"loss": 0.5534, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.2535125229077585e-06, |
|
"loss": 0.85, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.241295051924252e-06, |
|
"loss": 0.8382, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.229077580940745e-06, |
|
"loss": 0.8567, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.216860109957239e-06, |
|
"loss": 0.93, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.204642638973733e-06, |
|
"loss": 0.7963, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.192425167990226e-06, |
|
"loss": 0.7454, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.180207697006721e-06, |
|
"loss": 0.7997, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.167990226023214e-06, |
|
"loss": 0.9103, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.155772755039707e-06, |
|
"loss": 1.0287, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.143555284056201e-06, |
|
"loss": 0.5028, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.131337813072694e-06, |
|
"loss": 0.7039, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.119120342089188e-06, |
|
"loss": 0.7169, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.106902871105681e-06, |
|
"loss": 0.6316, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.0946854001221744e-06, |
|
"loss": 0.9487, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.0824679291386695e-06, |
|
"loss": 0.956, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.070250458155163e-06, |
|
"loss": 0.8551, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.058032987171656e-06, |
|
"loss": 0.7661, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.04581551618815e-06, |
|
"loss": 0.9344, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 5.033598045204643e-06, |
|
"loss": 0.8179, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 5.0213805742211365e-06, |
|
"loss": 0.5385, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 5.00916310323763e-06, |
|
"loss": 0.7823, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.996945632254124e-06, |
|
"loss": 0.5261, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.984728161270618e-06, |
|
"loss": 0.8728, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.972510690287111e-06, |
|
"loss": 0.7396, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.960293219303604e-06, |
|
"loss": 0.595, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.948075748320098e-06, |
|
"loss": 0.5918, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.935858277336591e-06, |
|
"loss": 0.8228, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.9236408063530854e-06, |
|
"loss": 0.8024, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.911423335369579e-06, |
|
"loss": 1.4011, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.899205864386072e-06, |
|
"loss": 0.8171, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.886988393402566e-06, |
|
"loss": 0.8846, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.87477092241906e-06, |
|
"loss": 0.8681, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.862553451435553e-06, |
|
"loss": 0.6814, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.850335980452047e-06, |
|
"loss": 0.8267, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.838118509468541e-06, |
|
"loss": 0.6486, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.825901038485034e-06, |
|
"loss": 0.5926, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.813683567501528e-06, |
|
"loss": 0.7433, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.801466096518021e-06, |
|
"loss": 0.4934, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.7892486255345146e-06, |
|
"loss": 1.2426, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.777031154551009e-06, |
|
"loss": 0.8573, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.764813683567502e-06, |
|
"loss": 0.9291, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.752596212583996e-06, |
|
"loss": 0.6507, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.740378741600489e-06, |
|
"loss": 0.9251, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.728161270616982e-06, |
|
"loss": 0.7677, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.715943799633476e-06, |
|
"loss": 0.8849, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.703726328649969e-06, |
|
"loss": 0.6406, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.6915088576664635e-06, |
|
"loss": 0.6577, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.679291386682957e-06, |
|
"loss": 0.7591, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.66707391569945e-06, |
|
"loss": 0.7739, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.6548564447159445e-06, |
|
"loss": 0.9422, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.642638973732438e-06, |
|
"loss": 0.8479, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.630421502748931e-06, |
|
"loss": 0.8224, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.618204031765425e-06, |
|
"loss": 0.6312, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.605986560781919e-06, |
|
"loss": 0.7619, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.593769089798412e-06, |
|
"loss": 0.8482, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.581551618814906e-06, |
|
"loss": 0.5465, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.569334147831399e-06, |
|
"loss": 0.7035, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.557116676847893e-06, |
|
"loss": 0.6968, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.544899205864387e-06, |
|
"loss": 0.7064, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.53268173488088e-06, |
|
"loss": 0.7515, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.520464263897374e-06, |
|
"loss": 0.5318, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.508246792913867e-06, |
|
"loss": 0.713, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.4960293219303604e-06, |
|
"loss": 0.7113, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.483811850946854e-06, |
|
"loss": 0.676, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.471594379963348e-06, |
|
"loss": 0.772, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.4593769089798415e-06, |
|
"loss": 0.8913, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.447159437996335e-06, |
|
"loss": 0.6644, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.434941967012828e-06, |
|
"loss": 0.684, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.4227244960293225e-06, |
|
"loss": 0.7328, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.410507025045816e-06, |
|
"loss": 0.5559, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.398289554062309e-06, |
|
"loss": 0.7789, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.386072083078803e-06, |
|
"loss": 0.6681, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.373854612095297e-06, |
|
"loss": 0.7354, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.36163714111179e-06, |
|
"loss": 0.8043, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.349419670128284e-06, |
|
"loss": 0.7991, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.337202199144777e-06, |
|
"loss": 0.9516, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.3249847281612714e-06, |
|
"loss": 0.571, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.312767257177765e-06, |
|
"loss": 0.5836, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.300549786194258e-06, |
|
"loss": 0.6762, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.288332315210752e-06, |
|
"loss": 0.7723, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.276114844227245e-06, |
|
"loss": 0.6468, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.2638973732437384e-06, |
|
"loss": 0.6794, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.251679902260233e-06, |
|
"loss": 0.8168, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.239462431276726e-06, |
|
"loss": 0.6328, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.2272449602932195e-06, |
|
"loss": 0.677, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.215027489309713e-06, |
|
"loss": 0.9223, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.202810018326206e-06, |
|
"loss": 0.7854, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.1905925473427006e-06, |
|
"loss": 0.9142, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.178375076359194e-06, |
|
"loss": 0.6388, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.166157605375687e-06, |
|
"loss": 0.7558, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.153940134392181e-06, |
|
"loss": 0.6888, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.141722663408675e-06, |
|
"loss": 0.7583, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.129505192425168e-06, |
|
"loss": 0.7394, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.117287721441662e-06, |
|
"loss": 0.5955, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.105070250458156e-06, |
|
"loss": 0.6712, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.0928527794746495e-06, |
|
"loss": 0.7221, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.080635308491143e-06, |
|
"loss": 0.8962, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.068417837507636e-06, |
|
"loss": 1.1711, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.05620036652413e-06, |
|
"loss": 0.7221, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.043982895540624e-06, |
|
"loss": 0.7519, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.031765424557117e-06, |
|
"loss": 0.6935, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.019547953573611e-06, |
|
"loss": 1.0917, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.007330482590104e-06, |
|
"loss": 0.6309, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.9951130116065975e-06, |
|
"loss": 0.72, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.982895540623091e-06, |
|
"loss": 0.7765, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.970678069639584e-06, |
|
"loss": 0.5984, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.9584605986560786e-06, |
|
"loss": 0.6353, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.946243127672572e-06, |
|
"loss": 0.7519, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.934025656689065e-06, |
|
"loss": 0.6821, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.92180818570556e-06, |
|
"loss": 0.6824, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.909590714722053e-06, |
|
"loss": 0.8789, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.8973732437385464e-06, |
|
"loss": 0.5802, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.88515577275504e-06, |
|
"loss": 0.7663, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.872938301771534e-06, |
|
"loss": 0.6837, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.8607208307880275e-06, |
|
"loss": 0.5341, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.848503359804521e-06, |
|
"loss": 0.6034, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.836285888821014e-06, |
|
"loss": 0.9526, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.8240684178375085e-06, |
|
"loss": 1.354, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.8118509468540015e-06, |
|
"loss": 0.9056, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.799633475870495e-06, |
|
"loss": 0.6277, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.7874160048869883e-06, |
|
"loss": 0.7142, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.7751985339034826e-06, |
|
"loss": 0.6507, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.762981062919976e-06, |
|
"loss": 0.7237, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.7507635919364694e-06, |
|
"loss": 0.8355, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.7385461209529628e-06, |
|
"loss": 0.6059, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.7263286499694566e-06, |
|
"loss": 1.1816, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.71411117898595e-06, |
|
"loss": 0.6952, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.701893708002444e-06, |
|
"loss": 0.7826, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.6896762370189376e-06, |
|
"loss": 0.6933, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.677458766035431e-06, |
|
"loss": 0.753, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.6652412950519244e-06, |
|
"loss": 0.8419, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.653023824068418e-06, |
|
"loss": 0.7823, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.640806353084912e-06, |
|
"loss": 0.7519, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.6285888821014055e-06, |
|
"loss": 0.8257, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.616371411117899e-06, |
|
"loss": 0.9042, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.6041539401343923e-06, |
|
"loss": 0.7135, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.591936469150886e-06, |
|
"loss": 0.6593, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.5797189981673795e-06, |
|
"loss": 0.866, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.567501527183873e-06, |
|
"loss": 0.7878, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.5552840562003663e-06, |
|
"loss": 0.6304, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.5430665852168606e-06, |
|
"loss": 0.6375, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.530849114233354e-06, |
|
"loss": 0.7406, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.5186316432498474e-06, |
|
"loss": 0.7167, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.506414172266341e-06, |
|
"loss": 0.9316, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.494196701282835e-06, |
|
"loss": 0.7046, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.4819792302993284e-06, |
|
"loss": 0.6701, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.469761759315822e-06, |
|
"loss": 0.774, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.4575442883323157e-06, |
|
"loss": 0.6692, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.445326817348809e-06, |
|
"loss": 0.9899, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.4331093463653025e-06, |
|
"loss": 0.7039, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.420891875381796e-06, |
|
"loss": 0.7872, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.40867440439829e-06, |
|
"loss": 0.8564, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.3964569334147835e-06, |
|
"loss": 0.6566, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.384239462431277e-06, |
|
"loss": 0.5997, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.3720219914477703e-06, |
|
"loss": 0.7358, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.359804520464264e-06, |
|
"loss": 0.6656, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.3475870494807575e-06, |
|
"loss": 0.5024, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.3353695784972514e-06, |
|
"loss": 0.5852, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.323152107513745e-06, |
|
"loss": 1.4108, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.3109346365302386e-06, |
|
"loss": 0.7409, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.298717165546732e-06, |
|
"loss": 0.8206, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.2864996945632254e-06, |
|
"loss": 0.5859, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.2742822235797197e-06, |
|
"loss": 0.6545, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.262064752596213e-06, |
|
"loss": 0.9185, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.2498472816127065e-06, |
|
"loss": 0.6621, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.2376298106292e-06, |
|
"loss": 0.709, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.2254123396456937e-06, |
|
"loss": 0.794, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.213194868662187e-06, |
|
"loss": 0.9259, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.2009773976786805e-06, |
|
"loss": 0.6645, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.188759926695174e-06, |
|
"loss": 0.5279, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.176542455711668e-06, |
|
"loss": 0.5843, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.1643249847281615e-06, |
|
"loss": 0.7248, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.152107513744655e-06, |
|
"loss": 0.9155, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.1398900427611488e-06, |
|
"loss": 0.8249, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.1276725717776426e-06, |
|
"loss": 0.8832, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.115455100794136e-06, |
|
"loss": 0.7879, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.1032376298106294e-06, |
|
"loss": 0.8555, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.0910201588271232e-06, |
|
"loss": 0.8502, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.0788026878436166e-06, |
|
"loss": 0.8743, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.06658521686011e-06, |
|
"loss": 0.9172, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.0543677458766034e-06, |
|
"loss": 1.0247, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.0421502748930977e-06, |
|
"loss": 0.6688, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 3.029932803909591e-06, |
|
"loss": 0.808, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 3.0177153329260845e-06, |
|
"loss": 0.6848, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 3.005497861942578e-06, |
|
"loss": 0.7437, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 2.9932803909590717e-06, |
|
"loss": 0.7942, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 2.981062919975565e-06, |
|
"loss": 0.6665, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.968845448992059e-06, |
|
"loss": 0.6577, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.9566279780085528e-06, |
|
"loss": 0.7812, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.944410507025046e-06, |
|
"loss": 0.9, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.9321930360415396e-06, |
|
"loss": 0.7488, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 2.919975565058033e-06, |
|
"loss": 0.8175, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 2.907758094074527e-06, |
|
"loss": 0.8135, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 2.8955406230910206e-06, |
|
"loss": 0.7171, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 2.883323152107514e-06, |
|
"loss": 0.7223, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 2.8711056811240074e-06, |
|
"loss": 0.7668, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 2.8588882101405012e-06, |
|
"loss": 0.9482, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 2.8466707391569946e-06, |
|
"loss": 0.6344, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 2.834453268173488e-06, |
|
"loss": 0.8507, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.8222357971899814e-06, |
|
"loss": 0.7731, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.8100183262064757e-06, |
|
"loss": 0.8345, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.797800855222969e-06, |
|
"loss": 0.9627, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.7855833842394625e-06, |
|
"loss": 0.8368, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.7733659132559563e-06, |
|
"loss": 0.763, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.76114844227245e-06, |
|
"loss": 0.7243, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.7489309712889435e-06, |
|
"loss": 0.7533, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.736713500305437e-06, |
|
"loss": 0.7682, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.7244960293219308e-06, |
|
"loss": 0.566, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 2.712278558338424e-06, |
|
"loss": 0.8787, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 2.7000610873549176e-06, |
|
"loss": 0.693, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 2.687843616371411e-06, |
|
"loss": 0.8619, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 2.6756261453879052e-06, |
|
"loss": 0.7697, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 2.6634086744043986e-06, |
|
"loss": 0.796, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 2.651191203420892e-06, |
|
"loss": 0.7758, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 2.6389737324373854e-06, |
|
"loss": 0.6839, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 2.6267562614538793e-06, |
|
"loss": 0.6863, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.6145387904703727e-06, |
|
"loss": 0.7189, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.6023213194868665e-06, |
|
"loss": 0.8117, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.5901038485033603e-06, |
|
"loss": 0.7661, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.5778863775198537e-06, |
|
"loss": 0.7381, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.565668906536347e-06, |
|
"loss": 0.7369, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 2.5534514355528405e-06, |
|
"loss": 0.568, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 2.5412339645693348e-06, |
|
"loss": 1.1623, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 2.529016493585828e-06, |
|
"loss": 1.0099, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 2.5167990226023216e-06, |
|
"loss": 0.7138, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.504581551618815e-06, |
|
"loss": 0.7981, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.492364080635309e-06, |
|
"loss": 0.6615, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.480146609651802e-06, |
|
"loss": 0.683, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.4679291386682956e-06, |
|
"loss": 0.7059, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.4557116676847894e-06, |
|
"loss": 0.5579, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.443494196701283e-06, |
|
"loss": 0.8688, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.4312767257177766e-06, |
|
"loss": 0.8973, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.4190592547342705e-06, |
|
"loss": 0.8341, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 2.406841783750764e-06, |
|
"loss": 0.7981, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 2.3946243127672573e-06, |
|
"loss": 0.7808, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 2.382406841783751e-06, |
|
"loss": 0.8974, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 2.3701893708002445e-06, |
|
"loss": 0.7289, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.357971899816738e-06, |
|
"loss": 0.7379, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.3457544288332317e-06, |
|
"loss": 0.6979, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.333536957849725e-06, |
|
"loss": 0.665, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.321319486866219e-06, |
|
"loss": 0.7104, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.3091020158827124e-06, |
|
"loss": 0.7952, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 2.296884544899206e-06, |
|
"loss": 0.7456, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 2.2846670739156996e-06, |
|
"loss": 0.9251, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 2.2724496029321934e-06, |
|
"loss": 0.6979, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 2.260232131948687e-06, |
|
"loss": 0.6558, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.2480146609651802e-06, |
|
"loss": 0.4833, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.235797189981674e-06, |
|
"loss": 0.7859, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.2235797189981674e-06, |
|
"loss": 0.9417, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.2113622480146613e-06, |
|
"loss": 0.7454, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 2.1991447770311547e-06, |
|
"loss": 0.8095, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 2.1869273060476485e-06, |
|
"loss": 0.9811, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 2.174709835064142e-06, |
|
"loss": 0.9414, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 2.1624923640806357e-06, |
|
"loss": 0.8008, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.150274893097129e-06, |
|
"loss": 0.6651, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.1380574221136225e-06, |
|
"loss": 0.6623, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.1258399511301163e-06, |
|
"loss": 1.1989, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.1136224801466097e-06, |
|
"loss": 0.6598, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.101405009163103e-06, |
|
"loss": 1.6785, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 2.089187538179597e-06, |
|
"loss": 0.6844, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 2.0769700671960904e-06, |
|
"loss": 0.8328, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 2.064752596212584e-06, |
|
"loss": 0.6601, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 2.052535125229078e-06, |
|
"loss": 0.6997, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 2.0403176542455714e-06, |
|
"loss": 0.7009, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 2.028100183262065e-06, |
|
"loss": 0.6133, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 2.0158827122785587e-06, |
|
"loss": 0.8755, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 2.003665241295052e-06, |
|
"loss": 0.9878, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.9914477703115455e-06, |
|
"loss": 0.6225, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.9792302993280393e-06, |
|
"loss": 0.7081, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.9670128283445327e-06, |
|
"loss": 0.707, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.9547953573610265e-06, |
|
"loss": 0.6, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.94257788637752e-06, |
|
"loss": 0.7417, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.9303604153940137e-06, |
|
"loss": 0.6713, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.918142944410507e-06, |
|
"loss": 0.6873, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.9059254734270008e-06, |
|
"loss": 0.688, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.8937080024434942e-06, |
|
"loss": 0.6411, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.881490531459988e-06, |
|
"loss": 0.6042, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.8692730604764814e-06, |
|
"loss": 0.8837, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.857055589492975e-06, |
|
"loss": 0.6633, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.8448381185094688e-06, |
|
"loss": 0.7232, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.8326206475259622e-06, |
|
"loss": 0.9185, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.820403176542456e-06, |
|
"loss": 0.7827, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.8081857055589494e-06, |
|
"loss": 0.8085, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.795968234575443e-06, |
|
"loss": 0.6985, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7837507635919365e-06, |
|
"loss": 0.7934, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7715332926084303e-06, |
|
"loss": 0.8543, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7593158216249237e-06, |
|
"loss": 0.8494, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7470983506414175e-06, |
|
"loss": 0.8099, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.734880879657911e-06, |
|
"loss": 0.9976, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.7226634086744045e-06, |
|
"loss": 0.7376, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.710445937690898e-06, |
|
"loss": 1.1162, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.6982284667073918e-06, |
|
"loss": 0.7792, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.6860109957238852e-06, |
|
"loss": 0.6776, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.6737935247403788e-06, |
|
"loss": 0.707, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.6615760537568726e-06, |
|
"loss": 0.7437, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.649358582773366e-06, |
|
"loss": 0.6561, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.6371411117898598e-06, |
|
"loss": 0.8136, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.6249236408063532e-06, |
|
"loss": 1.4898, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.6127061698228468e-06, |
|
"loss": 0.6557, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.6004886988393402e-06, |
|
"loss": 0.6073, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.588271227855834e-06, |
|
"loss": 0.6854, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.5760537568723275e-06, |
|
"loss": 0.7424, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.5638362858888213e-06, |
|
"loss": 0.7488, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.5516188149053147e-06, |
|
"loss": 0.8251, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.5394013439218083e-06, |
|
"loss": 0.8284, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.5271838729383017e-06, |
|
"loss": 0.8033, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.5149664019547955e-06, |
|
"loss": 0.8289, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.502748930971289e-06, |
|
"loss": 0.7737, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.4905314599877826e-06, |
|
"loss": 0.7291, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.4783139890042764e-06, |
|
"loss": 0.8116, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.4660965180207698e-06, |
|
"loss": 1.0451, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.4538790470372636e-06, |
|
"loss": 0.7102, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.441661576053757e-06, |
|
"loss": 0.7109, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.4294441050702506e-06, |
|
"loss": 0.7669, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.417226634086744e-06, |
|
"loss": 0.8496, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.4050091631032378e-06, |
|
"loss": 0.5765, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.3927916921197312e-06, |
|
"loss": 0.7913, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.380574221136225e-06, |
|
"loss": 0.7969, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.3683567501527185e-06, |
|
"loss": 0.7679, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.356139279169212e-06, |
|
"loss": 0.6865, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.3439218081857055e-06, |
|
"loss": 0.8489, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.3317043372021993e-06, |
|
"loss": 0.7835, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.3194868662186927e-06, |
|
"loss": 0.8376, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.3072693952351863e-06, |
|
"loss": 0.7795, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.2950519242516802e-06, |
|
"loss": 0.5199, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.2828344532681736e-06, |
|
"loss": 0.7229, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.2706169822846674e-06, |
|
"loss": 0.6173, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.2583995113011608e-06, |
|
"loss": 0.7001, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.2461820403176544e-06, |
|
"loss": 0.9355, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.2339645693341478e-06, |
|
"loss": 0.8718, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.2217470983506414e-06, |
|
"loss": 0.8655, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.2095296273671352e-06, |
|
"loss": 0.7935, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.1973121563836286e-06, |
|
"loss": 0.8255, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.1850946854001223e-06, |
|
"loss": 0.7109, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.1728772144166159e-06, |
|
"loss": 0.9435, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.1606597434331095e-06, |
|
"loss": 0.8143, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.148442272449603e-06, |
|
"loss": 0.6305, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.1362248014660967e-06, |
|
"loss": 0.6734, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.1240073304825901e-06, |
|
"loss": 0.6738, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.1117898594990837e-06, |
|
"loss": 0.8927, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.0995723885155773e-06, |
|
"loss": 0.612, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.087354917532071e-06, |
|
"loss": 0.6755, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.0751374465485646e-06, |
|
"loss": 0.6552, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.0629199755650582e-06, |
|
"loss": 0.8736, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.0507025045815516e-06, |
|
"loss": 0.8527, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.0384850335980452e-06, |
|
"loss": 0.5696, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.026267562614539e-06, |
|
"loss": 1.0667, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.0140500916310324e-06, |
|
"loss": 0.665, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.001832620647526e-06, |
|
"loss": 0.8809, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 9.896151496640196e-07, |
|
"loss": 0.7854, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 9.773976786805133e-07, |
|
"loss": 0.8904, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 9.651802076970069e-07, |
|
"loss": 0.7939, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 9.529627367135004e-07, |
|
"loss": 0.9678, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 9.40745265729994e-07, |
|
"loss": 0.5953, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 9.285277947464875e-07, |
|
"loss": 0.6762, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 9.163103237629811e-07, |
|
"loss": 0.7095, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 9.040928527794747e-07, |
|
"loss": 0.8773, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 8.918753817959682e-07, |
|
"loss": 0.8098, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 8.796579108124618e-07, |
|
"loss": 0.5868, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 8.674404398289555e-07, |
|
"loss": 0.6362, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 8.55222968845449e-07, |
|
"loss": 0.8017, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 8.430054978619426e-07, |
|
"loss": 0.8324, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 8.307880268784363e-07, |
|
"loss": 0.7464, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 8.185705558949299e-07, |
|
"loss": 0.6944, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 8.063530849114234e-07, |
|
"loss": 0.9742, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 7.94135613927917e-07, |
|
"loss": 0.7516, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 7.819181429444106e-07, |
|
"loss": 0.8232, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 7.697006719609042e-07, |
|
"loss": 0.8384, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 7.574832009773978e-07, |
|
"loss": 0.7824, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 7.452657299938913e-07, |
|
"loss": 0.7618, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 7.330482590103849e-07, |
|
"loss": 0.6505, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 7.208307880268785e-07, |
|
"loss": 0.6294, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 7.08613317043372e-07, |
|
"loss": 0.9379, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 6.963958460598656e-07, |
|
"loss": 0.6789, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 6.841783750763592e-07, |
|
"loss": 0.4977, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 6.719609040928527e-07, |
|
"loss": 0.7115, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 6.597434331093464e-07, |
|
"loss": 0.65, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.475259621258401e-07, |
|
"loss": 0.8023, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.353084911423337e-07, |
|
"loss": 0.8099, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.230910201588272e-07, |
|
"loss": 0.8688, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.108735491753207e-07, |
|
"loss": 0.9006, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.986560781918143e-07, |
|
"loss": 0.687, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.864386072083079e-07, |
|
"loss": 1.039, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.742211362248015e-07, |
|
"loss": 0.5374, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.620036652412951e-07, |
|
"loss": 1.2662, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 5.497861942577887e-07, |
|
"loss": 1.2237, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 5.375687232742823e-07, |
|
"loss": 0.5774, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 5.253512522907758e-07, |
|
"loss": 1.6024, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 5.131337813072695e-07, |
|
"loss": 0.8005, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 5.00916310323763e-07, |
|
"loss": 0.6918, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.886988393402566e-07, |
|
"loss": 0.7882, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.764813683567502e-07, |
|
"loss": 0.7198, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.6426389737324375e-07, |
|
"loss": 0.7794, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.5204642638973736e-07, |
|
"loss": 0.6417, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.398289554062309e-07, |
|
"loss": 0.9639, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.276114844227245e-07, |
|
"loss": 0.7515, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.1539401343921815e-07, |
|
"loss": 0.7448, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.031765424557117e-07, |
|
"loss": 0.779, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.909590714722053e-07, |
|
"loss": 0.7944, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.787416004886989e-07, |
|
"loss": 0.6543, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.6652412950519244e-07, |
|
"loss": 0.7429, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.54306658521686e-07, |
|
"loss": 0.8672, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.420891875381796e-07, |
|
"loss": 0.7241, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.298717165546732e-07, |
|
"loss": 0.6672, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.1765424557116685e-07, |
|
"loss": 0.7061, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.0543677458766035e-07, |
|
"loss": 0.7905, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.9321930360415397e-07, |
|
"loss": 0.8762, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.8100183262064753e-07, |
|
"loss": 0.9151, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.6878436163714114e-07, |
|
"loss": 0.8802, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.5656689065363475e-07, |
|
"loss": 0.6617, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.443494196701283e-07, |
|
"loss": 0.7859, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.3213194868662187e-07, |
|
"loss": 0.8246, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.1991447770311546e-07, |
|
"loss": 0.6458, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.0769700671960907e-07, |
|
"loss": 0.7112, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.9547953573610266e-07, |
|
"loss": 0.7169, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.8326206475259622e-07, |
|
"loss": 0.7296, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.710445937690898e-07, |
|
"loss": 0.8533, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.5882712278558342e-07, |
|
"loss": 0.8279, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.4660965180207698e-07, |
|
"loss": 0.6992, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.3439218081857057e-07, |
|
"loss": 1.1337, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.2217470983506416e-07, |
|
"loss": 0.7624, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.0995723885155773e-07, |
|
"loss": 0.5473, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 9.773976786805133e-08, |
|
"loss": 0.6376, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 8.55222968845449e-08, |
|
"loss": 0.7052, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 7.330482590103849e-08, |
|
"loss": 0.8601, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 6.108735491753208e-08, |
|
"loss": 1.0671, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.8869883934025665e-08, |
|
"loss": 0.8117, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.6652412950519246e-08, |
|
"loss": 0.6085, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.4434941967012833e-08, |
|
"loss": 0.565, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.2217470983506416e-08, |
|
"loss": 0.5707, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 0.0, |
|
"loss": 0.8251, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"step": 1688, |
|
"total_flos": 8.104089208236278e+17, |
|
"train_loss": 0.9200204672088838, |
|
"train_runtime": 22681.8435, |
|
"train_samples_per_second": 2.387, |
|
"train_steps_per_second": 0.074 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1688, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 500, |
|
"total_flos": 8.104089208236278e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|