{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 730,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.681818181818182e-07, |
|
"loss": 1.6669, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.1363636363636364e-06, |
|
"loss": 1.4274, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.7045454545454546e-06, |
|
"loss": 1.5736, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.2727272727272728e-06, |
|
"loss": 1.6031, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.840909090909091e-06, |
|
"loss": 1.4013, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.409090909090909e-06, |
|
"loss": 1.4383, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.9772727272727275e-06, |
|
"loss": 1.4783, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 1.4839, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.113636363636364e-06, |
|
"loss": 1.2941, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 5.681818181818182e-06, |
|
"loss": 1.5711, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 6.25e-06, |
|
"loss": 1.5195, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 6.818181818181818e-06, |
|
"loss": 1.535, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 7.386363636363637e-06, |
|
"loss": 1.3355, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 7.954545454545455e-06, |
|
"loss": 1.5082, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 8.522727272727273e-06, |
|
"loss": 1.4538, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 1.3486, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.659090909090909e-06, |
|
"loss": 1.4842, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.0227272727272729e-05, |
|
"loss": 1.4197, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.0795454545454547e-05, |
|
"loss": 1.4488, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.1363636363636365e-05, |
|
"loss": 1.4491, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.1931818181818183e-05, |
|
"loss": 1.3609, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.25e-05, |
|
"loss": 1.4129, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.3068181818181819e-05, |
|
"loss": 1.1582, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.3636363636363637e-05, |
|
"loss": 1.4224, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.4204545454545456e-05, |
|
"loss": 1.4216, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.4772727272727274e-05, |
|
"loss": 1.4054, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.534090909090909e-05, |
|
"loss": 1.3476, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.590909090909091e-05, |
|
"loss": 1.4123, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.6477272727272726e-05, |
|
"loss": 1.2541, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.7045454545454546e-05, |
|
"loss": 1.3971, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.7613636363636366e-05, |
|
"loss": 1.4469, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 1.3165, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 1.3095, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9318181818181818e-05, |
|
"loss": 1.2892, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9886363636363638e-05, |
|
"loss": 1.2706, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.0454545454545457e-05, |
|
"loss": 1.3491, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.1022727272727274e-05, |
|
"loss": 1.3282, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 2.1590909090909093e-05, |
|
"loss": 1.1812, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.215909090909091e-05, |
|
"loss": 1.3438, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.272727272727273e-05, |
|
"loss": 1.3827, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 2.3295454545454546e-05, |
|
"loss": 1.2663, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.3863636363636365e-05, |
|
"loss": 1.3777, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.4431818181818185e-05, |
|
"loss": 1.2316, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.5e-05, |
|
"loss": 1.3528, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.5568181818181817e-05, |
|
"loss": 1.379, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.6136363636363637e-05, |
|
"loss": 1.3162, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.6704545454545453e-05, |
|
"loss": 1.2006, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.7272727272727273e-05, |
|
"loss": 1.4458, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.784090909090909e-05, |
|
"loss": 1.2852, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.8409090909090912e-05, |
|
"loss": 1.2553, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.8977272727272732e-05, |
|
"loss": 1.4476, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.954545454545455e-05, |
|
"loss": 1.2309, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.0113636363636365e-05, |
|
"loss": 1.3228, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.068181818181818e-05, |
|
"loss": 1.3367, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.125e-05, |
|
"loss": 1.2866, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.181818181818182e-05, |
|
"loss": 1.3093, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.238636363636364e-05, |
|
"loss": 1.3932, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.295454545454545e-05, |
|
"loss": 1.1568, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.352272727272727e-05, |
|
"loss": 1.2535, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.409090909090909e-05, |
|
"loss": 1.2189, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.465909090909091e-05, |
|
"loss": 1.149, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.522727272727273e-05, |
|
"loss": 1.3026, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.579545454545455e-05, |
|
"loss": 1.2468, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 3.6363636363636364e-05, |
|
"loss": 1.4326, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 3.6931818181818184e-05, |
|
"loss": 1.2512, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 1.3661, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 3.8068181818181816e-05, |
|
"loss": 1.2677, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 3.8636363636363636e-05, |
|
"loss": 1.354, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 3.9204545454545456e-05, |
|
"loss": 1.2258, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 3.9772727272727275e-05, |
|
"loss": 1.1839, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.034090909090909e-05, |
|
"loss": 1.4097, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.0909090909090915e-05, |
|
"loss": 1.2499, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.1477272727272734e-05, |
|
"loss": 1.3991, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.204545454545455e-05, |
|
"loss": 1.2102, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.261363636363637e-05, |
|
"loss": 1.3301, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.318181818181819e-05, |
|
"loss": 1.2872, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.375e-05, |
|
"loss": 1.2391, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.431818181818182e-05, |
|
"loss": 1.2094, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.488636363636364e-05, |
|
"loss": 1.1217, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 1.3957, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.602272727272727e-05, |
|
"loss": 1.1267, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.659090909090909e-05, |
|
"loss": 1.0954, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.715909090909091e-05, |
|
"loss": 1.412, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.772727272727273e-05, |
|
"loss": 1.076, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.829545454545455e-05, |
|
"loss": 1.2825, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.886363636363637e-05, |
|
"loss": 1.1066, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.943181818181818e-05, |
|
"loss": 1.2088, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5e-05, |
|
"loss": 1.1819, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.999998461763134e-05, |
|
"loss": 1.2009, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.9999938470544264e-05, |
|
"loss": 1.1569, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.999986155879557e-05, |
|
"loss": 0.9333, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.9999753882479914e-05, |
|
"loss": 1.2257, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.9999615441729785e-05, |
|
"loss": 1.26, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.999944623671556e-05, |
|
"loss": 1.185, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9999246267645464e-05, |
|
"loss": 1.2355, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.999901553476555e-05, |
|
"loss": 1.323, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.9998754038359785e-05, |
|
"loss": 1.2167, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.999846177874995e-05, |
|
"loss": 1.2381, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.9998138756295704e-05, |
|
"loss": 1.1787, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.999778497139455e-05, |
|
"loss": 1.2369, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.9997400424481844e-05, |
|
"loss": 1.265, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.999698511603082e-05, |
|
"loss": 1.1829, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.9996539046552546e-05, |
|
"loss": 1.2228, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.999606221659595e-05, |
|
"loss": 1.4031, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.999555462674781e-05, |
|
"loss": 1.2798, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.999501627763277e-05, |
|
"loss": 1.1808, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.999444716991332e-05, |
|
"loss": 1.2457, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.9993847304289774e-05, |
|
"loss": 1.3655, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.9993216681500346e-05, |
|
"loss": 1.187, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.999255530232105e-05, |
|
"loss": 1.1713, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.99918631675658e-05, |
|
"loss": 1.1947, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.9991140278086316e-05, |
|
"loss": 1.1169, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.999038663477218e-05, |
|
"loss": 1.2369, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.99896022385508e-05, |
|
"loss": 1.2075, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.998878709038748e-05, |
|
"loss": 1.2208, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.99879411912853e-05, |
|
"loss": 1.1711, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.998706454228524e-05, |
|
"loss": 1.2532, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.998615714446608e-05, |
|
"loss": 1.1868, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.998521899894446e-05, |
|
"loss": 1.2798, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.998425010687484e-05, |
|
"loss": 1.1243, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.998325046944955e-05, |
|
"loss": 1.3059, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.9982220087898715e-05, |
|
"loss": 1.2788, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.998115896349032e-05, |
|
"loss": 1.1768, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.998006709753017e-05, |
|
"loss": 1.2633, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.997894449136191e-05, |
|
"loss": 1.0365, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.9977791146367005e-05, |
|
"loss": 1.2884, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.997660706396474e-05, |
|
"loss": 1.2045, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.9975392245612254e-05, |
|
"loss": 1.2226, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.997414669280446e-05, |
|
"loss": 1.237, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.997287040707415e-05, |
|
"loss": 1.1576, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.99715633899919e-05, |
|
"loss": 1.2579, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.99702256431661e-05, |
|
"loss": 1.3043, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.996885716824299e-05, |
|
"loss": 1.2905, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.996745796690657e-05, |
|
"loss": 1.1184, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.996602804087871e-05, |
|
"loss": 1.0862, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.996456739191905e-05, |
|
"loss": 1.2715, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.996307602182505e-05, |
|
"loss": 1.0201, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.996155393243198e-05, |
|
"loss": 1.3834, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.996000112561289e-05, |
|
"loss": 1.1772, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.995841760327867e-05, |
|
"loss": 1.14, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.995680336737797e-05, |
|
"loss": 1.3104, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.995515841989726e-05, |
|
"loss": 1.1304, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.9953482762860796e-05, |
|
"loss": 1.0273, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.995177639833062e-05, |
|
"loss": 1.2024, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.995003932840657e-05, |
|
"loss": 1.2857, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.994827155522625e-05, |
|
"loss": 1.1091, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.994647308096509e-05, |
|
"loss": 1.3138, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.994464390783625e-05, |
|
"loss": 1.1692, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.994278403809071e-05, |
|
"loss": 1.2849, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.994089347401719e-05, |
|
"loss": 1.3361, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.993897221794221e-05, |
|
"loss": 1.1463, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.993702027223004e-05, |
|
"loss": 1.1808, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.9935037639282725e-05, |
|
"loss": 1.1949, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.993302432154008e-05, |
|
"loss": 1.2023, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.993098032147966e-05, |
|
"loss": 0.9441, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.9928905641616794e-05, |
|
"loss": 1.26, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.992680028450457e-05, |
|
"loss": 1.2594, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.9924664252733816e-05, |
|
"loss": 1.2523, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.99224975489331e-05, |
|
"loss": 1.0497, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.992030017576875e-05, |
|
"loss": 1.2882, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.991807213594484e-05, |
|
"loss": 1.1562, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.9915813432203165e-05, |
|
"loss": 1.1235, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.991352406732325e-05, |
|
"loss": 1.2257, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.991120404412238e-05, |
|
"loss": 1.2162, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.990885336545555e-05, |
|
"loss": 1.2173, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.9906472034215466e-05, |
|
"loss": 1.2527, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.990406005333259e-05, |
|
"loss": 1.1254, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.9901617425775067e-05, |
|
"loss": 1.2175, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.989914415454877e-05, |
|
"loss": 0.9104, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.9896640242697276e-05, |
|
"loss": 1.2549, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.9894105693301896e-05, |
|
"loss": 1.2029, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.989154050948159e-05, |
|
"loss": 1.1945, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.988894469439306e-05, |
|
"loss": 1.1285, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.988631825123069e-05, |
|
"loss": 1.2458, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.988366118322655e-05, |
|
"loss": 1.0894, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.9880973493650394e-05, |
|
"loss": 1.2417, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.9878255185809684e-05, |
|
"loss": 1.2962, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.987550626304952e-05, |
|
"loss": 1.1519, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.987272672875271e-05, |
|
"loss": 1.116, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.986991658633972e-05, |
|
"loss": 1.0908, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.986707583926867e-05, |
|
"loss": 1.0814, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.986420449103536e-05, |
|
"loss": 1.182, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.986130254517325e-05, |
|
"loss": 1.1839, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.985837000525343e-05, |
|
"loss": 1.0252, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.985540687488466e-05, |
|
"loss": 1.219, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.985241315771334e-05, |
|
"loss": 1.2573, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.98493888574235e-05, |
|
"loss": 1.1408, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.9846333977736813e-05, |
|
"loss": 1.2265, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.984324852241259e-05, |
|
"loss": 1.0908, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.984013249524775e-05, |
|
"loss": 1.2468, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.9836985900076844e-05, |
|
"loss": 1.2172, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.983380874077204e-05, |
|
"loss": 1.2108, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.9830601021243125e-05, |
|
"loss": 1.0567, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.982736274543748e-05, |
|
"loss": 1.3061, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.982409391734009e-05, |
|
"loss": 1.1814, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.982079454097354e-05, |
|
"loss": 1.1392, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.981746462039801e-05, |
|
"loss": 1.3159, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.981410415971127e-05, |
|
"loss": 1.1255, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.981071316304867e-05, |
|
"loss": 1.2056, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.980729163458312e-05, |
|
"loss": 1.2303, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.980383957852512e-05, |
|
"loss": 1.1711, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.9800356999122746e-05, |
|
"loss": 1.203, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.9796843900661613e-05, |
|
"loss": 1.2845, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.979330028746491e-05, |
|
"loss": 1.0433, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.978972616389337e-05, |
|
"loss": 1.1515, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.9786121534345265e-05, |
|
"loss": 1.1155, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.978248640325641e-05, |
|
"loss": 1.0288, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.9778820775100185e-05, |
|
"loss": 1.1976, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.977512465438744e-05, |
|
"loss": 1.1593, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.9771398045666606e-05, |
|
"loss": 1.3097, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.97676409535236e-05, |
|
"loss": 1.1394, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.976385338258186e-05, |
|
"loss": 1.2662, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.976003533750233e-05, |
|
"loss": 1.1382, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.9756186822983464e-05, |
|
"loss": 1.248, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.975230784376119e-05, |
|
"loss": 1.1389, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.974839840460895e-05, |
|
"loss": 1.0841, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.974445851033765e-05, |
|
"loss": 1.317, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.97404881657957e-05, |
|
"loss": 1.1366, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.973648737586894e-05, |
|
"loss": 1.2806, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.973245614548072e-05, |
|
"loss": 1.1281, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.972839447959181e-05, |
|
"loss": 1.2346, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.9724302383200477e-05, |
|
"loss": 1.2117, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.97201798613424e-05, |
|
"loss": 1.1455, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.9716026919090705e-05, |
|
"loss": 1.0981, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.971184356155597e-05, |
|
"loss": 1.023, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.970762979388618e-05, |
|
"loss": 1.2858, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.9703385621266766e-05, |
|
"loss": 1.0097, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.9699111048920554e-05, |
|
"loss": 1.0166, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.969480608210779e-05, |
|
"loss": 1.3232, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.9690470726126115e-05, |
|
"loss": 0.9815, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.968610498631058e-05, |
|
"loss": 1.175, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.9681708868033616e-05, |
|
"loss": 1.0065, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.967728237670504e-05, |
|
"loss": 1.0978, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.967282551777205e-05, |
|
"loss": 1.0995, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.96683382967192e-05, |
|
"loss": 1.1151, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.966382071906843e-05, |
|
"loss": 1.0575, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.965927279037901e-05, |
|
"loss": 0.8452, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.965469451624759e-05, |
|
"loss": 1.1209, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.965008590230812e-05, |
|
"loss": 1.171, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.9645446954231936e-05, |
|
"loss": 1.0801, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.9640777677727674e-05, |
|
"loss": 1.1266, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.963607807854129e-05, |
|
"loss": 1.2221, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.963134816245606e-05, |
|
"loss": 1.1198, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.962658793529258e-05, |
|
"loss": 1.1369, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.962179740290873e-05, |
|
"loss": 1.0799, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.961697657119968e-05, |
|
"loss": 1.1408, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.9612125446097905e-05, |
|
"loss": 1.1629, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.9607244033573156e-05, |
|
"loss": 1.0933, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.960233233963242e-05, |
|
"loss": 1.1474, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.9597390370320006e-05, |
|
"loss": 1.3268, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.959241813171743e-05, |
|
"loss": 1.1895, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.95874156299435e-05, |
|
"loss": 1.0743, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.958238287115421e-05, |
|
"loss": 1.16, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.957731986154285e-05, |
|
"loss": 1.2787, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.9572226607339886e-05, |
|
"loss": 1.0935, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.956710311481303e-05, |
|
"loss": 1.0916, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.9561949390267196e-05, |
|
"loss": 1.1169, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.95567654400445e-05, |
|
"loss": 1.0298, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.955155127052428e-05, |
|
"loss": 1.1667, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.9546306888123003e-05, |
|
"loss": 1.1274, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.9541032299294375e-05, |
|
"loss": 1.1245, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.953572751052924e-05, |
|
"loss": 1.0894, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.9530392528355626e-05, |
|
"loss": 1.1729, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.9525027359338696e-05, |
|
"loss": 1.096, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.951963201008076e-05, |
|
"loss": 1.1825, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.9514206487221304e-05, |
|
"loss": 1.0211, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.95087507974369e-05, |
|
"loss": 1.2285, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.9503264947441275e-05, |
|
"loss": 1.2042, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.949774894398524e-05, |
|
"loss": 1.088, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.949220279385676e-05, |
|
"loss": 1.1779, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.948662650388084e-05, |
|
"loss": 0.9534, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.948102008091962e-05, |
|
"loss": 1.1841, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.94753835318723e-05, |
|
"loss": 1.1233, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.946971686367516e-05, |
|
"loss": 1.1338, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.9464020083301544e-05, |
|
"loss": 1.1575, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.945829319776184e-05, |
|
"loss": 1.0758, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.945253621410351e-05, |
|
"loss": 1.172, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.944674913941102e-05, |
|
"loss": 1.2216, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.944093198080589e-05, |
|
"loss": 1.2167, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.9435084745446666e-05, |
|
"loss": 1.0337, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.9429207440528876e-05, |
|
"loss": 1.0044, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.942330007328509e-05, |
|
"loss": 1.1916, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.9417362650984835e-05, |
|
"loss": 0.9445, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.941139518093464e-05, |
|
"loss": 1.3121, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.9405397670478014e-05, |
|
"loss": 1.1049, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.9399370126995444e-05, |
|
"loss": 1.062, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.939331255790434e-05, |
|
"loss": 1.2379, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.93872249706591e-05, |
|
"loss": 1.0523, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.938110737275104e-05, |
|
"loss": 0.9423, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.937495977170841e-05, |
|
"loss": 1.1224, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.936878217509637e-05, |
|
"loss": 1.2134, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.936257459051703e-05, |
|
"loss": 0.9984, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.9356337025609365e-05, |
|
"loss": 1.1952, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.935006948804927e-05, |
|
"loss": 1.0955, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.934377198554948e-05, |
|
"loss": 1.1969, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.933744452585966e-05, |
|
"loss": 1.2647, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.933108711676632e-05, |
|
"loss": 1.0782, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.9324699766092806e-05, |
|
"loss": 1.0923, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.931828248169933e-05, |
|
"loss": 1.1035, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.9311835271482943e-05, |
|
"loss": 1.1281, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.93053581433775e-05, |
|
"loss": 0.8405, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.9298851105353696e-05, |
|
"loss": 1.1676, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.929231416541901e-05, |
|
"loss": 1.192, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.9285747331617746e-05, |
|
"loss": 1.183, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.927915061203099e-05, |
|
"loss": 0.9712, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.927252401477657e-05, |
|
"loss": 1.1965, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.926586754800912e-05, |
|
"loss": 1.0713, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.925918121992002e-05, |
|
"loss": 1.0439, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.925246503873739e-05, |
|
"loss": 1.1367, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.92457190127261e-05, |
|
"loss": 1.1343, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.923894315018773e-05, |
|
"loss": 1.1216, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.923213745946059e-05, |
|
"loss": 1.1822, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.922530194891969e-05, |
|
"loss": 1.0442, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.921843662697673e-05, |
|
"loss": 1.139, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.921154150208012e-05, |
|
"loss": 0.8287, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.920461658271492e-05, |
|
"loss": 1.1756, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.919766187740286e-05, |
|
"loss": 1.1312, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.9190677394702324e-05, |
|
"loss": 1.122, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.9183663143208355e-05, |
|
"loss": 1.0514, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.9176619131552604e-05, |
|
"loss": 1.1719, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.916954536840337e-05, |
|
"loss": 1.0202, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.916244186246555e-05, |
|
"loss": 1.1547, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.9155308622480625e-05, |
|
"loss": 1.2142, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 4.914814565722671e-05, |
|
"loss": 1.0676, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.9140952975518465e-05, |
|
"loss": 1.0258, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.9133730586207124e-05, |
|
"loss": 1.0105, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.9133730586207124e-05, |
|
"loss": 0.9735, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 4.912647849818049e-05, |
|
"loss": 1.1046, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 4.91191967203629e-05, |
|
"loss": 1.1104, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 4.911188526171524e-05, |
|
"loss": 0.9328, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.910454413123491e-05, |
|
"loss": 1.1438, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.909717333795584e-05, |
|
"loss": 1.1995, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 4.908977289094843e-05, |
|
"loss": 1.057, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.90823427993196e-05, |
|
"loss": 1.1436, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.9074883072212744e-05, |
|
"loss": 1.0117, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.906739371880773e-05, |
|
"loss": 1.1685, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.9059874748320876e-05, |
|
"loss": 1.1363, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.9052326170004936e-05, |
|
"loss": 1.1357, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.904474799314913e-05, |
|
"loss": 0.974, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.9037140227079065e-05, |
|
"loss": 1.2257, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.902950288115679e-05, |
|
"loss": 1.1061, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.902183596478073e-05, |
|
"loss": 1.0701, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 4.901413948738572e-05, |
|
"loss": 1.2365, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.900641345844298e-05, |
|
"loss": 1.0575, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.899865788746005e-05, |
|
"loss": 1.1325, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.899087278398088e-05, |
|
"loss": 1.1504, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.8983058157585717e-05, |
|
"loss": 1.0893, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.897521401789116e-05, |
|
"loss": 1.1198, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.896734037455014e-05, |
|
"loss": 1.1958, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.895943723725187e-05, |
|
"loss": 0.958, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.895150461572187e-05, |
|
"loss": 1.0629, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.894354251972193e-05, |
|
"loss": 1.0446, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.893555095905014e-05, |
|
"loss": 0.9373, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.892752994354082e-05, |
|
"loss": 1.11, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.8919479483064544e-05, |
|
"loss": 1.0709, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.891139958752813e-05, |
|
"loss": 1.2324, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.890329026687462e-05, |
|
"loss": 1.0608, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.889515153108324e-05, |
|
"loss": 1.1808, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.888698339016945e-05, |
|
"loss": 1.0626, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 4.887878585418487e-05, |
|
"loss": 1.1625, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 4.88705589332173e-05, |
|
"loss": 1.0463, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 4.8862302637390714e-05, |
|
"loss": 0.9999, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.88540169768652e-05, |
|
"loss": 1.2328, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.884570196183703e-05, |
|
"loss": 1.063, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 4.883735760253856e-05, |
|
"loss": 1.1949, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.882898390923828e-05, |
|
"loss": 1.0521, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.882058089224075e-05, |
|
"loss": 1.1375, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 4.881214856188666e-05, |
|
"loss": 1.1363, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.8803686928552736e-05, |
|
"loss": 1.0669, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.879519600265177e-05, |
|
"loss": 1.0104, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 4.8786675794632606e-05, |
|
"loss": 0.943, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.8778126314980136e-05, |
|
"loss": 1.2132, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.876954757421523e-05, |
|
"loss": 0.9231, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 4.876093958289484e-05, |
|
"loss": 0.9422, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 4.8752302351611836e-05, |
|
"loss": 1.2366, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.8743635890995124e-05, |
|
"loss": 0.904, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.873494021170953e-05, |
|
"loss": 1.0879, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 4.8726215324455905e-05, |
|
"loss": 0.9262, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.8717461239970975e-05, |
|
"loss": 1.0145, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.870867796902743e-05, |
|
"loss": 1.0186, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.8699865522433884e-05, |
|
"loss": 1.0292, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.8691023911034826e-05, |
|
"loss": 0.9748, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.868215314571065e-05, |
|
"loss": 0.7651, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 4.867325323737765e-05, |
|
"loss": 1.0372, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.866432419698792e-05, |
|
"loss": 1.0903, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.8655366035529483e-05, |
|
"loss": 1.0043, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.8646378764026126e-05, |
|
"loss": 1.0399, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.863736239353752e-05, |
|
"loss": 1.1461, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.862831693515909e-05, |
|
"loss": 1.0312, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.8619242400022096e-05, |
|
"loss": 1.0527, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.861013879929357e-05, |
|
"loss": 0.9872, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.8601006144176284e-05, |
|
"loss": 1.0633, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.859184444590882e-05, |
|
"loss": 1.0893, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.859184444590882e-05, |
|
"loss": 1.0137, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.858265371576545e-05, |
|
"loss": 1.0704, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.857343396505618e-05, |
|
"loss": 1.2629, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.856418520512676e-05, |
|
"loss": 1.1081, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.85549074473586e-05, |
|
"loss": 0.9953, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.8545600703168824e-05, |
|
"loss": 1.0844, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.85362649840102e-05, |
|
"loss": 1.1951, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.852690030137118e-05, |
|
"loss": 1.018, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.851750666677584e-05, |
|
"loss": 1.0113, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.8508084091783874e-05, |
|
"loss": 1.039, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.8498632587990625e-05, |
|
"loss": 0.953, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.8489152167026995e-05, |
|
"loss": 1.1071, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.8479642840559505e-05, |
|
"loss": 1.048, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.847010462029022e-05, |
|
"loss": 1.0457, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.8460537517956794e-05, |
|
"loss": 1.0223, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.845094154533239e-05, |
|
"loss": 1.1077, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.84413167142257e-05, |
|
"loss": 1.0166, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.8431663036480955e-05, |
|
"loss": 1.0992, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.8421980523977864e-05, |
|
"loss": 0.9356, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.841226918863162e-05, |
|
"loss": 1.1566, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.840252904239291e-05, |
|
"loss": 1.1303, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.839276009724783e-05, |
|
"loss": 1.0215, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.838296236521796e-05, |
|
"loss": 1.087, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.837313585836027e-05, |
|
"loss": 0.8805, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.836328058876717e-05, |
|
"loss": 1.0968, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.8353396568566454e-05, |
|
"loss": 1.0483, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.834348380992127e-05, |
|
"loss": 1.0528, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.833354232503019e-05, |
|
"loss": 1.0813, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.832357212612707e-05, |
|
"loss": 1.0018, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.8313573225481143e-05, |
|
"loss": 1.088, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.830354563539696e-05, |
|
"loss": 1.1499, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.8293489368214354e-05, |
|
"loss": 1.13, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.8283404436308464e-05, |
|
"loss": 0.952, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.8273290852089704e-05, |
|
"loss": 0.9332, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.826314862800375e-05, |
|
"loss": 1.1237, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.825297777653151e-05, |
|
"loss": 0.874, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 4.824277831018913e-05, |
|
"loss": 1.2359, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 4.823255024152796e-05, |
|
"loss": 1.0285, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 4.822229358313456e-05, |
|
"loss": 0.9887, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.8212008347630664e-05, |
|
"loss": 1.1521, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.820169454767318e-05, |
|
"loss": 0.9784, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.819135219595416e-05, |
|
"loss": 0.8608, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.818098130520078e-05, |
|
"loss": 1.0483, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.817058188817536e-05, |
|
"loss": 1.1391, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 4.8160153957675316e-05, |
|
"loss": 0.8631, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.8149697526533145e-05, |
|
"loss": 1.0571, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.813921260761642e-05, |
|
"loss": 1.022, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.8128699213827785e-05, |
|
"loss": 1.1186, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.81181573581049e-05, |
|
"loss": 1.1906, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.8107587053420455e-05, |
|
"loss": 1.0026, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.8096988312782174e-05, |
|
"loss": 1.011, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.808636114923274e-05, |
|
"loss": 1.0215, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.807570557584984e-05, |
|
"loss": 1.0448, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.8065021605746104e-05, |
|
"loss": 0.7449, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.8054309252069114e-05, |
|
"loss": 1.0893, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.8043568528001384e-05, |
|
"loss": 1.1073, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.803279944676032e-05, |
|
"loss": 1.1046, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.802200202159827e-05, |
|
"loss": 0.8914, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.8011176265802415e-05, |
|
"loss": 1.1037, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.8000322192694814e-05, |
|
"loss": 0.9823, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.798943981563238e-05, |
|
"loss": 0.9635, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.797852914800684e-05, |
|
"loss": 1.0584, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.796759020324477e-05, |
|
"loss": 1.0476, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.7956622994807495e-05, |
|
"loss": 1.0305, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.7945627536191166e-05, |
|
"loss": 1.1159, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.7934603840926675e-05, |
|
"loss": 0.9567, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.7923551922579656e-05, |
|
"loss": 1.0536, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.7912471794750496e-05, |
|
"loss": 0.7428, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.790136347107427e-05, |
|
"loss": 1.0945, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.7890226965220785e-05, |
|
"loss": 1.0525, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.787906229089448e-05, |
|
"loss": 1.0444, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.7867869461834514e-05, |
|
"loss": 0.9727, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.785664849181465e-05, |
|
"loss": 1.097, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.78453993946433e-05, |
|
"loss": 0.953, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.783412218416348e-05, |
|
"loss": 1.0736, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.78228168742528e-05, |
|
"loss": 1.1363, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.781148347882347e-05, |
|
"loss": 0.9819, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 4.780012201182225e-05, |
|
"loss": 0.9414, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 4.7788732487230434e-05, |
|
"loss": 0.9277, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.777731491906384e-05, |
|
"loss": 0.9638, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.7765869321372836e-05, |
|
"loss": 1.0214, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.7754395708242226e-05, |
|
"loss": 1.0234, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 4.774289409379133e-05, |
|
"loss": 0.8438, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.773136449217391e-05, |
|
"loss": 1.0616, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.771980691757819e-05, |
|
"loss": 1.1344, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.770822138422677e-05, |
|
"loss": 0.9735, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 4.769660790637671e-05, |
|
"loss": 1.061, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 4.768496649831942e-05, |
|
"loss": 0.9322, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.767329717438071e-05, |
|
"loss": 1.0904, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.76615999489207e-05, |
|
"loss": 1.0558, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 4.76498748363339e-05, |
|
"loss": 1.0602, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 4.76381218510491e-05, |
|
"loss": 0.9079, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.76263410075294e-05, |
|
"loss": 1.1412, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.7614532320272174e-05, |
|
"loss": 1.0225, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.760269580380909e-05, |
|
"loss": 1.0012, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 4.759083147270602e-05, |
|
"loss": 1.1793, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.7578939341563095e-05, |
|
"loss": 0.9858, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.7567019425014644e-05, |
|
"loss": 1.059, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.755507173772919e-05, |
|
"loss": 1.0591, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 4.754309629440943e-05, |
|
"loss": 1.0188, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 4.753109310979224e-05, |
|
"loss": 1.0426, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.751906219864857e-05, |
|
"loss": 1.1116, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.750700357578357e-05, |
|
"loss": 0.8753, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.749491725603644e-05, |
|
"loss": 0.9712, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.7482803254280484e-05, |
|
"loss": 0.9633, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.747066158542306e-05, |
|
"loss": 0.8521, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.7458492264405574e-05, |
|
"loss": 1.0322, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.7446295306203474e-05, |
|
"loss": 0.9904, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.743407072582621e-05, |
|
"loss": 1.1605, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.742181853831721e-05, |
|
"loss": 0.9827, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.74095387587539e-05, |
|
"loss": 1.0952, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.739723140224763e-05, |
|
"loss": 0.9814, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.738489648394373e-05, |
|
"loss": 1.0741, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.73725340190214e-05, |
|
"loss": 0.9566, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.736014402269376e-05, |
|
"loss": 0.913, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.734772651020782e-05, |
|
"loss": 1.151, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.733528149684444e-05, |
|
"loss": 0.9932, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.732280899791832e-05, |
|
"loss": 1.1216, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.7310309028777976e-05, |
|
"loss": 0.9726, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.729778160480576e-05, |
|
"loss": 1.0382, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.728522674141776e-05, |
|
"loss": 1.0765, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.727264445406388e-05, |
|
"loss": 1.0005, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.726003475822775e-05, |
|
"loss": 0.931, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.7247397669426716e-05, |
|
"loss": 0.8656, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.723473320321186e-05, |
|
"loss": 1.1407, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.7222041375167936e-05, |
|
"loss": 0.845, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.720932220091337e-05, |
|
"loss": 0.8612, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.7196575696100254e-05, |
|
"loss": 1.1551, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.7183801876414294e-05, |
|
"loss": 0.8392, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.717100075757482e-05, |
|
"loss": 1.0083, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.715817235533476e-05, |
|
"loss": 0.8475, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.71453166854806e-05, |
|
"loss": 0.936, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.7132433763832404e-05, |
|
"loss": 0.9306, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.711952360624376e-05, |
|
"loss": 0.9495, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.710658622860176e-05, |
|
"loss": 0.8872, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.709362164682702e-05, |
|
"loss": 0.6822, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.70806298768736e-05, |
|
"loss": 0.9525, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.706761093472906e-05, |
|
"loss": 1.0159, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.705456483641435e-05, |
|
"loss": 0.9192, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.704149159798387e-05, |
|
"loss": 0.9538, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.702839123552541e-05, |
|
"loss": 1.0676, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.7015263765160154e-05, |
|
"loss": 0.9409, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.70021092030426e-05, |
|
"loss": 0.9745, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 4.698892756536064e-05, |
|
"loss": 0.8958, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 4.697571886833544e-05, |
|
"loss": 1.0001, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 4.696248312822149e-05, |
|
"loss": 1.026, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 4.6949220361306555e-05, |
|
"loss": 1.0302, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 4.693593058391165e-05, |
|
"loss": 0.9987, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 4.692261381239105e-05, |
|
"loss": 1.1951, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 4.690927006313222e-05, |
|
"loss": 1.0323, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 4.689589935255585e-05, |
|
"loss": 0.9272, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 4.688250169711578e-05, |
|
"loss": 0.9996, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 4.686907711329903e-05, |
|
"loss": 1.1128, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 4.6855625617625763e-05, |
|
"loss": 0.9461, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 4.684214722664924e-05, |
|
"loss": 0.9449, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 4.682864195695582e-05, |
|
"loss": 0.9528, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.681510982516496e-05, |
|
"loss": 0.8721, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.680155084792914e-05, |
|
"loss": 1.0428, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 4.678796504193392e-05, |
|
"loss": 1.0111, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 4.677435242389784e-05, |
|
"loss": 0.9557, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 4.676071301057243e-05, |
|
"loss": 0.9475, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.674704681874223e-05, |
|
"loss": 1.039, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 4.6733353865224694e-05, |
|
"loss": 0.9335, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 4.671963416687024e-05, |
|
"loss": 1.0222, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 4.670588774056218e-05, |
|
"loss": 0.8481, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 4.669211460321673e-05, |
|
"loss": 1.0872, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 4.667831477178295e-05, |
|
"loss": 1.0565, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 4.666448826324278e-05, |
|
"loss": 0.9574, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 4.665063509461097e-05, |
|
"loss": 1.0013, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.663675528293509e-05, |
|
"loss": 0.807, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.662284884529549e-05, |
|
"loss": 1.0153, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 4.660891579880528e-05, |
|
"loss": 0.9717, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 4.6594956160610325e-05, |
|
"loss": 0.9686, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 4.6580969947889216e-05, |
|
"loss": 1.0015, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.656695717785323e-05, |
|
"loss": 0.9288, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.6552917867746324e-05, |
|
"loss": 1.0121, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.653885203484515e-05, |
|
"loss": 1.0764, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.652475969645896e-05, |
|
"loss": 1.0444, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 4.651064086992965e-05, |
|
"loss": 0.8733, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 4.6496495572631675e-05, |
|
"loss": 0.8512, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 4.6482323821972105e-05, |
|
"loss": 1.0505, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 4.6468125635390556e-05, |
|
"loss": 0.7957, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.6453901030359154e-05, |
|
"loss": 1.1614, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.6439650024382547e-05, |
|
"loss": 0.9545, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.6439650024382547e-05, |
|
"loss": 0.9162, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.642537263499788e-05, |
|
"loss": 1.0761, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.6411068879774754e-05, |
|
"loss": 0.9022, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.639673877631523e-05, |
|
"loss": 0.7747, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 4.6382382342253775e-05, |
|
"loss": 0.9697, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 4.636799959525726e-05, |
|
"loss": 1.0663, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 4.635359055302495e-05, |
|
"loss": 0.7292, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 4.633915523328846e-05, |
|
"loss": 0.9178, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 4.632469365381174e-05, |
|
"loss": 0.9554, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 4.631020583239107e-05, |
|
"loss": 1.0515, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 4.629569178685499e-05, |
|
"loss": 1.1199, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 4.628115153506435e-05, |
|
"loss": 0.9295, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 4.626658509491222e-05, |
|
"loss": 0.9321, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 4.625199248432391e-05, |
|
"loss": 0.9355, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 4.6237373721256935e-05, |
|
"loss": 0.9687, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 4.6222728823700995e-05, |
|
"loss": 0.6614, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 4.620805780967794e-05, |
|
"loss": 1.0145, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 4.619336069724177e-05, |
|
"loss": 1.0242, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 4.617863750447861e-05, |
|
"loss": 1.0289, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 4.616388824950664e-05, |
|
"loss": 0.8058, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 4.614911295047615e-05, |
|
"loss": 1.0225, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 4.613431162556947e-05, |
|
"loss": 0.8832, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 4.6119484293000955e-05, |
|
"loss": 0.8861, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 4.610463097101696e-05, |
|
"loss": 0.9833, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 4.6089751677895836e-05, |
|
"loss": 0.9703, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 4.607484643194788e-05, |
|
"loss": 0.9511, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 4.605991525151533e-05, |
|
"loss": 1.0516, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 4.604495815497234e-05, |
|
"loss": 0.8705, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 4.6029975160724945e-05, |
|
"loss": 0.9725, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 4.6014966287211084e-05, |
|
"loss": 0.6535, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 4.59999315529005e-05, |
|
"loss": 1.013, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 4.598487097629479e-05, |
|
"loss": 0.9731, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 4.5969784575927324e-05, |
|
"loss": 0.9715, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 4.595467237036329e-05, |
|
"loss": 0.898, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 4.59395343781996e-05, |
|
"loss": 1.0253, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 4.5924370618064913e-05, |
|
"loss": 0.8818, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 4.590918110861958e-05, |
|
"loss": 0.9952, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 4.5893965868555664e-05, |
|
"loss": 1.0598, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 4.5878724916596874e-05, |
|
"loss": 0.9002, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 4.586345827149856e-05, |
|
"loss": 0.8679, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 4.58481659520477e-05, |
|
"loss": 0.8458, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 4.5832847977062874e-05, |
|
"loss": 0.8581, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 4.581750436539421e-05, |
|
"loss": 0.9438, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 4.5802135135923386e-05, |
|
"loss": 0.9411, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 4.5786740307563636e-05, |
|
"loss": 0.7669, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 4.5771319899259656e-05, |
|
"loss": 0.9775, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 4.5755873929987634e-05, |
|
"loss": 1.0761, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 4.5740402418755246e-05, |
|
"loss": 0.8978, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.572490538460154e-05, |
|
"loss": 0.9787, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.570938284659702e-05, |
|
"loss": 0.8622, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 4.5693834823843556e-05, |
|
"loss": 1.0193, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 4.5678261335474384e-05, |
|
"loss": 0.9681, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.566266240065406e-05, |
|
"loss": 0.9882, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.564703803857849e-05, |
|
"loss": 0.8362, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 4.5631388268474837e-05, |
|
"loss": 1.0627, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 4.5615713109601544e-05, |
|
"loss": 0.9333, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 4.56000125812483e-05, |
|
"loss": 0.93, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 4.558428670273601e-05, |
|
"loss": 1.1188, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 4.556853549341679e-05, |
|
"loss": 0.9112, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 4.555275897267388e-05, |
|
"loss": 0.9868, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 4.553695715992172e-05, |
|
"loss": 0.974, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 4.552113007460586e-05, |
|
"loss": 0.952, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 4.550527773620293e-05, |
|
"loss": 0.9677, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 4.5489400164220666e-05, |
|
"loss": 1.0329, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 4.5473497378197835e-05, |
|
"loss": 0.7958, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 4.545756939770423e-05, |
|
"loss": 0.8831, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 4.5441616242340665e-05, |
|
"loss": 0.8805, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 4.542563793173893e-05, |
|
"loss": 0.7746, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 4.540963448556176e-05, |
|
"loss": 0.9577, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 4.539360592350282e-05, |
|
"loss": 0.9146, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 4.537755226528671e-05, |
|
"loss": 1.1038, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 4.5361473530668874e-05, |
|
"loss": 0.9022, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 4.534536973943564e-05, |
|
"loss": 1.0197, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 4.532924091140417e-05, |
|
"loss": 0.9027, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 4.531308706642243e-05, |
|
"loss": 0.9918, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 4.529690822436916e-05, |
|
"loss": 0.875, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 4.528070440515388e-05, |
|
"loss": 0.8319, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 4.526447562871685e-05, |
|
"loss": 1.0758, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 4.5248221915029014e-05, |
|
"loss": 0.9287, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 4.523194328409203e-05, |
|
"loss": 1.047, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 4.5215639755938214e-05, |
|
"loss": 0.9021, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 4.519931135063051e-05, |
|
"loss": 0.9434, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 4.518295808826249e-05, |
|
"loss": 1.0068, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.5166579988958296e-05, |
|
"loss": 0.9381, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.515017707287265e-05, |
|
"loss": 0.8563, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.5133749360190805e-05, |
|
"loss": 0.7951, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.5117296871128546e-05, |
|
"loss": 1.0672, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.510081962593211e-05, |
|
"loss": 0.7677, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.508431764487824e-05, |
|
"loss": 0.774, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.5067790948274094e-05, |
|
"loss": 1.0797, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.5051239556457244e-05, |
|
"loss": 0.7631, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.503466348979568e-05, |
|
"loss": 0.9337, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 4.501806276868772e-05, |
|
"loss": 0.7778, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.500143741356203e-05, |
|
"loss": 0.8628, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.4984787444877616e-05, |
|
"loss": 0.8446, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.4968112883123734e-05, |
|
"loss": 0.8722, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 4.495141374881995e-05, |
|
"loss": 0.8005, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 4.493469006251601e-05, |
|
"loss": 0.5906, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 4.491794184479194e-05, |
|
"loss": 0.8642, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.49011691162579e-05, |
|
"loss": 0.9482, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.488437189755424e-05, |
|
"loss": 0.8409, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 4.486755020935144e-05, |
|
"loss": 0.8732, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 4.485070407235009e-05, |
|
"loss": 0.9934, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 4.4833833507280884e-05, |
|
"loss": 0.858, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 4.481693853490454e-05, |
|
"loss": 0.8971, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 4.480001917601185e-05, |
|
"loss": 0.8115, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 4.478307545142359e-05, |
|
"loss": 0.9325, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 4.476610738199053e-05, |
|
"loss": 0.956, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 4.4749114988593396e-05, |
|
"loss": 0.9499, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 4.473209829214286e-05, |
|
"loss": 0.9287, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 4.471505731357949e-05, |
|
"loss": 1.1292, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 4.4697992073873724e-05, |
|
"loss": 0.9526, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 4.468090259402587e-05, |
|
"loss": 0.8573, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 4.466378889506607e-05, |
|
"loss": 0.9222, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 4.464665099805424e-05, |
|
"loss": 1.0377, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 4.462948892408012e-05, |
|
"loss": 0.8685, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 4.4612302694263174e-05, |
|
"loss": 0.8759, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 4.4595092329752583e-05, |
|
"loss": 0.8718, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 4.457785785172726e-05, |
|
"loss": 0.7879, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 4.4560599281395755e-05, |
|
"loss": 0.9711, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 4.454331663999629e-05, |
|
"loss": 0.9297, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 4.4526009948796703e-05, |
|
"loss": 0.8714, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 4.4508679229094425e-05, |
|
"loss": 0.8773, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 4.449132450221646e-05, |
|
"loss": 0.973, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 4.447394578951935e-05, |
|
"loss": 0.8554, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 4.445654311238915e-05, |
|
"loss": 0.955, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 4.443911649224143e-05, |
|
"loss": 0.7657, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 4.442166595052118e-05, |
|
"loss": 1.0131, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 4.4404191508702875e-05, |
|
"loss": 0.9818, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 4.4386693188290376e-05, |
|
"loss": 0.8895, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 4.4369171010816925e-05, |
|
"loss": 0.9271, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.435162499784513e-05, |
|
"loss": 0.7359, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.433405517096693e-05, |
|
"loss": 0.9381, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 4.431646155180358e-05, |
|
"loss": 0.899, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 4.4298844162005585e-05, |
|
"loss": 0.8745, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 4.4281203023252735e-05, |
|
"loss": 0.9215, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 4.426353815725403e-05, |
|
"loss": 0.8616, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 4.4245849585747654e-05, |
|
"loss": 0.9401, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 4.422813733050099e-05, |
|
"loss": 1.0099, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 4.4210401413310556e-05, |
|
"loss": 0.9682, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 4.4192641856001976e-05, |
|
"loss": 0.7944, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 4.417485868042998e-05, |
|
"loss": 0.7715, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.415705190847835e-05, |
|
"loss": 0.9758, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 4.413922156205992e-05, |
|
"loss": 0.7187, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 4.412136766311652e-05, |
|
"loss": 1.0898, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 4.410349023361898e-05, |
|
"loss": 0.8747, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 4.4085589295567065e-05, |
|
"loss": 0.9458, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 4.406766487098949e-05, |
|
"loss": 1.0059, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 4.4049716981943866e-05, |
|
"loss": 0.8319, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 4.4031745650516666e-05, |
|
"loss": 0.7012, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 4.401375089882324e-05, |
|
"loss": 0.9013, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 4.399573274900771e-05, |
|
"loss": 0.9979, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 4.397769122324305e-05, |
|
"loss": 0.6219, |
|
"step": 730 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 2920, |
|
"num_train_epochs": 20, |
|
"save_steps": 1000000000, |
|
"total_flos": 9.79415936794624e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|