|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 508600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "learning_rate": 4.995084545812033e-05, "loss": 8.5269, "step": 500},
    {"epoch": 0.02, "learning_rate": 4.9901690916240665e-05, "loss": 3.887, "step": 1000},
    {"epoch": 0.03, "learning_rate": 4.985253637436099e-05, "loss": 3.3828, "step": 1500},
    {"epoch": 0.04, "learning_rate": 4.980338183248132e-05, "loss": 3.1234, "step": 2000},
    {"epoch": 0.05, "learning_rate": 4.975422729060165e-05, "loss": 2.9408, "step": 2500},
    {"epoch": 0.06, "learning_rate": 4.970507274872199e-05, "loss": 2.798, "step": 3000},
    {"epoch": 0.07, "learning_rate": 4.965591820684232e-05, "loss": 2.6747, "step": 3500},
    {"epoch": 0.08, "learning_rate": 4.9606763664962646e-05, "loss": 2.6112, "step": 4000},
    {"epoch": 0.09, "learning_rate": 4.9557609123082973e-05, "loss": 2.5234, "step": 4500},
    {"epoch": 0.1, "learning_rate": 4.950845458120331e-05, "loss": 2.4435, "step": 5000},
    {"epoch": 0.11, "learning_rate": 4.9459300039323636e-05, "loss": 2.3883, "step": 5500},
    {"epoch": 0.12, "learning_rate": 4.9410145497443964e-05, "loss": 2.3303, "step": 6000},
    {"epoch": 0.13, "learning_rate": 4.93609909555643e-05, "loss": 2.286, "step": 6500},
    {"epoch": 0.14, "learning_rate": 4.9311836413684626e-05, "loss": 2.2177, "step": 7000},
    {"epoch": 0.15, "learning_rate": 4.9262681871804954e-05, "loss": 2.203, "step": 7500},
    {"epoch": 0.16, "learning_rate": 4.921352732992528e-05, "loss": 2.1681, "step": 8000},
    {"epoch": 0.17, "learning_rate": 4.916437278804562e-05, "loss": 2.1236, "step": 8500},
    {"epoch": 0.18, "learning_rate": 4.911521824616595e-05, "loss": 2.0806, "step": 9000},
    {"epoch": 0.19, "learning_rate": 4.906606370428628e-05, "loss": 2.0495, "step": 9500},
    {"epoch": 0.2, "learning_rate": 4.901690916240661e-05, "loss": 2.0479, "step": 10000},
    {"epoch": 0.21, "learning_rate": 4.896775462052694e-05, "loss": 2.0075, "step": 10500},
    {"epoch": 0.22, "learning_rate": 4.891860007864727e-05, "loss": 1.9711, "step": 11000},
    {"epoch": 0.23, "learning_rate": 4.88694455367676e-05, "loss": 1.9652, "step": 11500},
    {"epoch": 0.24, "learning_rate": 4.882029099488793e-05, "loss": 1.9282, "step": 12000},
    {"epoch": 0.25, "learning_rate": 4.877113645300826e-05, "loss": 1.906, "step": 12500},
    {"epoch": 0.26, "learning_rate": 4.872198191112859e-05, "loss": 1.8884, "step": 13000},
    {"epoch": 0.27, "learning_rate": 4.8672827369248916e-05, "loss": 1.8655, "step": 13500},
    {"epoch": 0.28, "learning_rate": 4.862367282736925e-05, "loss": 1.8642, "step": 14000},
    {"epoch": 0.29, "learning_rate": 4.8574518285489585e-05, "loss": 1.8338, "step": 14500},
    {"epoch": 0.29, "learning_rate": 4.852536374360991e-05, "loss": 1.8359, "step": 15000},
    {"epoch": 0.3, "learning_rate": 4.847620920173024e-05, "loss": 1.8034, "step": 15500},
    {"epoch": 0.31, "learning_rate": 4.8427054659850575e-05, "loss": 1.7909, "step": 16000},
    {"epoch": 0.32, "learning_rate": 4.83779001179709e-05, "loss": 1.7587, "step": 16500},
    {"epoch": 0.33, "learning_rate": 4.832874557609123e-05, "loss": 1.7676, "step": 17000},
    {"epoch": 0.34, "learning_rate": 4.8279591034211566e-05, "loss": 1.7414, "step": 17500},
    {"epoch": 0.35, "learning_rate": 4.8230436492331893e-05, "loss": 1.7267, "step": 18000},
    {"epoch": 0.36, "learning_rate": 4.818128195045222e-05, "loss": 1.7167, "step": 18500},
    {"epoch": 0.37, "learning_rate": 4.8132127408572556e-05, "loss": 1.7057, "step": 19000},
    {"epoch": 0.38, "learning_rate": 4.8082972866692884e-05, "loss": 1.6797, "step": 19500},
    {"epoch": 0.39, "learning_rate": 4.803381832481321e-05, "loss": 1.6798, "step": 20000},
    {"epoch": 0.4, "learning_rate": 4.7984663782933546e-05, "loss": 1.6704, "step": 20500},
    {"epoch": 0.41, "learning_rate": 4.7935509241053874e-05, "loss": 1.653, "step": 21000},
    {"epoch": 0.42, "learning_rate": 4.788635469917421e-05, "loss": 1.6541, "step": 21500},
    {"epoch": 0.43, "learning_rate": 4.783720015729454e-05, "loss": 1.6404, "step": 22000},
    {"epoch": 0.44, "learning_rate": 4.7788045615414865e-05, "loss": 1.6296, "step": 22500},
    {"epoch": 0.45, "learning_rate": 4.77388910735352e-05, "loss": 1.6205, "step": 23000},
    {"epoch": 0.46, "learning_rate": 4.768973653165553e-05, "loss": 1.5994, "step": 23500},
    {"epoch": 0.47, "learning_rate": 4.7640581989775855e-05, "loss": 1.5933, "step": 24000},
    {"epoch": 0.48, "learning_rate": 4.759142744789619e-05, "loss": 1.5998, "step": 24500},
    {"epoch": 0.49, "learning_rate": 4.754227290601652e-05, "loss": 1.5913, "step": 25000},
    {"epoch": 0.5, "learning_rate": 4.7493118364136845e-05, "loss": 1.5682, "step": 25500},
    {"epoch": 0.51, "learning_rate": 4.744396382225718e-05, "loss": 1.5572, "step": 26000},
    {"epoch": 0.52, "learning_rate": 4.7394809280377515e-05, "loss": 1.5719, "step": 26500},
    {"epoch": 0.53, "learning_rate": 4.734565473849784e-05, "loss": 1.5551, "step": 27000},
    {"epoch": 0.54, "learning_rate": 4.729650019661817e-05, "loss": 1.5426, "step": 27500},
    {"epoch": 0.55, "learning_rate": 4.72473456547385e-05, "loss": 1.5426, "step": 28000},
    {"epoch": 0.56, "learning_rate": 4.719819111285883e-05, "loss": 1.5393, "step": 28500},
    {"epoch": 0.57, "learning_rate": 4.714903657097916e-05, "loss": 1.5167, "step": 29000},
    {"epoch": 0.58, "learning_rate": 4.709988202909949e-05, "loss": 1.5168, "step": 29500},
    {"epoch": 0.59, "learning_rate": 4.705072748721982e-05, "loss": 1.492, "step": 30000},
    {"epoch": 0.6, "learning_rate": 4.700157294534015e-05, "loss": 1.5187, "step": 30500},
    {"epoch": 0.61, "learning_rate": 4.695241840346048e-05, "loss": 1.491, "step": 31000},
    {"epoch": 0.62, "learning_rate": 4.690326386158081e-05, "loss": 1.4883, "step": 31500},
    {"epoch": 0.63, "learning_rate": 4.685410931970115e-05, "loss": 1.4935, "step": 32000},
    {"epoch": 0.64, "learning_rate": 4.6804954777821476e-05, "loss": 1.4733, "step": 32500},
    {"epoch": 0.65, "learning_rate": 4.6755800235941804e-05, "loss": 1.487, "step": 33000},
    {"epoch": 0.66, "learning_rate": 4.670664569406213e-05, "loss": 1.4625, "step": 33500},
    {"epoch": 0.67, "learning_rate": 4.6657491152182466e-05, "loss": 1.4589, "step": 34000},
    {"epoch": 0.68, "learning_rate": 4.6608336610302794e-05, "loss": 1.4498, "step": 34500},
    {"epoch": 0.69, "learning_rate": 4.655918206842312e-05, "loss": 1.4464, "step": 35000},
    {"epoch": 0.7, "learning_rate": 4.651002752654346e-05, "loss": 1.441, "step": 35500},
    {"epoch": 0.71, "learning_rate": 4.6460872984663785e-05, "loss": 1.4486, "step": 36000},
    {"epoch": 0.72, "learning_rate": 4.641171844278411e-05, "loss": 1.4283, "step": 36500},
    {"epoch": 0.73, "learning_rate": 4.636256390090444e-05, "loss": 1.4265, "step": 37000},
    {"epoch": 0.74, "learning_rate": 4.631340935902478e-05, "loss": 1.4173, "step": 37500},
    {"epoch": 0.75, "learning_rate": 4.626425481714511e-05, "loss": 1.4309, "step": 38000},
    {"epoch": 0.76, "learning_rate": 4.621510027526544e-05, "loss": 1.411, "step": 38500},
    {"epoch": 0.77, "learning_rate": 4.6165945733385765e-05, "loss": 1.4155, "step": 39000},
    {"epoch": 0.78, "learning_rate": 4.61167911915061e-05, "loss": 1.4097, "step": 39500},
    {"epoch": 0.79, "learning_rate": 4.606763664962643e-05, "loss": 1.3916, "step": 40000},
    {"epoch": 0.8, "learning_rate": 4.6018482107746756e-05, "loss": 1.399, "step": 40500},
    {"epoch": 0.81, "learning_rate": 4.596932756586709e-05, "loss": 1.3804, "step": 41000},
    {"epoch": 0.82, "learning_rate": 4.592017302398742e-05, "loss": 1.3847, "step": 41500},
    {"epoch": 0.83, "learning_rate": 4.5871018482107746e-05, "loss": 1.3797, "step": 42000},
    {"epoch": 0.84, "learning_rate": 4.5821863940228074e-05, "loss": 1.3729, "step": 42500},
    {"epoch": 0.85, "learning_rate": 4.5772709398348415e-05, "loss": 1.3667, "step": 43000},
    {"epoch": 0.86, "learning_rate": 4.572355485646874e-05, "loss": 1.3765, "step": 43500},
    {"epoch": 0.87, "learning_rate": 4.567440031458907e-05, "loss": 1.378, "step": 44000},
    {"epoch": 0.87, "learning_rate": 4.56252457727094e-05, "loss": 1.3574, "step": 44500},
    {"epoch": 0.88, "learning_rate": 4.5576091230829733e-05, "loss": 1.3604, "step": 45000},
    {"epoch": 0.89, "learning_rate": 4.552693668895006e-05, "loss": 1.3426, "step": 45500},
    {"epoch": 0.9, "learning_rate": 4.547778214707039e-05, "loss": 1.3398, "step": 46000},
    {"epoch": 0.91, "learning_rate": 4.5428627605190724e-05, "loss": 1.3466, "step": 46500},
    {"epoch": 0.92, "learning_rate": 4.537947306331105e-05, "loss": 1.3327, "step": 47000},
    {"epoch": 0.93, "learning_rate": 4.533031852143138e-05, "loss": 1.3253, "step": 47500},
    {"epoch": 0.94, "learning_rate": 4.528116397955171e-05, "loss": 1.337, "step": 48000},
    {"epoch": 0.95, "learning_rate": 4.523200943767204e-05, "loss": 1.3282, "step": 48500},
    {"epoch": 0.96, "learning_rate": 4.518285489579238e-05, "loss": 1.3378, "step": 49000},
    {"epoch": 0.97, "learning_rate": 4.5133700353912705e-05, "loss": 1.3306, "step": 49500},
    {"epoch": 0.98, "learning_rate": 4.508454581203303e-05, "loss": 1.3192, "step": 50000},
    {"epoch": 0.99, "learning_rate": 4.503539127015337e-05, "loss": 1.3191, "step": 50500},
    {"epoch": 1.0, "learning_rate": 4.4986236728273695e-05, "loss": 1.316, "step": 51000},
    {"epoch": 1.01, "learning_rate": 4.493708218639402e-05, "loss": 1.3035, "step": 51500},
    {"epoch": 1.02, "learning_rate": 4.488792764451436e-05, "loss": 1.2951, "step": 52000},
    {"epoch": 1.03, "learning_rate": 4.4838773102634685e-05, "loss": 1.2945, "step": 52500},
    {"epoch": 1.04, "learning_rate": 4.478961856075501e-05, "loss": 1.2912, "step": 53000},
    {"epoch": 1.05, "learning_rate": 4.474046401887534e-05, "loss": 1.2787, "step": 53500},
    {"epoch": 1.06, "learning_rate": 4.4691309476995676e-05, "loss": 1.268, "step": 54000},
    {"epoch": 1.07, "learning_rate": 4.464215493511601e-05, "loss": 1.2773, "step": 54500},
    {"epoch": 1.08, "learning_rate": 4.459300039323634e-05, "loss": 1.2789, "step": 55000},
    {"epoch": 1.09, "learning_rate": 4.4543845851356666e-05, "loss": 1.2872, "step": 55500},
    {"epoch": 1.1, "learning_rate": 4.4494691309477e-05, "loss": 1.2806, "step": 56000},
    {"epoch": 1.11, "learning_rate": 4.444553676759733e-05, "loss": 1.265, "step": 56500},
    {"epoch": 1.12, "learning_rate": 4.4396382225717656e-05, "loss": 1.2772, "step": 57000},
    {"epoch": 1.13, "learning_rate": 4.434722768383799e-05, "loss": 1.2578, "step": 57500},
    {"epoch": 1.14, "learning_rate": 4.429807314195832e-05, "loss": 1.2662, "step": 58000},
    {"epoch": 1.15, "learning_rate": 4.424891860007865e-05, "loss": 1.2703, "step": 58500},
    {"epoch": 1.16, "learning_rate": 4.419976405819898e-05, "loss": 1.2594, "step": 59000},
    {"epoch": 1.17, "learning_rate": 4.415060951631931e-05, "loss": 1.2607, "step": 59500},
    {"epoch": 1.18, "learning_rate": 4.410145497443964e-05, "loss": 1.2501, "step": 60000},
    {"epoch": 1.19, "learning_rate": 4.405230043255997e-05, "loss": 1.2423, "step": 60500},
    {"epoch": 1.2, "learning_rate": 4.40031458906803e-05, "loss": 1.2518, "step": 61000},
    {"epoch": 1.21, "learning_rate": 4.3953991348800634e-05, "loss": 1.25, "step": 61500},
    {"epoch": 1.22, "learning_rate": 4.390483680692096e-05, "loss": 1.2464, "step": 62000},
    {"epoch": 1.23, "learning_rate": 4.385568226504129e-05, "loss": 1.2386, "step": 62500},
    {"epoch": 1.24, "learning_rate": 4.3806527723161625e-05, "loss": 1.2492, "step": 63000},
    {"epoch": 1.25, "learning_rate": 4.375737318128195e-05, "loss": 1.2396, "step": 63500},
    {"epoch": 1.26, "learning_rate": 4.370821863940228e-05, "loss": 1.2515, "step": 64000},
    {"epoch": 1.27, "learning_rate": 4.3659064097522615e-05, "loss": 1.2206, "step": 64500},
    {"epoch": 1.28, "learning_rate": 4.360990955564294e-05, "loss": 1.2247, "step": 65000},
    {"epoch": 1.29, "learning_rate": 4.356075501376327e-05, "loss": 1.2272, "step": 65500},
    {"epoch": 1.3, "learning_rate": 4.3511600471883605e-05, "loss": 1.2426, "step": 66000},
    {"epoch": 1.31, "learning_rate": 4.346244593000394e-05, "loss": 1.2121, "step": 66500},
    {"epoch": 1.32, "learning_rate": 4.341329138812427e-05, "loss": 1.2197, "step": 67000},
    {"epoch": 1.33, "learning_rate": 4.3364136846244596e-05, "loss": 1.2219, "step": 67500},
    {"epoch": 1.34, "learning_rate": 4.3314982304364923e-05, "loss": 1.1951, "step": 68000},
    {"epoch": 1.35, "learning_rate": 4.326582776248526e-05, "loss": 1.2068, "step": 68500},
    {"epoch": 1.36, "learning_rate": 4.3216673220605586e-05, "loss": 1.2066, "step": 69000},
    {"epoch": 1.37, "learning_rate": 4.3167518678725914e-05, "loss": 1.1981, "step": 69500},
    {"epoch": 1.38, "learning_rate": 4.311836413684625e-05, "loss": 1.2024, "step": 70000},
    {"epoch": 1.39, "learning_rate": 4.3069209594966576e-05, "loss": 1.2009, "step": 70500},
    {"epoch": 1.4, "learning_rate": 4.3020055053086904e-05, "loss": 1.2027, "step": 71000},
    {"epoch": 1.41, "learning_rate": 4.297090051120723e-05, "loss": 1.2003, "step": 71500},
    {"epoch": 1.42, "learning_rate": 4.2921745969327573e-05, "loss": 1.1934, "step": 72000},
    {"epoch": 1.43, "learning_rate": 4.28725914274479e-05, "loss": 1.2002, "step": 72500},
    {"epoch": 1.44, "learning_rate": 4.282343688556823e-05, "loss": 1.1969, "step": 73000},
    {"epoch": 1.45, "learning_rate": 4.277428234368856e-05, "loss": 1.1947, "step": 73500},
    {"epoch": 1.45, "learning_rate": 4.272512780180889e-05, "loss": 1.1822, "step": 74000},
    {"epoch": 1.46, "learning_rate": 4.267597325992922e-05, "loss": 1.1913, "step": 74500},
    {"epoch": 1.47, "learning_rate": 4.262681871804955e-05, "loss": 1.1953, "step": 75000},
    {"epoch": 1.48, "learning_rate": 4.257766417616988e-05, "loss": 1.1846, "step": 75500},
    {"epoch": 1.49, "learning_rate": 4.252850963429021e-05, "loss": 1.1956, "step": 76000},
    {"epoch": 1.5, "learning_rate": 4.247935509241054e-05, "loss": 1.1806, "step": 76500},
    {"epoch": 1.51, "learning_rate": 4.2430200550530866e-05, "loss": 1.1779, "step": 77000},
    {"epoch": 1.52, "learning_rate": 4.238104600865121e-05, "loss": 1.174, "step": 77500},
    {"epoch": 1.53, "learning_rate": 4.2331891466771535e-05, "loss": 1.1799, "step": 78000},
    {"epoch": 1.54, "learning_rate": 4.228273692489186e-05, "loss": 1.1598, "step": 78500},
    {"epoch": 1.55, "learning_rate": 4.223358238301219e-05, "loss": 1.1724, "step": 79000},
    {"epoch": 1.56, "learning_rate": 4.2184427841132525e-05, "loss": 1.1707, "step": 79500},
    {"epoch": 1.57, "learning_rate": 4.213527329925285e-05, "loss": 1.175, "step": 80000},
    {"epoch": 1.58, "learning_rate": 4.208611875737318e-05, "loss": 1.1588, "step": 80500},
    {"epoch": 1.59, "learning_rate": 4.2036964215493516e-05, "loss": 1.1796, "step": 81000},
    {"epoch": 1.6, "learning_rate": 4.1987809673613843e-05, "loss": 1.1678, "step": 81500},
    {"epoch": 1.61, "learning_rate": 4.193865513173417e-05, "loss": 1.1689, "step": 82000},
    {"epoch": 1.62, "learning_rate": 4.18895005898545e-05, "loss": 1.1536, "step": 82500},
    {"epoch": 1.63, "learning_rate": 4.1840346047974834e-05, "loss": 1.1578, "step": 83000},
    {"epoch": 1.64, "learning_rate": 4.179119150609517e-05, "loss": 1.1559, "step": 83500},
    {"epoch": 1.65, "learning_rate": 4.1742036964215496e-05, "loss": 1.1559, "step": 84000},
    {"epoch": 1.66, "learning_rate": 4.1692882422335824e-05, "loss": 1.1495, "step": 84500},
    {"epoch": 1.67, "learning_rate": 4.164372788045616e-05, "loss": 1.1531, "step": 85000},
    {"epoch": 1.68, "learning_rate": 4.159457333857649e-05, "loss": 1.1426, "step": 85500},
    {"epoch": 1.69, "learning_rate": 4.1545418796696815e-05, "loss": 1.1647, "step": 86000},
    {"epoch": 1.7, "learning_rate": 4.149626425481715e-05, "loss": 1.1457, "step": 86500},
    {"epoch": 1.71, "learning_rate": 4.144710971293748e-05, "loss": 1.1493, "step": 87000},
    {"epoch": 1.72, "learning_rate": 4.1397955171057805e-05, "loss": 1.1545, "step": 87500},
    {"epoch": 1.73, "learning_rate": 4.134880062917813e-05, "loss": 1.1392, "step": 88000},
    {"epoch": 1.74, "learning_rate": 4.129964608729847e-05, "loss": 1.1561, "step": 88500},
    {"epoch": 1.75, "learning_rate": 4.12504915454188e-05, "loss": 1.1363, "step": 89000},
    {"epoch": 1.76, "learning_rate": 4.120133700353913e-05, "loss": 1.1419, "step": 89500},
    {"epoch": 1.77, "learning_rate": 4.115218246165946e-05, "loss": 1.1408, "step": 90000},
    {"epoch": 1.78, "learning_rate": 4.110302791977979e-05, "loss": 1.1412, "step": 90500},
    {"epoch": 1.79, "learning_rate": 4.105387337790012e-05, "loss": 1.1396, "step": 91000},
    {"epoch": 1.8, "learning_rate": 4.100471883602045e-05, "loss": 1.1399, "step": 91500},
    {"epoch": 1.81, "learning_rate": 4.095556429414078e-05, "loss": 1.1394, "step": 92000},
    {"epoch": 1.82, "learning_rate": 4.090640975226111e-05, "loss": 1.1282, "step": 92500},
    {"epoch": 1.83, "learning_rate": 4.085725521038144e-05, "loss": 1.1286, "step": 93000},
    {"epoch": 1.84, "learning_rate": 4.0808100668501766e-05, "loss": 1.1295, "step": 93500},
    {"epoch": 1.85, "learning_rate": 4.07589461266221e-05, "loss": 1.132, "step": 94000},
    {"epoch": 1.86, "learning_rate": 4.070979158474243e-05, "loss": 1.1294, "step": 94500},
    {"epoch": 1.87, "learning_rate": 4.0660637042862763e-05, "loss": 1.1155, "step": 95000},
    {"epoch": 1.88, "learning_rate": 4.061148250098309e-05, "loss": 1.1194, "step": 95500},
    {"epoch": 1.89, "learning_rate": 4.0562327959103426e-05, "loss": 1.1236, "step": 96000},
    {"epoch": 1.9, "learning_rate": 4.0513173417223754e-05, "loss": 1.119, "step": 96500},
    {"epoch": 1.91, "learning_rate": 4.046401887534408e-05, "loss": 1.1285, "step": 97000},
    {"epoch": 1.92, "learning_rate": 4.0414864333464416e-05, "loss": 1.1138, "step": 97500},
    {"epoch": 1.93, "learning_rate": 4.0365709791584744e-05, "loss": 1.1138, "step": 98000},
    {"epoch": 1.94, "learning_rate": 4.031655524970507e-05, "loss": 1.1217, "step": 98500},
    {"epoch": 1.95, "learning_rate": 4.026740070782541e-05, "loss": 1.1228, "step": 99000},
    {"epoch": 1.96, "learning_rate": 4.0218246165945735e-05, "loss": 1.121, "step": 99500},
    {"epoch": 1.97, "learning_rate": 4.016909162406606e-05, "loss": 1.114, "step": 100000},
    {"epoch": 1.98, "learning_rate": 4.01199370821864e-05, "loss": 1.0939, "step": 100500},
    {"epoch": 1.99, "learning_rate": 4.0070782540306725e-05, "loss": 1.1102, "step": 101000},
    {"epoch": 2.0, "learning_rate": 4.002162799842706e-05, "loss": 1.1052, "step": 101500},
    {"epoch": 2.01, "learning_rate": 3.997247345654739e-05, "loss": 1.1097, "step": 102000},
    {"epoch": 2.02, "learning_rate": 3.9923318914667715e-05, "loss": 1.0889, "step": 102500},
    {"epoch": 2.03, "learning_rate": 3.987416437278805e-05, "loss": 1.0865, "step": 103000},
    {"epoch": 2.03, "learning_rate": 3.982500983090838e-05, "loss": 1.0952, "step": 103500},
    {"epoch": 2.04, "learning_rate": 3.9775855289028706e-05, "loss": 1.0877, "step": 104000},
    {"epoch": 2.05, "learning_rate": 3.972670074714904e-05, "loss": 1.0977, "step": 104500},
    {"epoch": 2.06, "learning_rate": 3.967754620526937e-05, "loss": 1.0931, "step": 105000},
    {"epoch": 2.07, "learning_rate": 3.9628391663389696e-05, "loss": 1.0867, "step": 105500},
    {"epoch": 2.08, "learning_rate": 3.957923712151003e-05, "loss": 1.0886, "step": 106000},
    {"epoch": 2.09, "learning_rate": 3.9530082579630365e-05, "loss": 1.0922, "step": 106500},
    {"epoch": 2.1, "learning_rate": 3.948092803775069e-05, "loss": 1.0695, "step": 107000},
    {"epoch": 2.11, "learning_rate": 3.943177349587102e-05, "loss": 1.0791, "step": 107500},
    {"epoch": 2.12, "learning_rate": 3.938261895399135e-05, "loss": 1.0952, "step": 108000},
    {"epoch": 2.13, "learning_rate": 3.9333464412111683e-05, "loss": 1.0809, "step": 108500},
    {"epoch": 2.14, "learning_rate": 3.928430987023201e-05, "loss": 1.0857, "step": 109000},
    {"epoch": 2.15, "learning_rate": 3.923515532835234e-05, "loss": 1.0904, "step": 109500},
    {"epoch": 2.16, "learning_rate": 3.9186000786472674e-05, "loss": 1.075, "step": 110000},
    {"epoch": 2.17, "learning_rate": 3.9136846244593e-05, "loss": 1.0758, "step": 110500},
    {"epoch": 2.18, "learning_rate": 3.908769170271333e-05, "loss": 1.0709, "step": 111000},
    {"epoch": 2.19, "learning_rate": 3.903853716083366e-05, "loss": 1.0911, "step": 111500},
    {"epoch": 2.2, "learning_rate": 3.8989382618954e-05, "loss": 1.0792, "step": 112000},
    {"epoch": 2.21, "learning_rate": 3.894022807707433e-05, "loss": 1.0647, "step": 112500},
    {"epoch": 2.22, "learning_rate": 3.8891073535194655e-05, "loss": 1.075, "step": 113000},
    {"epoch": 2.23, "learning_rate": 3.884191899331498e-05, "loss": 1.0679, "step": 113500},
    {"epoch": 2.24, "learning_rate": 3.879276445143532e-05, "loss": 1.0721, "step": 114000},
    {"epoch": 2.25, "learning_rate": 3.8743609909555645e-05, "loss": 1.0644, "step": 114500},
    {"epoch": 2.26, "learning_rate": 3.869445536767597e-05, "loss": 1.065, "step": 115000},
    {"epoch": 2.27, "learning_rate": 3.864530082579631e-05, "loss": 1.0658, "step": 115500},
    {"epoch": 2.28, "learning_rate": 3.8596146283916635e-05, "loss": 1.0685, "step": 116000},
    {"epoch": 2.29, "learning_rate": 3.854699174203696e-05, "loss": 1.0833, "step": 116500},
    {"epoch": 2.3, "learning_rate": 3.849783720015729e-05, "loss": 1.0584, "step": 117000},
    {"epoch": 2.31, "learning_rate": 3.844868265827763e-05, "loss": 1.0713, "step": 117500},
    {"epoch": 2.32, "learning_rate": 3.839952811639796e-05, "loss": 1.0549, "step": 118000},
    {"epoch": 2.33, "learning_rate": 3.835037357451829e-05, "loss": 1.0563, "step": 118500},
    {"epoch": 2.34, "learning_rate": 3.8301219032638616e-05, "loss": 1.0497, "step": 119000},
    {"epoch": 2.35, "learning_rate": 3.825206449075895e-05, "loss": 1.062, "step": 119500},
    {"epoch": 2.36, "learning_rate": 3.820290994887928e-05, "loss": 1.0632, "step": 120000},
    {"epoch": 2.37, "learning_rate": 3.8153755406999606e-05, "loss": 1.0727, "step": 120500},
    {"epoch": 2.38, "learning_rate": 3.810460086511994e-05, "loss": 1.0598, "step": 121000},
    {"epoch": 2.39, "learning_rate": 3.805544632324027e-05, "loss": 1.0472, "step": 121500},
    {"epoch": 2.4, "learning_rate": 3.80062917813606e-05, "loss": 1.0476, "step": 122000},
    {"epoch": 2.41, "learning_rate": 3.7957137239480925e-05, "loss": 1.0586, "step": 122500},
    {"epoch": 2.42, "learning_rate": 3.790798269760126e-05, "loss": 1.0544, "step": 123000},
    {"epoch": 2.43, "learning_rate": 3.7858828155721594e-05, "loss": 1.0625, "step": 123500},
    {"epoch": 2.44, "learning_rate": 3.780967361384192e-05, "loss": 1.0521, "step": 124000},
    {"epoch": 2.45, "learning_rate": 3.776051907196225e-05, "loss": 1.0443, "step": 124500},
    {"epoch": 2.46, "learning_rate": 3.7711364530082584e-05, "loss": 1.0614, "step": 125000},
    {"epoch": 2.47, "learning_rate": 3.766220998820291e-05, "loss": 1.0455, "step": 125500},
    {"epoch": 2.48, "learning_rate": 3.761305544632324e-05, "loss": 1.0448, "step": 126000},
    {"epoch": 2.49, "learning_rate": 3.7563900904443575e-05, "loss": 1.0527, "step": 126500},
    {"epoch": 2.5, "learning_rate": 3.75147463625639e-05, "loss": 1.0393, "step": 127000},
    {"epoch": 2.51, "learning_rate": 3.746559182068423e-05, "loss": 1.0405, "step": 127500},
    {"epoch": 2.52, "learning_rate": 3.741643727880456e-05, "loss": 1.0341, "step": 128000},
    {"epoch": 2.53, "learning_rate": 3.736728273692489e-05, "loss": 1.0404, "step": 128500},
    {"epoch": 2.54, "learning_rate": 3.731812819504523e-05, "loss": 1.0347, "step": 129000},
    {"epoch": 2.55, "learning_rate": 3.7268973653165555e-05, "loss": 1.0495, "step": 129500},
    {"epoch": 2.56, "learning_rate": 3.721981911128588e-05, "loss": 1.0613, "step": 130000},
    {"epoch": 2.57, "learning_rate": 3.717066456940622e-05, "loss": 1.0439, "step": 130500},
    {"epoch": 2.58, "learning_rate": 3.7121510027526546e-05, "loss": 1.0346, "step": 131000},
    {"epoch": 2.59, "learning_rate": 3.7072355485646873e-05, "loss": 1.0467, "step": 131500},
    {"epoch": 2.6, "learning_rate": 3.702320094376721e-05, "loss": 1.0357, "step": 132000},
    {"epoch": 2.61, "learning_rate": 3.6974046401887536e-05, "loss": 1.037, "step": 132500},
    {"epoch": 2.62, "learning_rate": 3.6924891860007864e-05, "loss": 1.04, "step": 133000},
    {"epoch": 2.62, "learning_rate": 3.687573731812819e-05, "loss": 1.0377, "step": 133500},
    {"epoch": 2.63, "learning_rate": 3.6826582776248526e-05, "loss": 1.037, "step": 134000},
    {"epoch": 2.64, "learning_rate": 3.6777428234368854e-05, "loss": 1.0349, "step": 134500},
    {"epoch": 2.65, "learning_rate": 3.672827369248919e-05, "loss": 1.0344, "step": 135000},
    {"epoch": 2.66, "learning_rate": 3.667911915060952e-05, "loss": 1.0398, "step": 135500},
    {"epoch": 2.67, "learning_rate": 3.662996460872985e-05, "loss": 1.0351, "step": 136000},
    {"epoch": 2.68, "learning_rate": 3.658081006685018e-05, "loss": 1.0319, "step": 136500},
    {"epoch": 2.69, "learning_rate": 3.653165552497051e-05, "loss": 1.0313, "step": 137000},
    {"epoch": 2.7, "learning_rate": 3.648250098309084e-05, "loss": 1.0188, "step": 137500},
    {"epoch": 2.71, "learning_rate": 3.643334644121117e-05, "loss": 1.0337, "step": 138000},
    {"epoch": 2.72, "learning_rate": 3.63841918993315e-05, "loss": 1.0237, "step": 138500},
    {"epoch": 2.73, "learning_rate": 3.633503735745183e-05, "loss": 1.019, "step": 139000},
    {"epoch": 2.74, "learning_rate": 3.628588281557216e-05, "loss": 1.0349, "step": 139500},
    {"epoch": 2.75, "learning_rate": 3.623672827369249e-05, "loss": 1.0288, "step": 140000},
    {"epoch": 2.76, "learning_rate": 3.618757373181282e-05, "loss": 1.0249, "step": 140500},
    {"epoch": 2.77, "learning_rate": 3.613841918993316e-05, "loss": 1.0196, "step": 141000},
    {"epoch": 2.78, "learning_rate": 3.6089264648053485e-05, "loss": 1.0194, "step": 141500},
    {"epoch": 2.79, "learning_rate": 3.604011010617381e-05, "loss": 1.0231, "step": 142000},
    {"epoch": 2.8, "learning_rate": 3.599095556429414e-05, "loss": 1.023, "step": 142500},
    {"epoch": 2.81, "learning_rate": 3.5941801022414475e-05, "loss": 1.0237, "step": 143000},
    {"epoch": 2.82, "learning_rate": 3.58926464805348e-05, "loss": 1.0143, "step": 143500},
    {"epoch": 2.83, "learning_rate": 3.584349193865513e-05, "loss": 1.0362, "step": 144000},
    {"epoch": 2.84, "learning_rate": 3.5794337396775466e-05, "loss": 1.0177, "step": 144500},
    {"epoch": 2.85, "learning_rate": 3.5745182854895793e-05, "loss": 1.0137, "step": 145000},
    {"epoch": 2.86, "learning_rate": 3.569602831301612e-05, "loss": 1.0201, "step": 145500},
    {"epoch": 2.87, "learning_rate": 3.564687377113645e-05, "loss": 1.0108, "step": 146000},
    {"epoch": 2.88, "learning_rate": 3.559771922925679e-05, "loss": 1.0214, "step": 146500},
    {"epoch": 2.89, "learning_rate": 3.554856468737712e-05, "loss": 1.0192, "step": 147000},
    {"epoch": 2.9, "learning_rate": 3.5499410145497446e-05, "loss": 1.0106, "step": 147500},
    {"epoch": 2.91, "learning_rate": 3.5450255603617774e-05, "loss": 1.007, "step": 148000},
    {"epoch": 2.92, "learning_rate": 3.540110106173811e-05, "loss": 1.0105, "step": 148500},
    {"epoch": 2.93, "learning_rate": 3.535194651985844e-05, "loss": 1.01, "step": 149000},
    {"epoch": 2.94, "learning_rate": 3.5302791977978765e-05, "loss": 0.9991, "step": 149500},
    {"epoch": 2.95, "learning_rate": 3.52536374360991e-05, "loss": 1.0076, "step": 150000},
    {"epoch": 2.96, "learning_rate": 3.520448289421943e-05, "loss": 0.9889, "step": 150500},
    {"epoch": 2.97, "learning_rate": 3.5155328352339755e-05, "loss": 1.0086, "step": 151000},
    {"epoch": 2.98, "learning_rate": 3.510617381046008e-05, "loss": 0.9982, "step": 151500},
    {"epoch": 2.99, "learning_rate": 3.5057019268580424e-05, "loss": 1.0239, "step": 152000},
    {"epoch": 3.0, "learning_rate": 3.500786472670075e-05, "loss": 1.0058, "step": 152500},
    {"epoch": 3.01, "learning_rate": 3.495871018482108e-05, "loss": 0.9919, "step": 153000},
    {"epoch": 3.02, "learning_rate": 3.490955564294141e-05, "loss": 0.9908, "step": 153500},
    {"epoch": 3.03, "learning_rate": 3.486040110106174e-05, "loss": 0.9909, "step": 154000},
    {"epoch": 3.04, "learning_rate": 3.481124655918207e-05, "loss": 0.9875, "step": 154500},
    {"epoch": 3.05, "learning_rate": 3.47620920173024e-05, "loss": 0.9964, "step": 155000},
    {"epoch": 3.06, "learning_rate": 3.471293747542273e-05, "loss": 1.0046, "step": 155500},
    {"epoch": 3.07, "learning_rate": 3.466378293354306e-05, "loss": 0.9902, "step": 156000},
    {"epoch": 3.08, "learning_rate": 3.461462839166339e-05, "loss": 0.9969, "step": 156500},
    {"epoch": 3.09, "learning_rate": 3.4565473849783716e-05, "loss": 0.9927, "step": 157000},
    {"epoch": 3.1, "learning_rate": 3.451631930790405e-05, "loss": 0.982, "step": 157500},
    {"epoch": 3.11, "learning_rate": 3.4467164766024386e-05, "loss": 0.9925, "step": 158000},
    {"epoch": 3.12, "learning_rate": 3.4418010224144713e-05, "loss": 0.9998, "step": 158500},
    {"epoch": 3.13, "learning_rate": 3.436885568226504e-05, "loss": 0.9885, "step": 159000},
    {"epoch": 3.14, "learning_rate": 3.4319701140385376e-05, "loss": 0.9937, "step": 159500},
    {"epoch": 3.15, "learning_rate": 3.4270546598505704e-05, "loss": 0.994, "step": 160000},
    {"epoch": 3.16, "learning_rate": 3.422139205662603e-05, "loss": 0.9903, "step": 160500},
    {"epoch": 3.17, "learning_rate": 3.4172237514746366e-05, "loss": 0.9833, "step": 161000},
    {"epoch": 3.18, "learning_rate": 3.4123082972866694e-05, "loss": 0.9885, "step": 161500},
    {"epoch": 3.19, "learning_rate": 3.407392843098702e-05, "loss": 0.9767, "step": 162000},
    {"epoch": 3.2, "learning_rate": 3.402477388910735e-05, "loss": 0.9803, "step": 162500},
    {"epoch": 3.2, "learning_rate": 3.3975619347227685e-05, "loss": 0.9805, "step": 163000},
    {"epoch": 3.21, "learning_rate": 3.392646480534802e-05, "loss": 0.989, "step": 163500},
    {"epoch": 3.22, "learning_rate": 3.387731026346835e-05, "loss": 0.9912, "step": 164000},
    {"epoch": 3.23, "learning_rate": 3.3828155721588675e-05, "loss": 0.9862, "step": 164500},
    {"epoch": 3.24, "learning_rate": 3.377900117970901e-05, "loss": 0.9749, "step": 165000},
    {"epoch": 3.25, "learning_rate": 3.372984663782934e-05, "loss": 0.9795, "step": 165500},
    {"epoch": 3.26, "learning_rate": 3.3680692095949665e-05, "loss": 0.9771, "step": 166000},
    {"epoch": 3.27, "learning_rate": 3.363153755407e-05, "loss": 0.9764, "step": 166500},
    {"epoch": 3.28, "learning_rate": 3.358238301219033e-05, "loss": 0.9857, "step": 167000},
    {"epoch": 3.29, "learning_rate": 3.3533228470310656e-05, "loss": 0.9892, "step": 167500},
    {"epoch": 3.3, "learning_rate": 3.3484073928430984e-05, "loss": 0.9787, "step": 168000},
    {"epoch": 3.31, "learning_rate": 3.343491938655132e-05, "loss": 0.9915, "step": 168500},
    {"epoch": 3.32, "learning_rate": 3.338576484467165e-05, "loss": 0.9666, "step": 169000},
    {"epoch": 3.33, "learning_rate": 3.333661030279198e-05, "loss": 0.9831, "step": 169500},
    {"epoch": 3.34, "learning_rate": 3.328745576091231e-05, "loss": 0.9747, "step": 170000},
    {"epoch": 3.35, "learning_rate": 3.323830121903264e-05, "loss": 0.9757, "step": 170500},
    {"epoch": 3.36, "learning_rate": 3.318914667715297e-05, "loss": 0.9724, "step": 171000},
    {"epoch": 3.37, "learning_rate": 3.31399921352733e-05, "loss": 0.9753, "step": 171500},
    {"epoch": 3.38, "learning_rate": 3.3090837593393633e-05, "loss": 0.979, "step": 172000},
    {"epoch": 3.39, "learning_rate": 3.304168305151396e-05, "loss": 0.973, "step": 172500},
    {"epoch": 3.4, "learning_rate": 3.299252850963429e-05, "loss": 0.9694, "step": 173000},
    {"epoch": 3.41, "learning_rate": 3.2943373967754624e-05, "loss": 0.9577, "step": 173500},
    {"epoch": 3.42, "learning_rate": 3.289421942587495e-05, "loss": 0.971, "step": 174000},
    {"epoch": 3.43, "learning_rate": 3.284506488399528e-05, "loss": 0.9624, "step": 174500},
    {"epoch": 3.44, "learning_rate": 3.2795910342115614e-05, "loss": 0.9716, "step": 175000},
    {"epoch": 3.45, "learning_rate": 3.274675580023594e-05, "loss": 0.9722, "step": 175500},
    {"epoch": 3.46, "learning_rate": 3.269760125835628e-05, "loss": 0.9705, "step": 176000},
    {"epoch": 3.47, "learning_rate": 3.2648446716476605e-05, "loss": 0.9772, "step": 176500},
    {"epoch": 3.48, "learning_rate": 3.259929217459693e-05, "loss": 0.972, "step": 177000},
    {"epoch": 3.49, "learning_rate": 3.255013763271727e-05, "loss": 0.959, "step": 177500},
    {"epoch": 3.5, "learning_rate": 3.2500983090837595e-05, "loss": 0.9589, "step": 178000},
    {"epoch": 3.51, "learning_rate": 3.245182854895792e-05, "loss": 0.9893, "step": 178500},
    {"epoch": 3.52, "learning_rate": 3.240267400707826e-05, "loss": 0.9634, "step": 179000},
    {"epoch": 3.53, "learning_rate": 3.2353519465198585e-05, "loss": 0.9634, "step": 179500},
    {"epoch": 3.54, "learning_rate": 3.230436492331891e-05, "loss": 0.9689, "step": 180000},
    {"epoch": 3.55, "learning_rate": 3.225521038143925e-05, "loss": 0.9702, "step": 180500},
    {"epoch": 3.56, "learning_rate": 3.220605583955958e-05, "loss": 0.9681, "step": 181000},
    {"epoch": 3.57, "learning_rate": 3.215690129767991e-05, "loss": 0.9618, "step": 181500},
    {"epoch": 3.58, "learning_rate": 3.210774675580024e-05, "loss": 0.9647, "step": 182000},
    {"epoch": 3.59, "learning_rate": 3.2058592213920566e-05, "loss": 0.967, "step": 182500},
    {"epoch": 3.6, "learning_rate": 3.20094376720409e-05, "loss": 0.9672, "step": 183000},
    {"epoch": 3.61, "learning_rate": 3.196028313016123e-05, "loss": 0.9629, "step": 183500},
    {"epoch": 3.62, "learning_rate": 3.1911128588281556e-05, "loss": 0.9462, "step": 184000},
    {"epoch": 3.63, "learning_rate": 3.186197404640189e-05, "loss": 0.9627, "step": 184500},
    {"epoch": 3.64, "learning_rate": 3.181281950452222e-05, "loss": 0.9598, "step": 185000},
    {"epoch": 3.65, "learning_rate": 3.176366496264255e-05, "loss": 0.9721, "step": 185500},
    {"epoch": 3.66, "learning_rate": 3.1714510420762875e-05, "loss": 0.9611, "step": 186000},
    {"epoch": 3.67, "learning_rate": 3.1665355878883216e-05, "loss": 0.9586, "step": 186500},
    {"epoch": 3.68, "learning_rate": 3.1616201337003544e-05, "loss": 0.9655, "step": 187000},
    {"epoch": 3.69, "learning_rate": 3.156704679512387e-05, "loss": 0.961, "step": 187500},
    {"epoch": 3.7, "learning_rate": 3.15178922532442e-05, "loss": 0.9581, "step": 188000},
    {"epoch": 3.71, "learning_rate": 3.1468737711364534e-05, "loss": 0.9596, "step": 188500},
    {"epoch": 3.72, "learning_rate": 3.141958316948486e-05, "loss": 0.9639, "step": 189000},
    {"epoch": 3.73, "learning_rate": 3.137042862760519e-05, "loss": 0.9501, "step": 189500},
    {"epoch": 3.74, "learning_rate": 3.1321274085725525e-05, "loss": 0.9481, "step": 190000},
    {"epoch": 3.75, "learning_rate": 3.127211954384585e-05, "loss": 0.9452, "step": 190500},
    {"epoch": 3.76, "learning_rate": 3.122296500196618e-05, "loss": 0.9524, "step": 191000},
    {"epoch": 3.77, "learning_rate": 3.117381046008651e-05, "loss": 0.9552, "step": 191500},
    {"epoch": 3.78, "learning_rate": 3.112465591820685e-05, "loss": 0.9461, "step": 192000},
    {"epoch": 3.78, "learning_rate": 3.107550137632718e-05, "loss": 0.9513, "step": 192500},
    {"epoch": 3.79, "learning_rate": 3.1026346834447505e-05, "loss": 0.9568, "step": 193000},
    {"epoch": 3.8, "learning_rate": 3.097719229256783e-05, "loss": 0.9367, "step": 193500},
    {"epoch": 3.81, "learning_rate": 3.092803775068817e-05, "loss": 0.9545, "step": 194000},
    {"epoch": 3.82, "learning_rate": 3.0878883208808496e-05, "loss": 0.9552, "step": 194500},
    {"epoch": 3.83, "learning_rate": 3.0829728666928824e-05, "loss": 0.963, "step": 195000},
    {"epoch": 3.84, "learning_rate": 3.078057412504916e-05, "loss": 0.9411, "step": 195500},
    {"epoch": 3.85, "learning_rate": 3.0731419583169486e-05, "loss": 0.9481, "step": 196000},
    {"epoch": 3.86, "learning_rate": 3.0682265041289814e-05, "loss": 0.9341, "step": 196500},
    {"epoch": 3.87, "learning_rate": 3.063311049941014e-05, "loss": 0.9566, "step": 197000},
    {"epoch": 3.88, "learning_rate": 3.0583955957530476e-05, "loss": 0.9554, "step": 197500},
    {"epoch": 3.89, "learning_rate": 3.053480141565081e-05, "loss": 0.9637, "step": 198000},
    {"epoch": 3.9, "learning_rate": 3.048564687377114e-05, "loss": 0.9493, "step": 198500},
    {"epoch": 3.91, "learning_rate": 3.043649233189147e-05, "loss": 0.9574, "step": 199000},
    {"epoch": 3.92, "learning_rate": 3.0387337790011798e-05, "loss": 0.9536, "step": 199500},
    {"epoch": 3.93, "learning_rate": 3.033818324813213e-05, "loss": 0.9492, "step": 200000},
    {"epoch": 3.94, "learning_rate": 3.028902870625246e-05, "loss": 0.9514, "step": 200500},
    {"epoch": 3.95, "learning_rate": 3.023987416437279e-05, "loss": 0.9426, "step": 201000},
    {"epoch": 3.96, "learning_rate": 3.019071962249312e-05, "loss": 0.9471, "step": 201500},
    {"epoch": 3.97, "learning_rate": 3.0141565080613447e-05, "loss": 0.9454, "step": 202000},
    {"epoch": 3.98, "learning_rate": 3.009241053873378e-05, "loss": 0.9401, "step": 202500},
    {"epoch": 3.99, "learning_rate": 3.004325599685411e-05, "loss": 0.954, "step": 203000},
    {"epoch": 4.0, "learning_rate": 2.9994101454974445e-05, "loss": 0.9383, "step": 203500},
    {"epoch": 4.01, "learning_rate": 2.9944946913094772e-05, "loss": 0.9322, "step": 204000},
    {"epoch": 4.02, "learning_rate": 2.9895792371215104e-05, "loss": 0.9393, "step": 204500},
    {"epoch": 4.03, "learning_rate": 2.984663782933543e-05, "loss": 0.929, "step": 205000},
    {"epoch": 4.04, "learning_rate": 2.9797483287455763e-05, "loss": 0.937, "step": 205500},
    {"epoch": 4.05, "learning_rate": 2.9748328745576094e-05, "loss": 0.9368, "step": 206000},
    {"epoch": 4.06, "learning_rate": 2.9699174203696422e-05, "loss": 0.926, "step": 206500},
    {"epoch": 4.07, "learning_rate": 2.9650019661816753e-05, "loss": 0.923, "step": 207000},
    {"epoch": 4.08, "learning_rate": 2.960086511993708e-05, "loss": 0.9243, "step": 207500},
    {"epoch": 4.09, "learning_rate": 2.9551710578057412e-05, "loss": 0.9342, "step": 208000},
    {"epoch": 4.1, "learning_rate": 2.9502556036177744e-05, "loss": 0.9409, "step": 208500},
    {"epoch": 4.11, "learning_rate": 2.945340149429807e-05, "loss": 0.9253, "step": 209000},
    {"epoch": 4.12, "learning_rate": 2.9404246952418406e-05, "loss": 0.9314, "step": 209500},
    {"epoch": 4.13, "learning_rate": 2.9355092410538737e-05, "loss": 0.9374, "step": 210000},
    {"epoch": 4.14, "learning_rate": 2.930593786865907e-05, "loss": 0.9401, "step": 210500},
    {"epoch": 4.15, "learning_rate": 2.9256783326779396e-05, "loss": 0.9256, "step": 211000},
    {"epoch": 4.16, "learning_rate": 2.9207628784899728e-05, "loss": 0.93, "step": 211500},
    {"epoch": 4.17, "learning_rate": 2.9158474243020055e-05, "loss": 0.9268, "step": 212000},
    {"epoch": 4.18, "learning_rate": 2.9109319701140387e-05, "loss": 0.9367, "step": 212500},
    {"epoch": 4.19, "learning_rate": 2.9060165159260715e-05, "loss": 0.9225, "step": 213000},
    {"epoch": 4.2, "learning_rate": 2.9011010617381046e-05, "loss": 0.9264, "step": 213500},
    {"epoch": 4.21, "learning_rate": 2.8961856075501377e-05, "loss": 0.9289, "step": 214000},
    {"epoch": 4.22, "learning_rate": 2.8912701533621705e-05, "loss": 0.9337, "step": 214500},
    {"epoch": 4.23, "learning_rate": 2.886354699174204e-05, "loss": 0.925, "step": 215000},
    {"epoch": 4.24, "learning_rate": 2.881439244986237e-05, "loss": 0.9501, "step": 215500},
    {"epoch": 4.25, "learning_rate": 2.8765237907982702e-05, "loss": 0.9232, "step": 216000},
    {"epoch": 4.26, "learning_rate": 2.871608336610303e-05, "loss": 0.9317, "step": 216500},
    {"epoch": 4.27, "learning_rate": 2.866692882422336e-05, "loss": 0.9341, "step": 217000},
    {"epoch": 4.28, "learning_rate": 2.861777428234369e-05, "loss": 0.9314, "step": 217500},
    {"epoch": 4.29, "learning_rate": 2.856861974046402e-05, "loss": 0.9284, "step": 218000},
    {"epoch": 4.3, "learning_rate": 2.8519465198584348e-05, "loss": 0.9287, "step": 218500},
    {"epoch": 4.31, "learning_rate": 2.847031065670468e-05, "loss": 0.9219, "step": 219000},
    {"epoch": 4.32, "learning_rate": 2.842115611482501e-05, "loss": 0.9336, "step": 219500},
    {"epoch": 4.33, "learning_rate": 2.837200157294534e-05, "loss": 0.9202, "step": 220000},
    {"epoch": 4.34, "learning_rate": 2.832284703106567e-05, "loss": 0.9282, "step": 220500},
    {"epoch": 4.35, "learning_rate": 2.8273692489186004e-05, "loss": 0.9189, "step": 221000},
    {"epoch": 4.36, "learning_rate": 2.8224537947306336e-05, "loss": 0.9112, "step": 221500},
    {"epoch": 4.36, "learning_rate": 2.8175383405426664e-05, "loss": 0.9356, "step": 222000},
    {"epoch": 4.37, "learning_rate": 2.8126228863546995e-05, "loss": 0.9301, "step": 222500},
    {"epoch": 4.38, "learning_rate": 2.8077074321667323e-05, "loss": 0.9275, "step": 223000},
    {"epoch": 4.39, "learning_rate": 2.8027919779787654e-05, "loss": 0.9387, "step": 223500},
    {"epoch": 4.4, "learning_rate": 2.7978765237907985e-05, "loss": 0.9155, "step": 224000},
    {"epoch": 4.41, "learning_rate": 2.7929610696028313e-05, "loss": 0.9254, "step": 224500},
    {"epoch": 4.42, "learning_rate": 2.7880456154148644e-05, "loss": 0.9316, "step": 225000},
    {"epoch": 4.43, "learning_rate": 2.7831301612268972e-05, "loss": 0.9216, "step": 225500},
    {"epoch": 4.44, "learning_rate": 2.7782147070389303e-05, "loss": 0.9174, "step": 226000},
    {"epoch": 4.45, "learning_rate": 2.7732992528509638e-05, "loss": 0.9299, "step": 226500},
    {"epoch": 4.46, "learning_rate": 2.768383798662997e-05, "loss": 0.9068, "step": 227000},
    {"epoch": 4.47, "learning_rate": 2.7634683444750297e-05, "loss": 0.9211, "step": 227500},
    {"epoch": 4.48, "learning_rate": 2.758552890287063e-05, "loss": 0.9258, "step": 228000},
    {"epoch": 4.49, "learning_rate": 2.7536374360990956e-05, "loss": 0.9127, "step": 228500},
    {"epoch": 4.5, "learning_rate": 2.7487219819111287e-05, "loss": 0.9221, "step": 229000},
    {"epoch": 4.51, "learning_rate": 2.743806527723162e-05, "loss": 0.9136, "step": 229500},
    {"epoch": 4.52, "learning_rate": 2.7388910735351947e-05, "loss": 0.9226, "step": 230000},
    {"epoch": 4.53, "learning_rate": 2.7339756193472278e-05, "loss": 0.9116, "step": 230500},
    {"epoch": 4.54, "learning_rate": 2.7290601651592606e-05, "loss": 0.9146, "step": 231000},
    {"epoch": 4.55, "learning_rate": 2.7241447109712937e-05, "loss": 0.919, "step": 231500},
    {"epoch": 4.56, "learning_rate": 2.719229256783327e-05, "loss": 0.9204, "step": 232000},
    {"epoch": 4.57, "learning_rate": 2.7143138025953603e-05, "loss": 0.9054, "step": 232500},
    {"epoch": 4.58, "learning_rate": 2.709398348407393e-05, "loss": 0.9276, "step": 233000},
    {"epoch": 4.59, "learning_rate": 2.7044828942194262e-05, "loss": 0.9172, "step": 233500},
    {"epoch": 4.6, "learning_rate": 2.699567440031459e-05, "loss": 0.9182, "step": 234000},
    {"epoch": 4.61, "learning_rate": 2.694651985843492e-05, "loss": 0.9263, "step": 234500},
    {"epoch": 4.62, "learning_rate": 2.6897365316555252e-05, "loss": 0.9243, "step": 235000},
    {"epoch": 4.63, "learning_rate": 2.684821077467558e-05, "loss": 0.9201, "step": 235500},
    {"epoch": 4.64, "learning_rate": 2.679905623279591e-05, "loss": 0.9056, "step": 236000},
    {"epoch": 4.65, "learning_rate": 2.674990169091624e-05, "loss": 0.9063, "step": 236500},
    {"epoch": 4.66, "learning_rate": 2.670074714903657e-05, "loss": 0.9132, "step": 237000},
    {"epoch": 4.67, "learning_rate": 2.66515926071569e-05, "loss": 0.9093, "step": 237500},
    {"epoch": 4.68, "learning_rate": 2.6602438065277236e-05, "loss": 0.9244, "step": 238000},
    {"epoch": 4.69, "learning_rate": 2.6553283523397564e-05, "loss": 0.9193, "step": 238500},
    {"epoch": 4.7, "learning_rate": 2.6504128981517895e-05, "loss": 0.9145, "step": 239000},
    {"epoch": 4.71, "learning_rate": 2.6454974439638223e-05, "loss": 0.9126, "step": 239500},
    {"epoch": 4.72, "learning_rate": 2.6405819897758555e-05, "loss": 0.9158, "step": 240000},
    {"epoch": 4.73, "learning_rate": 2.6356665355878886e-05, "loss": 0.9091, "step": 240500},
    {"epoch": 4.74, "learning_rate": 2.6307510813999214e-05, "loss": 0.9016, "step": 241000},
    {"epoch": 4.75, "learning_rate": 2.6258356272119545e-05, "loss": 0.9103, "step": 241500},
    {"epoch": 4.76, "learning_rate": 2.6209201730239873e-05, "loss": 0.9037, "step": 242000},
    {"epoch": 4.77, "learning_rate": 2.6160047188360204e-05, "loss": 0.9009, "step": 242500},
    {"epoch": 4.78, "learning_rate": 2.6110892646480535e-05, "loss": 0.9105, "step": 243000},
    {"epoch": 4.79, "learning_rate": 2.606173810460087e-05, "loss": 0.8999, "step": 243500},
    {"epoch": 4.8, "learning_rate": 2.6012583562721198e-05, "loss": 0.8918, "step": 244000},
    {"epoch": 4.81, "learning_rate": 2.596342902084153e-05, "loss": 0.912, "step": 244500},
    {"epoch": 4.82, "learning_rate": 2.5914274478961857e-05, "loss": 0.9022, "step": 245000},
    {"epoch": 4.83, "learning_rate": 2.5865119937082188e-05, "loss": 0.9088, "step": 245500},
    {"epoch": 4.84, "learning_rate": 2.581596539520252e-05, "loss": 0.9229, "step": 246000},
    {"epoch": 4.85, "learning_rate": 2.5766810853322847e-05, "loss": 0.9173, "step": 246500},
    {"epoch": 4.86, "learning_rate": 2.571765631144318e-05, "loss": 0.9114, "step": 247000},
    {"epoch": 4.87, "learning_rate": 2.5668501769563506e-05, "loss": 0.919, "step": 247500},
    {"epoch": 4.88, "learning_rate": 2.5619347227683838e-05, "loss": 0.9007, "step": 248000},
    {"epoch": 4.89, "learning_rate": 2.557019268580417e-05, "loss": 0.9098, "step": 248500},
    {"epoch": 4.9, "learning_rate": 2.5521038143924497e-05, "loss": 0.8994, "step": 249000},
    {"epoch": 4.91, "learning_rate": 2.547188360204483e-05, "loss": 0.9034, "step": 249500},
    {"epoch": 4.92, "learning_rate": 2.5422729060165163e-05, "loss": 0.9096, "step": 250000},
    {"epoch": 4.93, "learning_rate": 2.5373574518285494e-05, "loss": 0.9001, "step": 250500},
    {"epoch": 4.94, "learning_rate": 2.5324419976405822e-05, "loss": 0.8986, "step": 251000},
    {"epoch": 4.94, "learning_rate": 2.5275265434526153e-05, "loss": 0.9168, "step": 251500},
    {"epoch": 4.95, "learning_rate": 2.522611089264648e-05, "loss": 0.9086, "step": 252000},
    {"epoch": 4.96, "learning_rate": 2.5176956350766812e-05, "loss": 0.9067, "step": 252500},
    {"epoch": 4.97, "learning_rate": 2.512780180888714e-05, "loss": 0.9047, "step": 253000},
    {"epoch": 4.98, "learning_rate": 2.507864726700747e-05, "loss": 0.8987, "step": 253500},
    {"epoch": 4.99, "learning_rate": 2.5029492725127802e-05, "loss": 0.897, "step": 254000},
    {"epoch": 5.0, "learning_rate": 2.4980338183248134e-05, "loss": 0.8901, "step": 254500},
    {"epoch": 5.01, "learning_rate": 2.4931183641368465e-05, "loss": 0.9015, "step": 255000},
    {"epoch": 5.02, "learning_rate": 2.4882029099488793e-05, "loss": 0.9002, "step": 255500},
    {"epoch": 5.03, "learning_rate": 2.4832874557609124e-05, "loss": 0.9065, "step": 256000},
    {"epoch": 5.04, "learning_rate": 2.4783720015729455e-05, "loss": 0.9017, "step": 256500},
    {"epoch": 5.05, "learning_rate": 2.4734565473849787e-05, "loss": 0.8924, "step": 257000},
    {"epoch": 5.06, "learning_rate": 2.4685410931970114e-05, "loss": 0.8953, "step": 257500},
    {"epoch": 5.07, "learning_rate": 2.4636256390090446e-05, "loss": 0.8832, "step": 258000},
    {"epoch": 5.08, "learning_rate": 2.4587101848210774e-05, "loss": 0.8899, "step": 258500},
    {"epoch": 5.09, "learning_rate": 2.4537947306331105e-05, "loss": 0.9004, "step": 259000},
    {"epoch": 5.1, "learning_rate": 2.4488792764451436e-05, "loss": 0.8799, "step": 259500},
    {"epoch": 5.11, "learning_rate": 2.4439638222571767e-05, "loss": 0.8888, "step": 260000},
    {"epoch": 5.12, "learning_rate": 2.43904836806921e-05, "loss": 0.8905, "step": 260500},
    {"epoch": 5.13, "learning_rate": 2.4341329138812426e-05, "loss": 0.8936, "step": 261000},
    {"epoch": 5.14, "learning_rate": 2.4292174596932758e-05, "loss": 0.8914, "step": 261500},
    {"epoch": 5.15, "learning_rate": 2.4243020055053085e-05, "loss": 0.8862, "step": 262000},
    {"epoch": 5.16, "learning_rate": 2.419386551317342e-05, "loss": 0.8883, "step": 262500},
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 2.4144710971293748e-05, |
|
"loss": 0.8886, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 2.409555642941408e-05, |
|
"loss": 0.8875, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 2.404640188753441e-05, |
|
"loss": 0.8873, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 2.399724734565474e-05, |
|
"loss": 0.8994, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 2.394809280377507e-05, |
|
"loss": 0.8865, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 2.38989382618954e-05, |
|
"loss": 0.8915, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 2.3849783720015732e-05, |
|
"loss": 0.8944, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 2.380062917813606e-05, |
|
"loss": 0.8924, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 2.375147463625639e-05, |
|
"loss": 0.8834, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 2.370232009437672e-05, |
|
"loss": 0.8867, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 2.3653165552497054e-05, |
|
"loss": 0.8953, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 2.360401101061738e-05, |
|
"loss": 0.8916, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 2.3554856468737713e-05, |
|
"loss": 0.8916, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 2.3505701926858044e-05, |
|
"loss": 0.894, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 2.3456547384978372e-05, |
|
"loss": 0.8907, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 2.3407392843098703e-05, |
|
"loss": 0.8874, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 2.3358238301219034e-05, |
|
"loss": 0.88, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 2.3309083759339366e-05, |
|
"loss": 0.8946, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 2.3259929217459694e-05, |
|
"loss": 0.9026, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 2.3210774675580025e-05, |
|
"loss": 0.8955, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 2.3161620133700353e-05, |
|
"loss": 0.8811, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 2.3112465591820687e-05, |
|
"loss": 0.8919, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 2.3063311049941015e-05, |
|
"loss": 0.8863, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 2.3014156508061346e-05, |
|
"loss": 0.8876, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 2.2965001966181678e-05, |
|
"loss": 0.8878, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 2.2915847424302005e-05, |
|
"loss": 0.8874, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.2866692882422337e-05, |
|
"loss": 0.8857, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 2.2817538340542668e-05, |
|
"loss": 0.895, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 2.2768383798663e-05, |
|
"loss": 0.8881, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 2.2719229256783327e-05, |
|
"loss": 0.8941, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 2.267007471490366e-05, |
|
"loss": 0.8857, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 2.2620920173023986e-05, |
|
"loss": 0.8874, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 2.2571765631144317e-05, |
|
"loss": 0.8857, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 2.252261108926465e-05, |
|
"loss": 0.8805, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 2.247345654738498e-05, |
|
"loss": 0.8782, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 2.242430200550531e-05, |
|
"loss": 0.8921, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 2.237514746362564e-05, |
|
"loss": 0.8851, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 2.232599292174597e-05, |
|
"loss": 0.8806, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 2.2276838379866298e-05, |
|
"loss": 0.882, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 2.2227683837986633e-05, |
|
"loss": 0.888, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 2.217852929610696e-05, |
|
"loss": 0.8936, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 2.2129374754227292e-05, |
|
"loss": 0.8964, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 2.2080220212347623e-05, |
|
"loss": 0.8976, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 2.203106567046795e-05, |
|
"loss": 0.8836, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 2.1981911128588282e-05, |
|
"loss": 0.8904, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 2.1932756586708614e-05, |
|
"loss": 0.8879, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 2.1883602044828945e-05, |
|
"loss": 0.8894, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 2.1834447502949273e-05, |
|
"loss": 0.8799, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 2.1785292961069604e-05, |
|
"loss": 0.8779, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 2.1736138419189932e-05, |
|
"loss": 0.8816, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 2.1686983877310266e-05, |
|
"loss": 0.8815, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 2.1637829335430594e-05, |
|
"loss": 0.8853, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.1588674793550925e-05, |
|
"loss": 0.886, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 2.1539520251671257e-05, |
|
"loss": 0.8881, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 2.1490365709791585e-05, |
|
"loss": 0.8789, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 2.1441211167911916e-05, |
|
"loss": 0.8827, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 2.1392056626032247e-05, |
|
"loss": 0.8844, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.134290208415258e-05, |
|
"loss": 0.8807, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 2.1293747542272906e-05, |
|
"loss": 0.8789, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 2.1244593000393237e-05, |
|
"loss": 0.8808, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 2.1195438458513565e-05, |
|
"loss": 0.8763, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 2.1146283916633897e-05, |
|
"loss": 0.8827, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 2.1097129374754228e-05, |
|
"loss": 0.8749, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 2.104797483287456e-05, |
|
"loss": 0.8746, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 2.099882029099489e-05, |
|
"loss": 0.8833, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 2.0949665749115218e-05, |
|
"loss": 0.8855, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 2.090051120723555e-05, |
|
"loss": 0.8844, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 2.085135666535588e-05, |
|
"loss": 0.8829, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 2.0802202123476212e-05, |
|
"loss": 0.8842, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 2.075304758159654e-05, |
|
"loss": 0.8749, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 2.070389303971687e-05, |
|
"loss": 0.8741, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 2.06547384978372e-05, |
|
"loss": 0.8771, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 2.060558395595753e-05, |
|
"loss": 0.8662, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 2.055642941407786e-05, |
|
"loss": 0.8765, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 2.0507274872198193e-05, |
|
"loss": 0.8746, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 2.0458120330318524e-05, |
|
"loss": 0.8781, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 2.0408965788438852e-05, |
|
"loss": 0.8693, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 2.0359811246559183e-05, |
|
"loss": 0.8664, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 2.031065670467951e-05, |
|
"loss": 0.88, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 2.0261502162799845e-05, |
|
"loss": 0.8883, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 2.0212347620920173e-05, |
|
"loss": 0.8749, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 2.0163193079040505e-05, |
|
"loss": 0.8776, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 2.0114038537160836e-05, |
|
"loss": 0.8703, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 2.0064883995281164e-05, |
|
"loss": 0.8707, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 2.0015729453401495e-05, |
|
"loss": 0.8787, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.9966574911521826e-05, |
|
"loss": 0.851, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 1.9917420369642157e-05, |
|
"loss": 0.8621, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.9868265827762485e-05, |
|
"loss": 0.8738, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 1.9819111285882817e-05, |
|
"loss": 0.8811, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.9769956744003144e-05, |
|
"loss": 0.8708, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 1.972080220212348e-05, |
|
"loss": 0.8619, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.9671647660243807e-05, |
|
"loss": 0.8724, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.9622493118364138e-05, |
|
"loss": 0.8707, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.957333857648447e-05, |
|
"loss": 0.869, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 1.9524184034604797e-05, |
|
"loss": 0.868, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 1.947502949272513e-05, |
|
"loss": 0.8664, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.942587495084546e-05, |
|
"loss": 0.8612, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 1.937672040896579e-05, |
|
"loss": 0.8683, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.932756586708612e-05, |
|
"loss": 0.8689, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 1.927841132520645e-05, |
|
"loss": 0.8764, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.9229256783326778e-05, |
|
"loss": 0.8652, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 1.918010224144711e-05, |
|
"loss": 0.8635, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 1.913094769956744e-05, |
|
"loss": 0.8757, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 1.9081793157687772e-05, |
|
"loss": 0.8671, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.9032638615808103e-05, |
|
"loss": 0.8652, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.898348407392843e-05, |
|
"loss": 0.8696, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 1.8934329532048762e-05, |
|
"loss": 0.8532, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.8885174990169093e-05, |
|
"loss": 0.8579, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.8836020448289425e-05, |
|
"loss": 0.864, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 1.8786865906409752e-05, |
|
"loss": 0.8615, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 1.8737711364530084e-05, |
|
"loss": 0.8663, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.868855682265041e-05, |
|
"loss": 0.859, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 1.8639402280770743e-05, |
|
"loss": 0.8673, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 1.8590247738891074e-05, |
|
"loss": 0.8673, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.8541093197011405e-05, |
|
"loss": 0.8693, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 1.8491938655131737e-05, |
|
"loss": 0.867, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.8442784113252064e-05, |
|
"loss": 0.8635, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 1.8393629571372396e-05, |
|
"loss": 0.8611, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.8344475029492724e-05, |
|
"loss": 0.8657, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 1.8295320487613058e-05, |
|
"loss": 0.8595, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 1.8246165945733386e-05, |
|
"loss": 0.8592, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.8197011403853717e-05, |
|
"loss": 0.8648, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.814785686197405e-05, |
|
"loss": 0.8611, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 1.8098702320094376e-05, |
|
"loss": 0.869, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.8049547778214708e-05, |
|
"loss": 0.8639, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 1.800039323633504e-05, |
|
"loss": 0.8619, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.795123869445537e-05, |
|
"loss": 0.8499, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.7902084152575698e-05, |
|
"loss": 0.8649, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.785292961069603e-05, |
|
"loss": 0.8617, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.7803775068816357e-05, |
|
"loss": 0.8692, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.7754620526936692e-05, |
|
"loss": 0.8529, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.770546598505702e-05, |
|
"loss": 0.8672, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.765631144317735e-05, |
|
"loss": 0.8536, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.7607156901297682e-05, |
|
"loss": 0.8471, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.755800235941801e-05, |
|
"loss": 0.8562, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 1.750884781753834e-05, |
|
"loss": 0.856, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.7459693275658672e-05, |
|
"loss": 0.87, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.7410538733779004e-05, |
|
"loss": 0.8662, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.736138419189933e-05, |
|
"loss": 0.867, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.7312229650019663e-05, |
|
"loss": 0.8535, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.726307510813999e-05, |
|
"loss": 0.8589, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.7213920566260322e-05, |
|
"loss": 0.8549, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 1.7164766024380653e-05, |
|
"loss": 0.8739, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.7115611482500984e-05, |
|
"loss": 0.849, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.7066456940621316e-05, |
|
"loss": 0.8613, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.7017302398741644e-05, |
|
"loss": 0.862, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.6968147856861975e-05, |
|
"loss": 0.8658, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 1.6918993314982306e-05, |
|
"loss": 0.8595, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.6869838773102637e-05, |
|
"loss": 0.8511, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.6820684231222965e-05, |
|
"loss": 0.8568, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 1.6771529689343296e-05, |
|
"loss": 0.8589, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.6722375147463624e-05, |
|
"loss": 0.859, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.6673220605583956e-05, |
|
"loss": 0.8545, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 1.6624066063704287e-05, |
|
"loss": 0.8615, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.6574911521824618e-05, |
|
"loss": 0.8763, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.652575697994495e-05, |
|
"loss": 0.856, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 1.6476602438065277e-05, |
|
"loss": 0.8626, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.642744789618561e-05, |
|
"loss": 0.8596, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 1.6378293354305936e-05, |
|
"loss": 0.8643, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.632913881242627e-05, |
|
"loss": 0.8606, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 1.62799842705466e-05, |
|
"loss": 0.8622, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.623082972866693e-05, |
|
"loss": 0.8629, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.618167518678726e-05, |
|
"loss": 0.8544, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.613252064490759e-05, |
|
"loss": 0.865, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.608336610302792e-05, |
|
"loss": 0.8574, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.603421156114825e-05, |
|
"loss": 0.8657, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.5985057019268583e-05, |
|
"loss": 0.8479, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.593590247738891e-05, |
|
"loss": 0.8548, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.5886747935509242e-05, |
|
"loss": 0.8485, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.583759339362957e-05, |
|
"loss": 0.8518, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.5788438851749904e-05, |
|
"loss": 0.8678, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.5739284309870232e-05, |
|
"loss": 0.8575, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.5690129767990564e-05, |
|
"loss": 0.8553, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.5640975226110895e-05, |
|
"loss": 0.865, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.5591820684231223e-05, |
|
"loss": 0.852, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.5542666142351554e-05, |
|
"loss": 0.8435, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.5493511600471885e-05, |
|
"loss": 0.8605, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 1.5444357058592216e-05, |
|
"loss": 0.87, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.5395202516712544e-05, |
|
"loss": 0.853, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.5346047974832876e-05, |
|
"loss": 0.8627, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.5296893432953203e-05, |
|
"loss": 0.8525, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.5247738891073535e-05, |
|
"loss": 0.854, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.5198584349193868e-05, |
|
"loss": 0.8404, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.5149429807314197e-05, |
|
"loss": 0.8652, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 1.5100275265434527e-05, |
|
"loss": 0.8651, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.5051120723554856e-05, |
|
"loss": 0.859, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 1.5001966181675187e-05, |
|
"loss": 0.865, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 1.4952811639795517e-05, |
|
"loss": 0.8454, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 1.4903657097915848e-05, |
|
"loss": 0.8597, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 1.485450255603618e-05, |
|
"loss": 0.8388, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 1.4805348014156509e-05, |
|
"loss": 0.8396, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 1.4756193472276839e-05, |
|
"loss": 0.8523, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 1.4707038930397168e-05, |
|
"loss": 0.8509, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 1.4657884388517501e-05, |
|
"loss": 0.8461, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 1.460872984663783e-05, |
|
"loss": 0.8475, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 1.455957530475816e-05, |
|
"loss": 0.8356, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 1.451042076287849e-05, |
|
"loss": 0.8534, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 1.4461266220998821e-05, |
|
"loss": 0.8501, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 1.441211167911915e-05, |
|
"loss": 0.8398, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 1.4362957137239484e-05, |
|
"loss": 0.8452, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 1.4313802595359813e-05, |
|
"loss": 0.8563, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 1.4264648053480143e-05, |
|
"loss": 0.8546, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 1.4215493511600472e-05, |
|
"loss": 0.8523, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 1.4166338969720802e-05, |
|
"loss": 0.8452, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 1.4117184427841131e-05, |
|
"loss": 0.8492, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 1.4068029885961464e-05, |
|
"loss": 0.8446, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 1.4018875344081794e-05, |
|
"loss": 0.8487, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 1.3969720802202123e-05, |
|
"loss": 0.8431, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 1.3920566260322455e-05, |
|
"loss": 0.8465, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 1.3871411718442784e-05, |
|
"loss": 0.8453, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 1.3822257176563117e-05, |
|
"loss": 0.8546, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 1.3773102634683447e-05, |
|
"loss": 0.8458, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 1.3723948092803776e-05, |
|
"loss": 0.8333, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 1.3674793550924106e-05, |
|
"loss": 0.8514, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 1.3625639009044435e-05, |
|
"loss": 0.8466, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 1.3576484467164765e-05, |
|
"loss": 0.84, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 1.3527329925285098e-05, |
|
"loss": 0.858, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 1.3478175383405427e-05, |
|
"loss": 0.8472, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 1.3429020841525759e-05, |
|
"loss": 0.8519, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 1.3379866299646088e-05, |
|
"loss": 0.8513, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 1.3330711757766418e-05, |
|
"loss": 0.8448, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 1.3281557215886747e-05, |
|
"loss": 0.8454, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 1.323240267400708e-05, |
|
"loss": 0.8542, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 1.318324813212741e-05, |
|
"loss": 0.8522, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 1.313409359024774e-05, |
|
"loss": 0.8451, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 1.3084939048368069e-05, |
|
"loss": 0.8394, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 1.30357845064884e-05, |
|
"loss": 0.8375, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 1.298662996460873e-05, |
|
"loss": 0.8379, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 1.2937475422729061e-05, |
|
"loss": 0.8507, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 1.2888320880849392e-05, |
|
"loss": 0.8513, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 1.2839166338969722e-05, |
|
"loss": 0.8489, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 1.2790011797090051e-05, |
|
"loss": 0.8523, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 1.2740857255210381e-05, |
|
"loss": 0.8444, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 1.2691702713330714e-05, |
|
"loss": 0.8427, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 1.2642548171451043e-05, |
|
"loss": 0.8465, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 1.2593393629571373e-05, |
|
"loss": 0.8317, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 1.2544239087691702e-05, |
|
"loss": 0.8336, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 1.2495084545812034e-05, |
|
"loss": 0.8303, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 1.2445930003932365e-05, |
|
"loss": 0.8456, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 1.2396775462052695e-05, |
|
"loss": 0.8575, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 1.2347620920173024e-05, |
|
"loss": 0.8468, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 1.2298466378293355e-05, |
|
"loss": 0.8435, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 1.2249311836413685e-05, |
|
"loss": 0.8425, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 1.2200157294534016e-05, |
|
"loss": 0.8421, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 1.2151002752654346e-05, |
|
"loss": 0.8518, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 1.2101848210774675e-05, |
|
"loss": 0.839, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 1.2052693668895006e-05, |
|
"loss": 0.8344, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 1.2003539127015336e-05, |
|
"loss": 0.8446, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 1.1954384585135667e-05, |
|
"loss": 0.8314, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 1.1905230043255999e-05, |
|
"loss": 0.8382, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 1.1856075501376328e-05, |
|
"loss": 0.8527, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 1.1806920959496658e-05, |
|
"loss": 0.8491, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 1.1757766417616989e-05, |
|
"loss": 0.834, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 1.1708611875737318e-05, |
|
"loss": 0.8415, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 1.1659457333857648e-05, |
|
"loss": 0.8423, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 1.161030279197798e-05, |
|
"loss": 0.8454, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 1.1561148250098309e-05, |
|
"loss": 0.8562, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 1.151199370821864e-05, |
|
"loss": 0.8516, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 1.1462839166338971e-05, |
|
"loss": 0.8434, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 1.1413684624459301e-05, |
|
"loss": 0.8353, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 1.136453008257963e-05, |
|
"loss": 0.8447, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 1.1315375540699962e-05, |
|
"loss": 0.8374, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 1.1266220998820291e-05, |
|
"loss": 0.8374, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 1.1217066456940622e-05, |
|
"loss": 0.8487, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 1.1167911915060952e-05, |
|
"loss": 0.8475, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 1.1118757373181282e-05, |
|
"loss": 0.8415, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 1.1069602831301613e-05, |
|
"loss": 0.8424, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 1.1020448289421944e-05, |
|
"loss": 0.8415, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 1.0971293747542274e-05, |
|
"loss": 0.8466, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 1.0922139205662605e-05, |
|
"loss": 0.834, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 1.0872984663782934e-05, |
|
"loss": 0.8413, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 1.0823830121903264e-05, |
|
"loss": 0.8476, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 1.0774675580023595e-05, |
|
"loss": 0.8419, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 1.0725521038143925e-05, |
|
"loss": 0.853, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 1.0676366496264254e-05, |
|
"loss": 0.8368, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 1.0627211954384586e-05, |
|
"loss": 0.8426, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 1.0578057412504915e-05, |
|
"loss": 0.8566, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 1.0528902870625246e-05, |
|
"loss": 0.8405, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 1.0479748328745578e-05, |
|
"loss": 0.8338, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 1.0430593786865907e-05, |
|
"loss": 0.8411, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 1.0381439244986237e-05, |
|
"loss": 0.8457, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 1.0332284703106568e-05, |
|
"loss": 0.8472, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 1.0283130161226898e-05, |
|
"loss": 0.8283, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 1.0233975619347227e-05, |
|
"loss": 0.8459, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 1.0184821077467558e-05, |
|
"loss": 0.8401, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 1.0135666535587888e-05, |
|
"loss": 0.8307, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 1.008651199370822e-05, |
|
"loss": 0.8379, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 1.003735745182855e-05, |
|
"loss": 0.8398, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 9.98820290994888e-06, |
|
"loss": 0.8283, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 9.939048368069211e-06, |
|
"loss": 0.8299, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 9.88989382618954e-06, |
|
"loss": 0.8323, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 9.84073928430987e-06, |
|
"loss": 0.8346, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 9.791584742430202e-06, |
|
"loss": 0.8382, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 9.742430200550531e-06, |
|
"loss": 0.8312, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 9.69327565867086e-06, |
|
"loss": 0.8264, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 9.644121116791192e-06, |
|
"loss": 0.8448, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 9.594966574911521e-06, |
|
"loss": 0.8387, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 9.545812033031853e-06, |
|
"loss": 0.8427, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 9.496657491152184e-06, |
|
"loss": 0.8295, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 9.447502949272514e-06, |
|
"loss": 0.8314, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 9.398348407392843e-06, |
|
"loss": 0.8331, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 9.349193865513174e-06, |
|
"loss": 0.8328, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 9.300039323633504e-06, |
|
"loss": 0.8389, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 9.250884781753833e-06, |
|
"loss": 0.8361, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 9.201730239874165e-06, |
|
"loss": 0.8297, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 9.152575697994494e-06, |
|
"loss": 0.8363, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 9.103421156114826e-06, |
|
"loss": 0.8236, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 9.054266614235157e-06, |
|
"loss": 0.8253, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 9.005112072355486e-06, |
|
"loss": 0.8374, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 8.955957530475818e-06, |
|
"loss": 0.8392, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 8.906802988596147e-06, |
|
"loss": 0.8299, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 8.857648446716477e-06, |
|
"loss": 0.8358, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 8.808493904836808e-06, |
|
"loss": 0.8382, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 8.759339362957137e-06, |
|
"loss": 0.8266, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 8.710184821077467e-06, |
|
"loss": 0.8376, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 8.661030279197798e-06, |
|
"loss": 0.8373, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 8.611875737318128e-06, |
|
"loss": 0.8358, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 8.562721195438459e-06, |
|
"loss": 0.8406, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 8.51356665355879e-06, |
|
"loss": 0.8322, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 8.46441211167912e-06, |
|
"loss": 0.8391, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 8.41525756979945e-06, |
|
"loss": 0.8487, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 8.36610302791978e-06, |
|
"loss": 0.8206, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 8.31694848604011e-06, |
|
"loss": 0.8305, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 8.26779394416044e-06, |
|
"loss": 0.8382, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 8.218639402280771e-06, |
|
"loss": 0.8278, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 8.1694848604011e-06, |
|
"loss": 0.8358, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 8.120330318521432e-06, |
|
"loss": 0.8298, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 8.071175776641763e-06, |
|
"loss": 0.8257, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 8.022021234762093e-06, |
|
"loss": 0.8395, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 7.972866692882424e-06, |
|
"loss": 0.8355, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 7.923712151002753e-06, |
|
"loss": 0.8289, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 7.874557609123083e-06, |
|
"loss": 0.844, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 7.825403067243414e-06, |
|
"loss": 0.8313, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 7.776248525363744e-06, |
|
"loss": 0.8367, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 7.727093983484073e-06, |
|
"loss": 0.8315, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 7.677939441604405e-06, |
|
"loss": 0.8373, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 7.628784899724735e-06, |
|
"loss": 0.8369, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 7.579630357845065e-06, |
|
"loss": 0.8307, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 7.530475815965396e-06, |
|
"loss": 0.8268, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 7.481321274085725e-06, |
|
"loss": 0.8307, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 7.432166732206056e-06, |
|
"loss": 0.8254, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 7.383012190326387e-06, |
|
"loss": 0.8402, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 7.333857648446717e-06, |
|
"loss": 0.8292, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 7.284703106567046e-06, |
|
"loss": 0.8263, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 7.235548564687377e-06, |
|
"loss": 0.8418, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 7.186394022807708e-06, |
|
"loss": 0.8297, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 7.137239480928037e-06, |
|
"loss": 0.8327, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 7.088084939048369e-06, |
|
"loss": 0.8392, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 7.038930397168698e-06, |
|
"loss": 0.8328, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 6.989775855289029e-06, |
|
"loss": 0.8278, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 6.94062131340936e-06, |
|
"loss": 0.8306, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 6.891466771529689e-06, |
|
"loss": 0.8278, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 6.842312229650021e-06, |
|
"loss": 0.8322, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 6.79315768777035e-06, |
|
"loss": 0.8287, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 6.7440031458906806e-06, |
|
"loss": 0.8357, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 6.694848604011012e-06, |
|
"loss": 0.8366, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 6.645694062131341e-06, |
|
"loss": 0.8278, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 6.596539520251671e-06, |
|
"loss": 0.8388, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 6.547384978372002e-06, |
|
"loss": 0.8326, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 6.4982304364923326e-06, |
|
"loss": 0.8309, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 6.449075894612662e-06, |
|
"loss": 0.826, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 6.399921352732993e-06, |
|
"loss": 0.838, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 6.350766810853323e-06, |
|
"loss": 0.8418, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 6.3016122689736525e-06, |
|
"loss": 0.827, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 6.252457727093984e-06, |
|
"loss": 0.8258, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 6.203303185214314e-06, |
|
"loss": 0.8359, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 6.1541486433346445e-06, |
|
"loss": 0.8356, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 6.104994101454975e-06, |
|
"loss": 0.8164, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 6.0558395595753045e-06, |
|
"loss": 0.8447, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 6.006685017695636e-06, |
|
"loss": 0.8411, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 5.957530475815966e-06, |
|
"loss": 0.8361, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 5.908375933936296e-06, |
|
"loss": 0.8258, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 5.859221392056626e-06, |
|
"loss": 0.8248, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 5.8100668501769565e-06, |
|
"loss": 0.8292, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 5.760912308297287e-06, |
|
"loss": 0.8235, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 5.711757766417617e-06, |
|
"loss": 0.8326, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 5.662603224537948e-06, |
|
"loss": 0.8177, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 5.613448682658278e-06, |
|
"loss": 0.8496, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 5.564294140778608e-06, |
|
"loss": 0.8375, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 5.515139598898939e-06, |
|
"loss": 0.8365, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 5.465985057019269e-06, |
|
"loss": 0.8371, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 5.416830515139599e-06, |
|
"loss": 0.8284, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 5.367675973259929e-06, |
|
"loss": 0.8398, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 5.31852143138026e-06, |
|
"loss": 0.8371, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 5.26936688950059e-06, |
|
"loss": 0.8339, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 5.2202123476209205e-06, |
|
"loss": 0.8288, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 5.171057805741251e-06, |
|
"loss": 0.8191, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 5.121903263861581e-06, |
|
"loss": 0.8308, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 5.072748721981911e-06, |
|
"loss": 0.8308, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 5.023594180102242e-06, |
|
"loss": 0.8238, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 4.9744396382225725e-06, |
|
"loss": 0.8237, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 4.925285096342902e-06, |
|
"loss": 0.8287, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 4.8761305544632324e-06, |
|
"loss": 0.8243, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 4.826976012583563e-06, |
|
"loss": 0.8379, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 4.777821470703893e-06, |
|
"loss": 0.8295, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 4.728666928824224e-06, |
|
"loss": 0.824, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 4.679512386944554e-06, |
|
"loss": 0.8207, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 4.6303578450648845e-06, |
|
"loss": 0.825, |
|
"step": 461500 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 4.581203303185214e-06, |
|
"loss": 0.8292, |
|
"step": 462000 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 4.532048761305545e-06, |
|
"loss": 0.8146, |
|
"step": 462500 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 4.482894219425876e-06, |
|
"loss": 0.825, |
|
"step": 463000 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 4.433739677546205e-06, |
|
"loss": 0.8277, |
|
"step": 463500 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 4.384585135666536e-06, |
|
"loss": 0.8252, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 4.335430593786866e-06, |
|
"loss": 0.8263, |
|
"step": 464500 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 4.286276051907196e-06, |
|
"loss": 0.8302, |
|
"step": 465000 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 4.237121510027527e-06, |
|
"loss": 0.8321, |
|
"step": 465500 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 4.187966968147857e-06, |
|
"loss": 0.8288, |
|
"step": 466000 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 4.138812426268188e-06, |
|
"loss": 0.8346, |
|
"step": 466500 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 4.089657884388517e-06, |
|
"loss": 0.8262, |
|
"step": 467000 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 4.0405033425088484e-06, |
|
"loss": 0.8265, |
|
"step": 467500 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 3.991348800629179e-06, |
|
"loss": 0.8337, |
|
"step": 468000 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 3.942194258749508e-06, |
|
"loss": 0.8469, |
|
"step": 468500 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 3.893039716869839e-06, |
|
"loss": 0.8349, |
|
"step": 469000 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 3.843885174990169e-06, |
|
"loss": 0.8192, |
|
"step": 469500 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 3.794730633110499e-06, |
|
"loss": 0.8288, |
|
"step": 470000 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 3.74557609123083e-06, |
|
"loss": 0.8289, |
|
"step": 470500 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 3.6964215493511604e-06, |
|
"loss": 0.8261, |
|
"step": 471000 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 3.647267007471491e-06, |
|
"loss": 0.8309, |
|
"step": 471500 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 3.5981124655918208e-06, |
|
"loss": 0.8227, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 3.548957923712151e-06, |
|
"loss": 0.8222, |
|
"step": 472500 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 3.4998033818324816e-06, |
|
"loss": 0.8214, |
|
"step": 473000 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 3.4506488399528116e-06, |
|
"loss": 0.8295, |
|
"step": 473500 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 3.401494298073142e-06, |
|
"loss": 0.8313, |
|
"step": 474000 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 3.3523397561934728e-06, |
|
"loss": 0.8416, |
|
"step": 474500 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 3.3031852143138023e-06, |
|
"loss": 0.8233, |
|
"step": 475000 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 3.254030672434133e-06, |
|
"loss": 0.8229, |
|
"step": 475500 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 3.2048761305544636e-06, |
|
"loss": 0.8294, |
|
"step": 476000 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 3.155721588674794e-06, |
|
"loss": 0.8313, |
|
"step": 476500 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 3.1065670467951244e-06, |
|
"loss": 0.8167, |
|
"step": 477000 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 3.0574125049154543e-06, |
|
"loss": 0.8252, |
|
"step": 477500 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 3.0082579630357847e-06, |
|
"loss": 0.824, |
|
"step": 478000 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 2.959103421156115e-06, |
|
"loss": 0.8238, |
|
"step": 478500 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 2.909948879276445e-06, |
|
"loss": 0.8217, |
|
"step": 479000 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 2.860794337396776e-06, |
|
"loss": 0.8427, |
|
"step": 479500 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 2.811639795517106e-06, |
|
"loss": 0.8242, |
|
"step": 480000 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 2.7624852536374363e-06, |
|
"loss": 0.8186, |
|
"step": 480500 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 2.7133307117577667e-06, |
|
"loss": 0.8238, |
|
"step": 481000 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 2.6641761698780967e-06, |
|
"loss": 0.8291, |
|
"step": 481500 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 2.615021627998427e-06, |
|
"loss": 0.8278, |
|
"step": 482000 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 2.5658670861187575e-06, |
|
"loss": 0.8383, |
|
"step": 482500 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 2.516712544239088e-06, |
|
"loss": 0.8377, |
|
"step": 483000 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 2.4675580023594183e-06, |
|
"loss": 0.8205, |
|
"step": 483500 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 2.4184034604797483e-06, |
|
"loss": 0.8293, |
|
"step": 484000 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 2.3692489186000787e-06, |
|
"loss": 0.8206, |
|
"step": 484500 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 2.320094376720409e-06, |
|
"loss": 0.826, |
|
"step": 485000 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 2.2709398348407395e-06, |
|
"loss": 0.8192, |
|
"step": 485500 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 2.22178529296107e-06, |
|
"loss": 0.8271, |
|
"step": 486000 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 2.1726307510814e-06, |
|
"loss": 0.8296, |
|
"step": 486500 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 2.1234762092017303e-06, |
|
"loss": 0.8286, |
|
"step": 487000 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 2.0743216673220607e-06, |
|
"loss": 0.8282, |
|
"step": 487500 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 2.025167125442391e-06, |
|
"loss": 0.8143, |
|
"step": 488000 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 1.9760125835627215e-06, |
|
"loss": 0.829, |
|
"step": 488500 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 1.9268580416830515e-06, |
|
"loss": 0.8236, |
|
"step": 489000 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 1.8777034998033819e-06, |
|
"loss": 0.8227, |
|
"step": 489500 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 1.8285489579237123e-06, |
|
"loss": 0.8212, |
|
"step": 490000 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 1.7793944160440425e-06, |
|
"loss": 0.8176, |
|
"step": 490500 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 1.730239874164373e-06, |
|
"loss": 0.823, |
|
"step": 491000 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 1.6810853322847033e-06, |
|
"loss": 0.8264, |
|
"step": 491500 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 1.6319307904050335e-06, |
|
"loss": 0.8226, |
|
"step": 492000 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 1.5827762485253639e-06, |
|
"loss": 0.8084, |
|
"step": 492500 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 1.533621706645694e-06, |
|
"loss": 0.8197, |
|
"step": 493000 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 1.4844671647660245e-06, |
|
"loss": 0.8289, |
|
"step": 493500 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 1.4353126228863549e-06, |
|
"loss": 0.8274, |
|
"step": 494000 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 1.386158081006685e-06, |
|
"loss": 0.8411, |
|
"step": 494500 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 1.3370035391270154e-06, |
|
"loss": 0.8231, |
|
"step": 495000 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 1.2878489972473456e-06, |
|
"loss": 0.8238, |
|
"step": 495500 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 1.238694455367676e-06, |
|
"loss": 0.8203, |
|
"step": 496000 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 1.1895399134880064e-06, |
|
"loss": 0.8342, |
|
"step": 496500 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 1.1403853716083366e-06, |
|
"loss": 0.8391, |
|
"step": 497000 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 1.091230829728667e-06, |
|
"loss": 0.8203, |
|
"step": 497500 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 1.0420762878489972e-06, |
|
"loss": 0.8204, |
|
"step": 498000 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 9.929217459693276e-07, |
|
"loss": 0.8269, |
|
"step": 498500 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 9.437672040896579e-07, |
|
"loss": 0.8294, |
|
"step": 499000 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 8.946126622099883e-07, |
|
"loss": 0.8278, |
|
"step": 499500 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 8.454581203303186e-07, |
|
"loss": 0.8291, |
|
"step": 500000 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 7.963035784506488e-07, |
|
"loss": 0.8261, |
|
"step": 500500 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 7.471490365709792e-07, |
|
"loss": 0.8279, |
|
"step": 501000 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 6.979944946913095e-07, |
|
"loss": 0.8283, |
|
"step": 501500 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 6.488399528116398e-07, |
|
"loss": 0.8279, |
|
"step": 502000 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 5.996854109319701e-07, |
|
"loss": 0.8286, |
|
"step": 502500 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 5.505308690523005e-07, |
|
"loss": 0.8287, |
|
"step": 503000 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 5.013763271726308e-07, |
|
"loss": 0.8136, |
|
"step": 503500 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 4.522217852929611e-07, |
|
"loss": 0.8294, |
|
"step": 504000 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 4.030672434132914e-07, |
|
"loss": 0.8238, |
|
"step": 504500 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 3.5391270153362173e-07, |
|
"loss": 0.8198, |
|
"step": 505000 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 3.0475815965395203e-07, |
|
"loss": 0.8169, |
|
"step": 505500 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 2.556036177742823e-07, |
|
"loss": 0.8261, |
|
"step": 506000 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 2.0644907589461265e-07, |
|
"loss": 0.8285, |
|
"step": 506500 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 1.57294534014943e-07, |
|
"loss": 0.8292, |
|
"step": 507000 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 1.0813999213527332e-07, |
|
"loss": 0.8324, |
|
"step": 507500 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 5.898545025560362e-08, |
|
"loss": 0.8205, |
|
"step": 508000 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 9.830908375933937e-09, |
|
"loss": 0.8153, |
|
"step": 508500 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 508600, |
|
"total_flos": 4.55385938558976e+17, |
|
"train_loss": 1.0178077638950866, |
|
"train_runtime": 115805.9671, |
|
"train_samples_per_second": 52.702, |
|
"train_steps_per_second": 4.392 |
|
} |
|
], |
|
"max_steps": 508600, |
|
"num_train_epochs": 10, |
|
"total_flos": 4.55385938558976e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|