{
  "best_metric": 0.2726329565048218,
  "best_model_checkpoint": "./distilroberta-marian-training1/checkpoint-144000",
  "epoch": 10.0,
  "global_step": 160000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 6.225000000000001e-07,
      "loss": 0.1837,
      "step": 500
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2462500000000001e-06,
      "loss": 0.2157,
      "step": 1000
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.8712500000000003e-06,
      "loss": 0.22,
      "step": 1500
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.4962500000000005e-06,
      "loss": 0.1969,
      "step": 2000
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.12125e-06,
      "loss": 0.2,
      "step": 2500
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.745e-06,
      "loss": 0.2612,
      "step": 3000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.3700000000000005e-06,
      "loss": 0.2172,
      "step": 3500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.9950000000000005e-06,
      "loss": 0.1853,
      "step": 4000
    },
    {
      "epoch": 0.28,
      "learning_rate": 5.620000000000001e-06,
      "loss": 0.1987,
      "step": 4500
    },
    {
      "epoch": 0.31,
      "learning_rate": 6.243750000000001e-06,
      "loss": 0.232,
      "step": 5000
    },
    {
      "epoch": 0.34,
      "learning_rate": 6.868750000000001e-06,
      "loss": 0.2513,
      "step": 5500
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.493750000000001e-06,
      "loss": 0.2117,
      "step": 6000
    },
    {
      "epoch": 0.41,
      "learning_rate": 8.11875e-06,
      "loss": 0.2349,
      "step": 6500
    },
    {
      "epoch": 0.44,
      "learning_rate": 8.74375e-06,
      "loss": 0.2328,
      "step": 7000
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.3675e-06,
      "loss": 0.2738,
      "step": 7500
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.992500000000001e-06,
      "loss": 0.2504,
      "step": 8000
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.9675e-06,
      "loss": 0.246,
      "step": 8500
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.934605263157895e-06,
      "loss": 0.2218,
      "step": 9000
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.901776315789475e-06,
      "loss": 0.2868,
      "step": 9500
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.86888157894737e-06,
      "loss": 0.2406,
      "step": 10000
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.835986842105264e-06,
      "loss": 0.2783,
      "step": 10500
    },
    {
      "epoch": 0.69,
      "learning_rate": 9.803092105263159e-06,
      "loss": 0.2784,
      "step": 11000
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.770197368421053e-06,
      "loss": 0.2379,
      "step": 11500
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.737302631578948e-06,
      "loss": 0.2887,
      "step": 12000
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.704473684210527e-06,
      "loss": 0.2569,
      "step": 12500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.671578947368422e-06,
      "loss": 0.2794,
      "step": 13000
    },
    {
      "epoch": 0.84,
      "learning_rate": 9.638684210526316e-06,
      "loss": 0.2573,
      "step": 13500
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.605789473684211e-06,
      "loss": 0.251,
      "step": 14000
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.572894736842107e-06,
      "loss": 0.268,
      "step": 14500
    },
    {
      "epoch": 0.94,
      "learning_rate": 9.54e-06,
      "loss": 0.2304,
      "step": 15000
    },
    {
      "epoch": 0.97,
      "learning_rate": 9.507105263157895e-06,
      "loss": 0.2631,
      "step": 15500
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.474210526315791e-06,
      "loss": 0.2674,
      "step": 16000
    },
    {
      "epoch": 1.0,
      "eval_BLEU": 73.83414614372862,
      "eval_BLEU-Bigram-Precision": 79.1290558672847,
      "eval_BLEU-Trigram-Precision": 71.86590940573487,
      "eval_BLEU-Unigram-Precision": 87.71472059143292,
      "eval_ROUGE-2": 72.64315140484014,
      "eval_ROUGE-L": 88.44220304691733,
      "eval_Sacre-Bigram-Precision": 79.870557261957,
      "eval_Sacre-Trigram-Precision": 73.81098685019049,
      "eval_Sacre-Unigram-Precision": 87.78173190984579,
      "eval_SacreBLEU": 75.40013818024555,
      "eval_loss": 0.32742100954055786,
      "eval_runtime": 88.0501,
      "eval_samples_per_second": 11.357,
      "eval_steps_per_second": 11.357,
      "step": 16000
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.441315789473686e-06,
      "loss": 0.1756,
      "step": 16500
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.408486842105265e-06,
      "loss": 0.277,
      "step": 17000
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.375592105263158e-06,
      "loss": 0.2313,
      "step": 17500
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.342697368421053e-06,
      "loss": 0.2596,
      "step": 18000
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.309802631578949e-06,
      "loss": 0.2473,
      "step": 18500
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.276907894736843e-06,
      "loss": 0.2274,
      "step": 19000
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.244144736842106e-06,
      "loss": 0.2314,
      "step": 19500
    },
    {
      "epoch": 1.25,
      "learning_rate": 9.211250000000001e-06,
      "loss": 0.1981,
      "step": 20000
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.178355263157896e-06,
      "loss": 0.2544,
      "step": 20500
    },
    {
      "epoch": 1.31,
      "learning_rate": 9.14546052631579e-06,
      "loss": 0.2123,
      "step": 21000
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.112565789473685e-06,
      "loss": 0.2215,
      "step": 21500
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.079671052631581e-06,
      "loss": 0.2134,
      "step": 22000
    },
    {
      "epoch": 1.41,
      "learning_rate": 9.046776315789474e-06,
      "loss": 0.2684,
      "step": 22500
    },
    {
      "epoch": 1.44,
      "learning_rate": 9.013881578947368e-06,
      "loss": 0.2485,
      "step": 23000
    },
    {
      "epoch": 1.47,
      "learning_rate": 8.981052631578948e-06,
      "loss": 0.1944,
      "step": 23500
    },
    {
      "epoch": 1.5,
      "learning_rate": 8.948157894736843e-06,
      "loss": 0.2126,
      "step": 24000
    },
    {
      "epoch": 1.53,
      "learning_rate": 8.915263157894739e-06,
      "loss": 0.2315,
      "step": 24500
    },
    {
      "epoch": 1.56,
      "learning_rate": 8.882368421052632e-06,
      "loss": 0.224,
      "step": 25000
    },
    {
      "epoch": 1.59,
      "learning_rate": 8.849473684210526e-06,
      "loss": 0.2295,
      "step": 25500
    },
    {
      "epoch": 1.62,
      "learning_rate": 8.81657894736842e-06,
      "loss": 0.1894,
      "step": 26000
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.783684210526317e-06,
      "loss": 0.2191,
      "step": 26500
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.750855263157895e-06,
      "loss": 0.1989,
      "step": 27000
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.717960526315791e-06,
      "loss": 0.1923,
      "step": 27500
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.685065789473684e-06,
      "loss": 0.1979,
      "step": 28000
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.652171052631579e-06,
      "loss": 0.2166,
      "step": 28500
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.619342105263158e-06,
      "loss": 0.234,
      "step": 29000
    },
    {
      "epoch": 1.84,
      "learning_rate": 8.586447368421053e-06,
      "loss": 0.2352,
      "step": 29500
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.553552631578949e-06,
      "loss": 0.1886,
      "step": 30000
    },
    {
      "epoch": 1.91,
      "learning_rate": 8.520657894736842e-06,
      "loss": 0.2233,
      "step": 30500
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.487763157894737e-06,
      "loss": 0.2737,
      "step": 31000
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.454868421052633e-06,
      "loss": 0.1909,
      "step": 31500
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.421973684210527e-06,
      "loss": 0.1936,
      "step": 32000
    },
    {
      "epoch": 2.0,
      "eval_BLEU": 75.36161100581221,
      "eval_BLEU-Bigram-Precision": 80.48572125945815,
      "eval_BLEU-Trigram-Precision": 73.76299376299377,
      "eval_BLEU-Unigram-Precision": 88.22057863824233,
      "eval_ROUGE-2": 74.09506184806267,
      "eval_ROUGE-L": 89.05035697782122,
      "eval_Sacre-Bigram-Precision": 81.32291437376183,
      "eval_Sacre-Trigram-Precision": 75.74935241149623,
      "eval_Sacre-Unigram-Precision": 88.4195915129883,
      "eval_SacreBLEU": 76.75621822222277,
      "eval_loss": 0.3107452392578125,
      "eval_runtime": 88.8932,
      "eval_samples_per_second": 11.249,
      "eval_steps_per_second": 11.249,
      "step": 32000
    },
    {
      "epoch": 2.03,
      "learning_rate": 8.389078947368422e-06,
      "loss": 0.1905,
      "step": 32500
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.356184210526316e-06,
      "loss": 0.1765,
      "step": 33000
    },
    {
      "epoch": 2.09,
      "learning_rate": 8.323355263157894e-06,
      "loss": 0.1643,
      "step": 33500
    },
    {
      "epoch": 2.12,
      "learning_rate": 8.29046052631579e-06,
      "loss": 0.1843,
      "step": 34000
    },
    {
      "epoch": 2.16,
      "learning_rate": 8.257565789473685e-06,
      "loss": 0.1825,
      "step": 34500
    },
    {
      "epoch": 2.19,
      "learning_rate": 8.224736842105265e-06,
      "loss": 0.1374,
      "step": 35000
    },
    {
      "epoch": 2.22,
      "learning_rate": 8.191842105263158e-06,
      "loss": 0.2312,
      "step": 35500
    },
    {
      "epoch": 2.25,
      "learning_rate": 8.158947368421052e-06,
      "loss": 0.217,
      "step": 36000
    },
    {
      "epoch": 2.28,
      "learning_rate": 8.126052631578949e-06,
      "loss": 0.174,
      "step": 36500
    },
    {
      "epoch": 2.31,
      "learning_rate": 8.093157894736843e-06,
      "loss": 0.1485,
      "step": 37000
    },
    {
      "epoch": 2.34,
      "learning_rate": 8.060263157894738e-06,
      "loss": 0.1364,
      "step": 37500
    },
    {
      "epoch": 2.38,
      "learning_rate": 8.027434210526317e-06,
      "loss": 0.1894,
      "step": 38000
    },
    {
      "epoch": 2.41,
      "learning_rate": 7.99453947368421e-06,
      "loss": 0.1658,
      "step": 38500
    },
    {
      "epoch": 2.44,
      "learning_rate": 7.961644736842106e-06,
      "loss": 0.2059,
      "step": 39000
    },
    {
      "epoch": 2.47,
      "learning_rate": 7.928750000000001e-06,
      "loss": 0.2102,
      "step": 39500
    },
    {
      "epoch": 2.5,
      "learning_rate": 7.89592105263158e-06,
      "loss": 0.2032,
      "step": 40000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.863026315789475e-06,
      "loss": 0.2062,
      "step": 40500
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.830131578947368e-06,
      "loss": 0.1848,
      "step": 41000
    },
    {
      "epoch": 2.59,
      "learning_rate": 7.797236842105264e-06,
      "loss": 0.2161,
      "step": 41500
    },
    {
      "epoch": 2.62,
      "learning_rate": 7.764342105263159e-06,
      "loss": 0.1746,
      "step": 42000
    },
    {
      "epoch": 2.66,
      "learning_rate": 7.731447368421053e-06,
      "loss": 0.1979,
      "step": 42500
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.698618421052633e-06,
      "loss": 0.1569,
      "step": 43000
    },
    {
      "epoch": 2.72,
      "learning_rate": 7.665723684210526e-06,
      "loss": 0.2154,
      "step": 43500
    },
    {
      "epoch": 2.75,
      "learning_rate": 7.632828947368422e-06,
      "loss": 0.1723,
      "step": 44000
    },
    {
      "epoch": 2.78,
      "learning_rate": 7.599934210526317e-06,
      "loss": 0.1751,
      "step": 44500
    },
    {
      "epoch": 2.81,
      "learning_rate": 7.5671052631578955e-06,
      "loss": 0.1828,
      "step": 45000
    },
    {
      "epoch": 2.84,
      "learning_rate": 7.534210526315791e-06,
      "loss": 0.2034,
      "step": 45500
    },
    {
      "epoch": 2.88,
      "learning_rate": 7.501315789473685e-06,
      "loss": 0.1736,
      "step": 46000
    },
    {
      "epoch": 2.91,
      "learning_rate": 7.468421052631579e-06,
      "loss": 0.1858,
      "step": 46500
    },
    {
      "epoch": 2.94,
      "learning_rate": 7.435526315789474e-06,
      "loss": 0.19,
      "step": 47000
    },
    {
      "epoch": 2.97,
      "learning_rate": 7.402697368421053e-06,
      "loss": 0.176,
      "step": 47500
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.369802631578949e-06,
      "loss": 0.17,
      "step": 48000
    },
    {
      "epoch": 3.0,
      "eval_BLEU": 76.96135367983078,
      "eval_BLEU-Bigram-Precision": 80.3268909568122,
      "eval_BLEU-Trigram-Precision": 73.89894623074844,
      "eval_BLEU-Unigram-Precision": 88.08356427201022,
      "eval_ROUGE-2": 74.80911471749707,
      "eval_ROUGE-L": 89.60895451818533,
      "eval_Sacre-Bigram-Precision": 81.12129380053908,
      "eval_Sacre-Trigram-Precision": 75.68414707655214,
      "eval_Sacre-Unigram-Precision": 88.19464720194647,
      "eval_SacreBLEU": 78.11560799454091,
      "eval_loss": 0.3022772967815399,
      "eval_runtime": 90.1953,
      "eval_samples_per_second": 11.087,
      "eval_steps_per_second": 11.087,
      "step": 48000
    },
    {
      "epoch": 3.03,
      "learning_rate": 7.3369078947368425e-06,
      "loss": 0.1881,
      "step": 48500
    },
    {
      "epoch": 3.06,
      "learning_rate": 7.304013157894737e-06,
      "loss": 0.148,
      "step": 49000
    },
    {
      "epoch": 3.09,
      "learning_rate": 7.271118421052632e-06,
      "loss": 0.1596,
      "step": 49500
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.238223684210527e-06,
      "loss": 0.1553,
      "step": 50000
    },
    {
      "epoch": 3.16,
      "learning_rate": 7.205328947368422e-06,
      "loss": 0.1953,
      "step": 50500
    },
    {
      "epoch": 3.19,
      "learning_rate": 7.172434210526317e-06,
      "loss": 0.1691,
      "step": 51000
    },
    {
      "epoch": 3.22,
      "learning_rate": 7.1395394736842115e-06,
      "loss": 0.1448,
      "step": 51500
    },
    {
      "epoch": 3.25,
      "learning_rate": 7.1067105263157895e-06,
      "loss": 0.1379,
      "step": 52000
    },
    {
      "epoch": 3.28,
      "learning_rate": 7.073881578947369e-06,
      "loss": 0.1322,
      "step": 52500
    },
    {
      "epoch": 3.31,
      "learning_rate": 7.040986842105264e-06,
      "loss": 0.1439,
      "step": 53000
    },
    {
      "epoch": 3.34,
      "learning_rate": 7.008092105263159e-06,
      "loss": 0.1838,
      "step": 53500
    },
    {
      "epoch": 3.38,
      "learning_rate": 6.975197368421053e-06,
      "loss": 0.1548,
      "step": 54000
    },
    {
      "epoch": 3.41,
      "learning_rate": 6.9423684210526324e-06,
      "loss": 0.1375,
      "step": 54500
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.909473684210527e-06,
      "loss": 0.1658,
      "step": 55000
    },
    {
      "epoch": 3.47,
      "learning_rate": 6.876644736842107e-06,
      "loss": 0.1514,
      "step": 55500
    },
    {
      "epoch": 3.5,
      "learning_rate": 6.84375e-06,
      "loss": 0.1835,
      "step": 56000
    },
    {
      "epoch": 3.53,
      "learning_rate": 6.810855263157895e-06,
      "loss": 0.1397,
      "step": 56500
    },
    {
      "epoch": 3.56,
      "learning_rate": 6.77796052631579e-06,
      "loss": 0.1324,
      "step": 57000
    },
    {
      "epoch": 3.59,
      "learning_rate": 6.745065789473685e-06,
      "loss": 0.1426,
      "step": 57500
    },
    {
      "epoch": 3.62,
      "learning_rate": 6.7121710526315794e-06,
      "loss": 0.1932,
      "step": 58000
    },
    {
      "epoch": 3.66,
      "learning_rate": 6.679276315789475e-06,
      "loss": 0.1643,
      "step": 58500
    },
    {
      "epoch": 3.69,
      "learning_rate": 6.6463815789473685e-06,
      "loss": 0.1606,
      "step": 59000
    },
    {
      "epoch": 3.72,
      "learning_rate": 6.613486842105263e-06,
      "loss": 0.1627,
      "step": 59500
    },
    {
      "epoch": 3.75,
      "learning_rate": 6.5805921052631585e-06,
      "loss": 0.1643,
      "step": 60000
    },
    {
      "epoch": 3.78,
      "learning_rate": 6.547697368421053e-06,
      "loss": 0.1321,
      "step": 60500
    },
    {
      "epoch": 3.81,
      "learning_rate": 6.5148026315789485e-06,
      "loss": 0.1282,
      "step": 61000
    },
    {
      "epoch": 3.84,
      "learning_rate": 6.481907894736843e-06,
      "loss": 0.1495,
      "step": 61500
    },
    {
      "epoch": 3.88,
      "learning_rate": 6.449078947368421e-06,
      "loss": 0.1518,
      "step": 62000
    },
    {
      "epoch": 3.91,
      "learning_rate": 6.416184210526316e-06,
      "loss": 0.1508,
      "step": 62500
    },
    {
      "epoch": 3.94,
      "learning_rate": 6.383289473684211e-06,
      "loss": 0.1487,
      "step": 63000
    },
    {
      "epoch": 3.97,
      "learning_rate": 6.350394736842106e-06,
      "loss": 0.1368,
      "step": 63500
    },
    {
      "epoch": 4.0,
      "learning_rate": 6.317500000000001e-06,
      "loss": 0.1371,
      "step": 64000
    },
    {
      "epoch": 4.0,
      "eval_BLEU": 76.97562999548838,
      "eval_BLEU-Bigram-Precision": 80.0071106897369,
      "eval_BLEU-Trigram-Precision": 73.54518637704479,
      "eval_BLEU-Unigram-Precision": 87.63509218054672,
      "eval_ROUGE-2": 75.31771486560173,
      "eval_ROUGE-L": 89.81535115722117,
      "eval_Sacre-Bigram-Precision": 80.88565621991657,
      "eval_Sacre-Trigram-Precision": 75.51678814673198,
      "eval_Sacre-Unigram-Precision": 87.88288723548169,
      "eval_SacreBLEU": 78.47888708880886,
      "eval_loss": 0.2903847098350525,
      "eval_runtime": 90.0073,
      "eval_samples_per_second": 11.11,
      "eval_steps_per_second": 11.11,
      "step": 64000
    },
    {
      "epoch": 4.03,
      "learning_rate": 6.2846052631578955e-06,
      "loss": 0.1406,
      "step": 64500
    },
    {
      "epoch": 4.06,
      "learning_rate": 6.251710526315789e-06,
      "loss": 0.1134,
      "step": 65000
    },
    {
      "epoch": 4.09,
      "learning_rate": 6.218815789473685e-06,
      "loss": 0.126,
      "step": 65500
    },
    {
      "epoch": 4.12,
      "learning_rate": 6.1860526315789485e-06,
      "loss": 0.1274,
      "step": 66000
    },
    {
      "epoch": 4.16,
      "learning_rate": 6.153157894736843e-06,
      "loss": 0.134,
      "step": 66500
    },
    {
      "epoch": 4.19,
      "learning_rate": 6.120263157894737e-06,
      "loss": 0.1462,
      "step": 67000
    },
    {
      "epoch": 4.22,
      "learning_rate": 6.087368421052632e-06,
      "loss": 0.1203,
      "step": 67500
    },
    {
      "epoch": 4.25,
      "learning_rate": 6.054539473684211e-06,
      "loss": 0.1602,
      "step": 68000
    },
    {
      "epoch": 4.28,
      "learning_rate": 6.021644736842106e-06,
      "loss": 0.1367,
      "step": 68500
    },
    {
      "epoch": 4.31,
      "learning_rate": 5.988750000000001e-06,
      "loss": 0.1087,
      "step": 69000
    },
    {
      "epoch": 4.34,
      "learning_rate": 5.955921052631579e-06,
      "loss": 0.1203,
      "step": 69500
    },
    {
      "epoch": 4.38,
      "learning_rate": 5.923026315789474e-06,
      "loss": 0.1199,
      "step": 70000
    },
    {
      "epoch": 4.41,
      "learning_rate": 5.890131578947369e-06,
      "loss": 0.1183,
      "step": 70500
    },
    {
      "epoch": 4.44,
      "learning_rate": 5.857236842105264e-06,
      "loss": 0.1366,
      "step": 71000
    },
    {
      "epoch": 4.47,
      "learning_rate": 5.824407894736842e-06,
      "loss": 0.1205,
      "step": 71500
    },
    {
      "epoch": 4.5,
      "learning_rate": 5.791513157894737e-06,
      "loss": 0.1537,
      "step": 72000
    },
    {
      "epoch": 4.53,
      "learning_rate": 5.758618421052632e-06,
      "loss": 0.1386,
      "step": 72500
    },
    {
      "epoch": 4.56,
      "learning_rate": 5.725723684210527e-06,
      "loss": 0.1634,
      "step": 73000
    },
    {
      "epoch": 4.59,
      "learning_rate": 5.692828947368422e-06,
      "loss": 0.1005,
      "step": 73500
    },
    {
      "epoch": 4.62,
      "learning_rate": 5.659934210526317e-06,
      "loss": 0.1546,
      "step": 74000
    },
    {
      "epoch": 4.66,
      "learning_rate": 5.62703947368421e-06,
      "loss": 0.1212,
      "step": 74500
    },
    {
      "epoch": 4.69,
      "learning_rate": 5.594144736842106e-06,
      "loss": 0.1446,
      "step": 75000
    },
    {
      "epoch": 4.72,
      "learning_rate": 5.5613157894736846e-06,
      "loss": 0.1339,
      "step": 75500
    },
    {
      "epoch": 4.75,
      "learning_rate": 5.52842105263158e-06,
      "loss": 0.1353,
      "step": 76000
    },
    {
      "epoch": 4.78,
      "learning_rate": 5.4955263157894745e-06,
      "loss": 0.1512,
      "step": 76500
    },
    {
      "epoch": 4.81,
      "learning_rate": 5.462631578947369e-06,
      "loss": 0.1217,
      "step": 77000
    },
    {
      "epoch": 4.84,
      "learning_rate": 5.429736842105263e-06,
      "loss": 0.1288,
      "step": 77500
    },
    {
      "epoch": 4.88,
      "learning_rate": 5.3969078947368424e-06,
      "loss": 0.155,
      "step": 78000
    },
    {
      "epoch": 4.91,
      "learning_rate": 5.364013157894738e-06,
      "loss": 0.128,
      "step": 78500
    },
    {
      "epoch": 4.94,
      "learning_rate": 5.331118421052632e-06,
      "loss": 0.1224,
      "step": 79000
    },
    {
      "epoch": 4.97,
      "learning_rate": 5.298223684210527e-06,
      "loss": 0.1245,
      "step": 79500
    },
    {
      "epoch": 5.0,
      "learning_rate": 5.265394736842106e-06,
      "loss": 0.1314,
      "step": 80000
    },
    {
      "epoch": 5.0,
      "eval_BLEU": 77.77478989310802,
      "eval_BLEU-Bigram-Precision": 82.58159149248259,
      "eval_BLEU-Trigram-Precision": 76.65601999722261,
      "eval_BLEU-Unigram-Precision": 89.58719093780634,
      "eval_ROUGE-2": 76.21694739487536,
      "eval_ROUGE-L": 90.30321591632153,
      "eval_Sacre-Bigram-Precision": 83.30402286216751,
      "eval_Sacre-Trigram-Precision": 78.39369302783938,
      "eval_Sacre-Unigram-Precision": 89.64151317092494,
      "eval_SacreBLEU": 79.14676248093514,
      "eval_loss": 0.2857072055339813,
      "eval_runtime": 88.6134,
      "eval_samples_per_second": 11.285,
      "eval_steps_per_second": 11.285,
      "step": 80000
    },
    {
      "epoch": 5.03,
      "learning_rate": 5.2325e-06,
      "loss": 0.0962,
      "step": 80500
    },
    {
      "epoch": 5.06,
      "learning_rate": 5.199605263157895e-06,
      "loss": 0.1223,
      "step": 81000
    },
    {
      "epoch": 5.09,
      "learning_rate": 5.1667763157894745e-06,
      "loss": 0.1234,
      "step": 81500
    },
    {
      "epoch": 5.12,
      "learning_rate": 5.133881578947368e-06,
      "loss": 0.0942,
      "step": 82000
    },
    {
      "epoch": 5.16,
      "learning_rate": 5.100986842105264e-06,
      "loss": 0.088,
      "step": 82500
    },
    {
      "epoch": 5.19,
      "learning_rate": 5.068092105263158e-06,
      "loss": 0.1013,
      "step": 83000
    },
    {
      "epoch": 5.22,
      "learning_rate": 5.035197368421053e-06,
      "loss": 0.1243,
      "step": 83500
    },
    {
      "epoch": 5.25,
      "learning_rate": 5.002302631578948e-06,
      "loss": 0.1097,
      "step": 84000
    },
    {
      "epoch": 5.28,
      "learning_rate": 4.969407894736843e-06,
      "loss": 0.1122,
      "step": 84500
    },
    {
      "epoch": 5.31,
      "learning_rate": 4.936513157894737e-06,
      "loss": 0.1474,
      "step": 85000
    },
    {
      "epoch": 5.34,
      "learning_rate": 4.903618421052632e-06,
      "loss": 0.1107,
      "step": 85500
    },
    {
      "epoch": 5.38,
      "learning_rate": 4.870723684210526e-06,
      "loss": 0.119,
      "step": 86000
    },
    {
      "epoch": 5.41,
      "learning_rate": 4.837894736842106e-06,
      "loss": 0.0859,
      "step": 86500
    },
    {
      "epoch": 5.44,
      "learning_rate": 4.805000000000001e-06,
      "loss": 0.1172,
      "step": 87000
    },
    {
      "epoch": 5.47,
      "learning_rate": 4.772171052631579e-06,
      "loss": 0.1017,
      "step": 87500
    },
    {
      "epoch": 5.5,
      "learning_rate": 4.739276315789474e-06,
      "loss": 0.0847,
      "step": 88000
    },
    {
      "epoch": 5.53,
      "learning_rate": 4.7063815789473685e-06,
      "loss": 0.1437,
      "step": 88500
    },
    {
      "epoch": 5.56,
      "learning_rate": 4.673486842105264e-06,
      "loss": 0.1294,
      "step": 89000
    },
    {
      "epoch": 5.59,
      "learning_rate": 4.6405921052631585e-06,
      "loss": 0.1037,
      "step": 89500
    },
    {
      "epoch": 5.62,
      "learning_rate": 4.607697368421053e-06,
      "loss": 0.1108,
      "step": 90000
    },
    {
      "epoch": 5.66,
      "learning_rate": 4.574868421052632e-06,
      "loss": 0.1186,
      "step": 90500
    },
    {
      "epoch": 5.69,
      "learning_rate": 4.541973684210526e-06,
      "loss": 0.1252,
      "step": 91000
    },
    {
      "epoch": 5.72,
      "learning_rate": 4.509078947368422e-06,
      "loss": 0.1202,
      "step": 91500
    },
    {
      "epoch": 5.75,
      "learning_rate": 4.476184210526316e-06,
      "loss": 0.122,
      "step": 92000
    },
    {
      "epoch": 5.78,
      "learning_rate": 4.443289473684211e-06,
      "loss": 0.136,
      "step": 92500
    },
    {
      "epoch": 5.81,
      "learning_rate": 4.4103947368421055e-06,
      "loss": 0.1213,
      "step": 93000
    },
    {
      "epoch": 5.84,
      "learning_rate": 4.3775e-06,
      "loss": 0.1041,
      "step": 93500
    },
    {
      "epoch": 5.88,
      "learning_rate": 4.344605263157895e-06,
      "loss": 0.1339,
      "step": 94000
    },
    {
      "epoch": 5.91,
      "learning_rate": 4.31171052631579e-06,
      "loss": 0.0929,
      "step": 94500
    },
    {
      "epoch": 5.94,
      "learning_rate": 4.2788157894736845e-06,
      "loss": 0.1185,
      "step": 95000
    },
    {
      "epoch": 5.97,
      "learning_rate": 4.245986842105263e-06,
      "loss": 0.1242,
      "step": 95500
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.213092105263159e-06,
      "loss": 0.1,
      "step": 96000
    },
    {
      "epoch": 6.0,
      "eval_BLEU": 79.11019971744633,
      "eval_BLEU-Bigram-Precision": 85.00372116100223,
      "eval_BLEU-Trigram-Precision": 79.31375317706862,
      "eval_BLEU-Unigram-Precision": 91.3153829176782,
      "eval_ROUGE-2": 76.72218981179273,
      "eval_ROUGE-L": 90.74286437693775,
      "eval_Sacre-Bigram-Precision": 85.59587813620071,
      "eval_Sacre-Trigram-Precision": 81.08958228485153,
      "eval_Sacre-Unigram-Precision": 91.41821112006447,
      "eval_SacreBLEU": 80.09298309615393,
      "eval_loss": 0.2855425179004669,
      "eval_runtime": 87.7278,
      "eval_samples_per_second": 11.399,
      "eval_steps_per_second": 11.399,
      "step": 96000
    },
    {
      "epoch": 6.03,
      "learning_rate": 4.1801973684210525e-06,
      "loss": 0.0992,
      "step": 96500
    },
    {
      "epoch": 6.06,
      "learning_rate": 4.147302631578948e-06,
      "loss": 0.0906,
      "step": 97000
    },
    {
      "epoch": 6.09,
      "learning_rate": 4.114407894736842e-06,
      "loss": 0.114,
      "step": 97500
    },
    {
      "epoch": 6.12,
      "learning_rate": 4.081578947368421e-06,
      "loss": 0.0868,
      "step": 98000
    },
    {
      "epoch": 6.16,
      "learning_rate": 4.048684210526317e-06,
      "loss": 0.0837,
      "step": 98500
    },
    {
      "epoch": 6.19,
      "learning_rate": 4.01578947368421e-06,
      "loss": 0.1157,
      "step": 99000
    },
    {
      "epoch": 6.22,
      "learning_rate": 3.982894736842106e-06,
      "loss": 0.0993,
      "step": 99500
    },
    {
      "epoch": 6.25,
      "learning_rate": 3.95e-06,
      "loss": 0.1214,
      "step": 100000
    },
    {
      "epoch": 6.28,
      "learning_rate": 3.917171052631579e-06,
      "loss": 0.1167,
      "step": 100500
    },
    {
      "epoch": 6.31,
      "learning_rate": 3.884342105263158e-06,
      "loss": 0.1021,
      "step": 101000
    },
    {
      "epoch": 6.34,
      "learning_rate": 3.851447368421053e-06,
      "loss": 0.0963,
      "step": 101500
    },
    {
      "epoch": 6.38,
      "learning_rate": 3.818552631578948e-06,
      "loss": 0.1014,
      "step": 102000
    },
    {
      "epoch": 6.41,
      "learning_rate": 3.7856578947368424e-06,
      "loss": 0.096,
      "step": 102500
    },
    {
      "epoch": 6.44,
      "learning_rate": 3.752763157894737e-06,
      "loss": 0.105,
      "step": 103000
    },
    {
      "epoch": 6.47,
      "learning_rate": 3.719868421052632e-06,
      "loss": 0.095,
      "step": 103500
    },
    {
      "epoch": 6.5,
      "learning_rate": 3.6870394736842107e-06,
      "loss": 0.1049,
      "step": 104000
    },
    {
      "epoch": 6.53,
      "learning_rate": 3.6541447368421057e-06,
      "loss": 0.1037,
      "step": 104500
    },
    {
      "epoch": 6.56,
      "learning_rate": 3.62125e-06,
      "loss": 0.0754,
      "step": 105000
    },
    {
      "epoch": 6.59,
      "learning_rate": 3.588355263157895e-06,
      "loss": 0.0996,
      "step": 105500
    },
    {
      "epoch": 6.62,
      "learning_rate": 3.55546052631579e-06,
      "loss": 0.1024,
      "step": 106000
    },
    {
      "epoch": 6.66,
      "learning_rate": 3.522565789473685e-06,
      "loss": 0.0926,
      "step": 106500
    },
    {
      "epoch": 6.69,
      "learning_rate": 3.489671052631579e-06,
      "loss": 0.0782,
      "step": 107000
    },
    {
      "epoch": 6.72,
      "learning_rate": 3.456776315789474e-06,
      "loss": 0.1045,
      "step": 107500
    },
    {
      "epoch": 6.75,
      "learning_rate": 3.423881578947369e-06,
      "loss": 0.1079,
      "step": 108000
    },
    {
      "epoch": 6.78,
      "learning_rate": 3.3910526315789477e-06,
      "loss": 0.0924,
      "step": 108500
    },
    {
      "epoch": 6.81,
      "learning_rate": 3.3582236842105265e-06,
      "loss": 0.1352,
      "step": 109000
    },
    {
      "epoch": 6.84,
      "learning_rate": 3.3253289473684215e-06,
      "loss": 0.0865,
      "step": 109500
    },
    {
      "epoch": 6.88,
      "learning_rate": 3.2924342105263164e-06,
      "loss": 0.0924,
      "step": 110000
    },
    {
      "epoch": 6.91,
      "learning_rate": 3.2595394736842106e-06,
      "loss": 0.0812,
      "step": 110500
    },
    {
      "epoch": 6.94,
      "learning_rate": 3.2266447368421056e-06,
      "loss": 0.1081,
      "step": 111000
    },
    {
      "epoch": 6.97,
      "learning_rate": 3.1937500000000006e-06,
      "loss": 0.0872,
      "step": 111500
    },
    {
      "epoch": 7.0,
      "learning_rate": 3.1608552631578947e-06,
      "loss": 0.0855,
      "step": 112000
    },
    {
      "epoch": 7.0,
      "eval_BLEU": 79.18243013411383,
      "eval_BLEU-Bigram-Precision": 83.65829880575188,
      "eval_BLEU-Trigram-Precision": 78.10683642402437,
      "eval_BLEU-Unigram-Precision": 90.08255485552901,
      "eval_ROUGE-2": 76.92605201616105,
      "eval_ROUGE-L": 90.80400601001202,
      "eval_Sacre-Bigram-Precision": 84.62469733656174,
      "eval_Sacre-Trigram-Precision": 80.1505057981742,
      "eval_Sacre-Unigram-Precision": 90.4322823716042,
      "eval_SacreBLEU": 80.51163956701332,
      "eval_loss": 0.2782886326313019,
      "eval_runtime": 88.5286,
      "eval_samples_per_second": 11.296,
      "eval_steps_per_second": 11.296,
      "step": 112000
    },
    {
      "epoch": 7.03,
      "learning_rate": 3.1279605263157897e-06,
      "loss": 0.0686,
      "step": 112500
    },
    {
      "epoch": 7.06,
      "learning_rate": 3.0951315789473685e-06,
      "loss": 0.1035,
      "step": 113000
    },
    {
      "epoch": 7.09,
      "learning_rate": 3.0623026315789477e-06,
      "loss": 0.0822,
      "step": 113500
    },
    {
      "epoch": 7.12,
      "learning_rate": 3.0294078947368422e-06,
      "loss": 0.074,
      "step": 114000
    },
    {
      "epoch": 7.16,
      "learning_rate": 2.996513157894737e-06,
      "loss": 0.0931,
      "step": 114500
    },
    {
      "epoch": 7.19,
      "learning_rate": 2.9636184210526318e-06,
      "loss": 0.0849,
      "step": 115000
    },
    {
      "epoch": 7.22,
      "learning_rate": 2.9307236842105263e-06,
      "loss": 0.1084,
      "step": 115500
    },
    {
      "epoch": 7.25,
      "learning_rate": 2.8978289473684213e-06,
      "loss": 0.0754,
      "step": 116000
    },
    {
      "epoch": 7.28,
      "learning_rate": 2.8649342105263163e-06,
      "loss": 0.0711,
      "step": 116500
    },
    {
      "epoch": 7.31,
      "learning_rate": 2.8320394736842104e-06,
      "loss": 0.0965,
      "step": 117000
    },
    {
      "epoch": 7.34,
      "learning_rate": 2.7992105263157896e-06,
      "loss": 0.097,
      "step": 117500
    },
    {
      "epoch": 7.38,
      "learning_rate": 2.766315789473684e-06,
      "loss": 0.0852,
      "step": 118000
    },
    {
      "epoch": 7.41,
      "learning_rate": 2.733421052631579e-06,
      "loss": 0.0847,
      "step": 118500
    },
    {
      "epoch": 7.44,
      "learning_rate": 2.700526315789474e-06,
      "loss": 0.0935,
      "step": 119000
    },
    {
      "epoch": 7.47,
      "learning_rate": 2.6676315789473687e-06,
      "loss": 0.0728,
      "step": 119500
    },
    {
      "epoch": 7.5,
      "learning_rate": 2.6347368421052633e-06,
      "loss": 0.0912,
      "step": 120000
    },
    {
      "epoch": 7.53,
      "learning_rate": 2.6018421052631583e-06,
      "loss": 0.0769,
      "step": 120500
    },
    {
      "epoch": 7.56,
      "learning_rate": 2.5689473684210533e-06,
      "loss": 0.0874,
      "step": 121000
    },
    {
      "epoch": 7.59,
      "learning_rate": 2.5360526315789474e-06,
      "loss": 0.0841,
      "step": 121500
    },
    {
      "epoch": 7.62,
      "learning_rate": 2.5031578947368424e-06,
      "loss": 0.0785,
      "step": 122000
    },
    {
      "epoch": 7.66,
      "learning_rate": 2.470328947368421e-06,
      "loss": 0.0729,
      "step": 122500
    },
    {
      "epoch": 7.69,
      "learning_rate": 2.437434210526316e-06,
      "loss": 0.0871,
      "step": 123000
    },
    {
      "epoch": 7.72,
      "learning_rate": 2.4045394736842107e-06,
      "loss": 0.1076,
      "step": 123500
    },
    {
      "epoch": 7.75,
      "learning_rate": 2.3716447368421057e-06,
      "loss": 0.1002,
      "step": 124000
    },
    {
      "epoch": 7.78,
      "learning_rate": 2.3388157894736845e-06,
      "loss": 0.0812,
      "step": 124500
    },
    {
      "epoch": 7.81,
      "learning_rate": 2.305921052631579e-06,
      "loss": 0.087,
      "step": 125000
    },
    {
      "epoch": 7.84,
      "learning_rate": 2.273026315789474e-06,
      "loss": 0.0767,
      "step": 125500
    },
    {
      "epoch": 7.88,
      "learning_rate": 2.2401315789473686e-06,
      "loss": 0.1071,
      "step": 126000
    },
    {
      "epoch": 7.91,
      "learning_rate": 2.2072368421052636e-06,
      "loss": 0.1095,
      "step": 126500
    },
    {
      "epoch": 7.94,
      "learning_rate": 2.174342105263158e-06,
      "loss": 0.0983,
      "step": 127000
    },
    {
      "epoch": 7.97,
      "learning_rate": 2.141447368421053e-06,
      "loss": 0.0786,
      "step": 127500
    },
    {
      "epoch": 8.0,
      "learning_rate": 2.1085526315789477e-06,
      "loss": 0.0941,
      "step": 128000
    },
    {
      "epoch": 8.0,
      "eval_BLEU": 79.82458025693062,
      "eval_BLEU-Bigram-Precision": 83.85884065001213,
      "eval_BLEU-Trigram-Precision": 78.5439031103771,
      "eval_BLEU-Unigram-Precision": 90.06056673155959,
      "eval_ROUGE-2": 77.16039909105272,
      "eval_ROUGE-L": 90.85908984738971,
      "eval_Sacre-Bigram-Precision": 84.34089917066783,
      "eval_Sacre-Trigram-Precision": 80.05865102639297,
      "eval_Sacre-Unigram-Precision": 89.97441951987406,
      "eval_SacreBLEU": 80.93271666051959,
      "eval_loss": 0.2738938629627228,
      "eval_runtime": 88.7035,
      "eval_samples_per_second": 11.274,
      "eval_steps_per_second": 11.274,
      "step": 128000
    },
    {
      "epoch": 8.03,
      "learning_rate": 2.0757236842105265e-06,
      "loss": 0.085,
      "step": 128500
    },
    {
      "epoch": 8.06,
      "learning_rate": 2.0428947368421053e-06,
      "loss": 0.099,
      "step": 129000
    },
    {
      "epoch": 8.09,
      "learning_rate": 2.0100000000000002e-06,
      "loss": 0.0742,
      "step": 129500
    },
    {
      "epoch": 8.12,
      "learning_rate": 1.977105263157895e-06,
      "loss": 0.0918,
      "step": 130000
    },
    {
      "epoch": 8.16,
      "learning_rate": 1.9442105263157894e-06,
      "loss": 0.0821,
      "step": 130500
    },
    {
      "epoch": 8.19,
      "learning_rate": 1.9113157894736843e-06,
      "loss": 0.0724,
      "step": 131000
    },
    {
      "epoch": 8.22,
      "learning_rate": 1.8784210526315791e-06,
      "loss": 0.0654,
      "step": 131500
    },
    {
      "epoch": 8.25,
      "learning_rate": 1.8455263157894737e-06,
      "loss": 0.0906,
      "step": 132000
    },
    {
      "epoch": 8.28,
      "learning_rate": 1.8126315789473687e-06,
      "loss": 0.1159,
      "step": 132500
    },
    {
      "epoch": 8.31,
      "learning_rate": 1.7797368421052632e-06,
      "loss": 0.0841,
      "step": 133000
    },
    {
      "epoch": 8.34,
      "learning_rate": 1.7468421052631582e-06,
      "loss": 0.0626,
      "step": 133500
    },
    {
      "epoch": 8.38,
      "learning_rate": 1.7139473684210528e-06,
      "loss": 0.0721,
      "step": 134000
    },
    {
      "epoch": 8.41,
      "learning_rate": 1.6810526315789473e-06,
      "loss": 0.0775,
      "step": 134500
    },
    {
      "epoch": 8.44,
      "learning_rate": 1.6481578947368423e-06,
      "loss": 0.0756,
      "step": 135000
    },
    {
      "epoch": 8.47,
      "learning_rate": 1.615328947368421e-06,
      "loss": 0.0736,
      "step": 135500
    },
    {
      "epoch": 8.5,
      "learning_rate": 1.5825e-06,
      "loss": 0.086,
      "step": 136000
    },
    {
      "epoch": 8.53,
      "learning_rate": 1.5496052631578949e-06,
      "loss": 0.0776,
      "step": 136500
    },
    {
      "epoch": 8.56,
      "learning_rate": 1.5167105263157896e-06,
      "loss": 0.0785,
      "step": 137000
    },
    {
      "epoch": 8.59,
      "learning_rate": 1.4838157894736844e-06,
      "loss": 0.0761,
      "step": 137500
    },
    {
      "epoch": 8.62,
      "learning_rate": 1.450921052631579e-06,
      "loss": 0.0919,
      "step": 138000
    },
    {
      "epoch": 8.66,
      "learning_rate": 1.418092105263158e-06,
      "loss": 0.0963,
      "step": 138500
    },
    {
      "epoch": 8.69,
      "learning_rate": 1.3851973684210525e-06,
      "loss": 0.0682,
      "step": 139000
    },
    {
      "epoch": 8.72,
      "learning_rate": 1.3523026315789475e-06,
      "loss": 0.0701,
      "step": 139500
    },
    {
      "epoch": 8.75,
      "learning_rate": 1.319407894736842e-06,
      "loss": 0.0873,
      "step": 140000
    },
    {
      "epoch": 8.78,
      "learning_rate": 1.286513157894737e-06,
      "loss": 0.0778,
      "step": 140500
    },
    {
      "epoch": 8.81,
      "learning_rate": 1.2536184210526316e-06,
      "loss": 0.0854,
      "step": 141000
    },
    {
      "epoch": 8.84,
      "learning_rate": 1.2207236842105264e-06,
      "loss": 0.0727,
      "step": 141500
    },
    {
      "epoch": 8.88,
      "learning_rate": 1.1878289473684212e-06,
      "loss": 0.0798,
      "step": 142000
    },
    {
      "epoch": 8.91,
      "learning_rate": 1.1550657894736842e-06,
      "loss": 0.0629,
      "step": 142500
    },
    {
      "epoch": 8.94,
      "learning_rate": 1.122171052631579e-06,
      "loss": 0.0746,
      "step": 143000
    },
    {
      "epoch": 8.97,
      "learning_rate": 1.0892763157894737e-06,
      "loss": 0.0775,
      "step": 143500
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.0563815789473685e-06,
      "loss": 0.0825,
      "step": 144000
    },
    {
      "epoch": 9.0,
      "eval_BLEU": 79.76894087868382,
      "eval_BLEU-Bigram-Precision": 82.89615522817104,
      "eval_BLEU-Trigram-Precision": 77.52747998371557,
      "eval_BLEU-Unigram-Precision": 89.2715798481121,
      "eval_ROUGE-2": 77.13688197598398,
      "eval_ROUGE-L": 90.87326567997422,
      "eval_Sacre-Bigram-Precision": 83.55085751267393,
      "eval_Sacre-Trigram-Precision": 79.18224580870823,
      "eval_Sacre-Unigram-Precision": 89.34865154317983,
      "eval_SacreBLEU": 80.98475876485695,
      "eval_loss": 0.2726329565048218,
      "eval_runtime": 89.7408,
      "eval_samples_per_second": 11.143,
      "eval_steps_per_second": 11.143,
      "step": 144000
    },
    {
      "epoch": 9.03,
      "learning_rate": 1.0234868421052633e-06,
      "loss": 0.0772,
      "step": 144500
    },
    {
      "epoch": 9.06,
      "learning_rate": 9.90592105263158e-07,
      "loss": 0.075,
      "step": 145000
    },
    {
      "epoch": 9.09,
      "learning_rate": 9.577631578947368e-07,
      "loss": 0.0837,
      "step": 145500
    },
    {
      "epoch": 9.12,
      "learning_rate": 9.248684210526317e-07,
      "loss": 0.0809,
      "step": 146000
    },
    {
      "epoch": 9.16,
      "learning_rate": 8.919736842105265e-07,
      "loss": 0.0698,
      "step": 146500
    },
    {
      "epoch": 9.19,
      "learning_rate": 8.591447368421054e-07,
      "loss": 0.0851,
      "step": 147000
    },
    {
      "epoch": 9.22,
      "learning_rate": 8.262500000000001e-07,
      "loss": 0.0698,
      "step": 147500
    },
    {
      "epoch": 9.25,
      "learning_rate": 7.933552631578947e-07,
      "loss": 0.0822,
      "step": 148000
    },
    {
      "epoch": 9.28,
      "learning_rate": 7.604605263157895e-07,
      "loss": 0.0786,
      "step": 148500
    },
    {
      "epoch": 9.31,
      "learning_rate": 7.275657894736842e-07,
      "loss": 0.078,
      "step": 149000
    },
    {
      "epoch": 9.34,
      "learning_rate": 6.94671052631579e-07,
      "loss": 0.0762,
      "step": 149500
    },
    {
      "epoch": 9.38,
      "learning_rate": 6.617763157894737e-07,
      "loss": 0.071,
      "step": 150000
    },
    {
      "epoch": 9.41,
      "learning_rate": 6.288815789473685e-07,
      "loss": 0.0742,
      "step": 150500
    },
    {
      "epoch": 9.44,
      "learning_rate": 5.959868421052632e-07,
      "loss": 0.0716,
      "step": 151000
    },
    {
      "epoch": 9.47,
      "learning_rate": 5.630921052631579e-07,
      "loss": 0.0602,
      "step": 151500
    },
    {
      "epoch": 9.5,
      "learning_rate": 5.302631578947369e-07,
      "loss": 0.0978,
      "step": 152000
    },
    {
      "epoch": 9.53,
      "learning_rate": 4.973684210526316e-07,
      "loss": 0.0731,
      "step": 152500
    },
    {
      "epoch": 9.56,
      "learning_rate": 4.6453947368421055e-07,
      "loss": 0.0887,
      "step": 153000
    },
    {
      "epoch": 9.59,
      "learning_rate": 4.316447368421053e-07,
      "loss": 0.0895,
      "step": 153500
    },
    {
      "epoch": 9.62,
      "learning_rate": 3.9875000000000005e-07,
      "loss": 0.0756,
      "step": 154000
    },
    {
      "epoch": 9.66,
      "learning_rate": 3.6585526315789477e-07,
      "loss": 0.0795,
      "step": 154500
    },
    {
      "epoch": 9.69,
      "learning_rate": 3.329605263157895e-07,
      "loss": 0.0836,
      "step": 155000
    },
    {
      "epoch": 9.72,
      "learning_rate": 3.0006578947368426e-07,
      "loss": 0.08,
      "step": 155500
    },
    {
      "epoch": 9.75,
      "learning_rate": 2.67171052631579e-07,
      "loss": 0.0676,
      "step": 156000
    },
    {
      "epoch": 9.78,
      "learning_rate": 2.3434210526315792e-07,
      "loss": 0.0931,
      "step": 156500
    },
    {
      "epoch": 9.81,
      "learning_rate": 2.0144736842105264e-07,
      "loss": 0.0717,
      "step": 157000
    },
    {
      "epoch": 9.84,
      "learning_rate": 1.685526315789474e-07,
      "loss": 0.078,
      "step": 157500
    },
    {
      "epoch": 9.88,
      "learning_rate": 1.356578947368421e-07,
      "loss": 0.0618,
      "step": 158000
    },
    {
      "epoch": 9.91,
      "learning_rate": 1.0276315789473684e-07,
      "loss": 0.0783,
      "step": 158500
    },
    {
      "epoch": 9.94,
      "learning_rate": 6.986842105263158e-08,
      "loss": 0.0823,
      "step": 159000
    },
    {
      "epoch": 9.97,
      "learning_rate": 3.6973684210526316e-08,
      "loss": 0.0558,
      "step": 159500
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.078947368421053e-09,
      "loss": 0.0776,
      "step": 160000
    },
    {
      "epoch": 10.0,
      "eval_BLEU": 80.25661524924499,
      "eval_BLEU-Bigram-Precision": 83.93569442765623,
      "eval_BLEU-Trigram-Precision": 78.80159056629645,
      "eval_BLEU-Unigram-Precision": 90.10029116790682,
      "eval_ROUGE-2": 77.49770887608805,
      "eval_ROUGE-L": 91.11406002685167,
      "eval_Sacre-Bigram-Precision": 84.69510199629104,
      "eval_Sacre-Trigram-Precision": 80.59118114083303,
      "eval_Sacre-Unigram-Precision": 90.23310711124225,
      "eval_SacreBLEU": 81.40117848493372,
      "eval_loss": 0.27461713552474976,
      "eval_runtime": 88.6539,
      "eval_samples_per_second": 11.28,
      "eval_steps_per_second": 11.28,
      "step": 160000
    },
    {
      "epoch": 10.0,
      "step": 160000,
      "total_flos": 2111609241600000.0,
      "train_loss": 0.13924457960128783,
      "train_runtime": 13035.4538,
      "train_samples_per_second": 12.274,
      "train_steps_per_second": 12.274
    }
  ],
  "max_steps": 160000,
  "num_train_epochs": 10,
  "total_flos": 2111609241600000.0,
  "trial_name": null,
  "trial_params": null
}