{
"sacrebleu": {
"score": 33.436451348487466,
"counts": [
19586,
11874,
7945,
5450
],
"totals": [
31477,
30479,
29481,
28485
],
"precisions": [
62.22321059821457,
38.957971062042716,
26.94956073403209,
19.132876952782166
],
"bp": 1.0,
"sys_len": 31477,
"ref_len": 30620
},
"rouge": {
"rouge1": [
[
0.5868986191655717,
0.5990516851228531,
0.5892049154144693
],
[
0.5977571827236499,
0.6099207497981789,
0.5995111761931164
],
[
0.6079524761878128,
0.6202142720286049,
0.6094761236959281
]
],
"rouge2": [
[
0.3706071661208073,
0.3785178998280979,
0.37195231176110016
],
[
0.3822973112445337,
0.3910339885523211,
0.3840413583143001
],
[
0.3943269285733919,
0.40342286677145006,
0.39586855043691616
]
],
"rougeL": [
[
0.5396432739320616,
0.5500014160494135,
0.5415782549465077
],
[
0.5501208131226542,
0.5612357333331699,
0.5516218104057385
],
[
0.5618124341271667,
0.572879684102229,
0.562976363702977
]
],
"rougeLsum": [
[
0.538985057343137,
0.5513723525906167,
0.5416040213401992
],
[
0.5506317054043084,
0.5618544163370318,
0.552372087766954
],
[
0.562072829679551,
0.5729279153458795,
0.5629424360218582
]
]
},
"bleu": {
"bleu": 0.2966828370181192,
"precisions": [
0.5677756586847496,
0.3517718553228877,
0.23602720468102892,
0.1643500236929395
],
"brevity_penalty": 1.0,
"length_ratio": 1.029862146728258,
"translation_length": 28314,
"reference_length": 27493
},
"meteor": {
"meteor": 0.48057062339433765
},
"ter": {
"score": 57.60375368275561,
"num_edits": 15837,
"ref_length": 27493.0
},
"bert_score": {
"precision": 0.8292220607697963,
"recall": 0.8316881820857525,
"f1": 0.8299418310523033,
"hashcode": "microsoft/deberta-xlarge-mnli_L40_no-idf_version=0.3.11(hug_trans=4.18.0)_fast-tokenizer"
}
}