xtreme_s_xlsr_300m_minds14 / predict_results.json
{
"eval_accuracy": 0.9032642188245054,
"eval_accuracy_cs-CZ": 0.9163763066202091,
"eval_accuracy_de-DE": 0.9477124183006536,
"eval_accuracy_en-AU": 0.9235474006116208,
"eval_accuracy_en-GB": 0.9324324324324325,
"eval_accuracy_en-US": 0.9326241134751773,
"eval_accuracy_es-ES": 0.9176954732510288,
"eval_accuracy_fr-FR": 0.9444444444444444,
"eval_accuracy_it-IT": 0.9166666666666666,
"eval_accuracy_ko-KR": 0.8648648648648649,
"eval_accuracy_nl-NL": 0.944954128440367,
"eval_accuracy_pl-PL": 0.9145907473309609,
"eval_accuracy_pt-PT": 0.8940397350993378,
"eval_accuracy_ru-RU": 0.8666666666666667,
"eval_accuracy_zh-CN": 0.7290836653386454,
"eval_f1": 0.9014919257241022,
"eval_f1_cs-CZ": 0.9153654325990654,
"eval_f1_de-DE": 0.9467113817272022,
"eval_f1_en-AU": 0.9199176581170168,
"eval_f1_en-GB": 0.9334411627066194,
"eval_f1_en-US": 0.9307655606266035,
"eval_f1_es-ES": 0.915783399735911,
"eval_f1_fr-FR": 0.9436294760276339,
"eval_f1_it-IT": 0.9135402839075188,
"eval_f1_ko-KR": 0.8641714515484098,
"eval_f1_nl-NL": 0.943962672218691,
"eval_f1_pl-PL": 0.9158658012269524,
"eval_f1_pt-PT": 0.8882619547072632,
"eval_f1_ru-RU": 0.8645822850643244,
"eval_f1_zh-CN": 0.7248884399242215,
"eval_loss": 0.4118512070604733,
"eval_loss_cs-CZ": 0.37904757261276245,
"eval_loss_de-DE": 0.2648610770702362,
"eval_loss_en-AU": 0.34592190384864807,
"eval_loss_en-GB": 0.2852662205696106,
"eval_loss_en-US": 0.2203420102596283,
"eval_loss_es-ES": 0.2730692923069,
"eval_loss_fr-FR": 0.19085757434368134,
"eval_loss_it-IT": 0.3520455062389374,
"eval_loss_ko-KR": 0.5431176424026489,
"eval_loss_nl-NL": 0.2515396773815155,
"eval_loss_pl-PL": 0.4113442003726959,
"eval_loss_pt-PT": 0.4798424243927002,
"eval_loss_ru-RU": 0.6470151543617249,
"eval_loss_zh-CN": 1.1216466426849365,
"eval_runtime": 41.72093571428571,
"eval_samples_per_second": 7.823785714285714,
"eval_steps_per_second": 0.5,
"predict_samples": 4086
}