scenario-TCR-NER_data-univner_full / eval_result_ner.json
{"zh_gsd": {"precision": 0.841168996188056, "recall": 0.863102998696219, "f1": 0.851994851994852, "accuracy": 0.9788544788544788}, "zh_gsdsimp": {"precision": 0.8396464646464646, "recall": 0.8715596330275229, "f1": 0.8553054662379421, "accuracy": 0.9798534798534798}, "hr_set": {"precision": 0.9076607387140903, "recall": 0.9458303635067712, "f1": 0.9263525305410121, "accuracy": 0.990436933223413}, "da_ddt": {"precision": 0.8614318706697459, "recall": 0.8344519015659956, "f1": 0.8477272727272728, "accuracy": 0.9883268482490273}, "en_ewt": {"precision": 0.8122775800711743, "recall": 0.8391544117647058, "f1": 0.825497287522604, "accuracy": 0.9809937442722237}, "pt_bosque": {"precision": 0.8849918433931484, "recall": 0.8930041152263375, "f1": 0.8889799262597297, "accuracy": 0.9896754093609622}, "sr_set": {"precision": 0.950354609929078, "recall": 0.9492325855962219, "f1": 0.9497932663910219, "accuracy": 0.9905437352245863}, "sk_snk": {"precision": 0.8281417830290011, "recall": 0.8426229508196721, "f1": 0.8353196099674973, "accuracy": 0.9746388190954773}, "sv_talbanken": {"precision": 0.8701923076923077, "recall": 0.923469387755102, "f1": 0.8960396039603961, "accuracy": 0.9979879275653923}}