test_json/data/train-[mistralai@Mistral-7B-Instruct-v0.3][Setting3][icl][answer.txt].json
{
    "model name": "mistralai@Mistral-7B-Instruct-v0.3",
    "dataset": "Setting3",
    "method": "icl",
    "file name": "answer.txt",
    "submitter": "zhaorui",
    "MICRO precision": 0.8402185838467381,
    "MICRO recall": 0.6576878612716763,
    "MICRO f1": 0.7378318789024142,
    "MACRO precision": 0.8043702809544941,
    "MACRO recall": 0.634122092935016,
    "MACRO f1": 0.7091715956399902,
    "detail result": {
        "LOCATION": {
            "precision": 0.8358999509563512,
            "recall": 0.6168657256605139,
            "f1": 0.7098708871303623,
            "support": 13815
        },
        "CONTACT": {
            "precision": 0.8289156626506025,
            "recall": 0.6502835538752363,
            "f1": 0.728813559322034,
            "support": 529
        },
        "DATE": {
            "precision": 0.8623968675709691,
            "recall": 0.6431662929551024,
            "f1": 0.7368200961796948,
            "support": 19177
        },
        "NAME": {
            "precision": 0.8296005972377752,
            "recall": 0.6472986748216106,
            "f1": 0.7271983640081799,
            "support": 13734
        },
        "ID": {
            "precision": 0.8709163346613545,
            "recall": 0.8419566054692516,
            "f1": 0.8561916574188915,
            "support": 7789
        },
        "AGE": {
            "precision": 0.7862318840579711,
            "recall": 0.6374541003671971,
            "f1": 0.7040692172502365,
            "support": 4085
        },
        "PROFESSION": {
            "precision": 0.6166306695464363,
            "recall": 0.40182969739619984,
            "f1": 0.4865786109927567,
            "support": 1421
        },
        "MICRO_AVERAGE": {
            "precision": 0.8402185838467381,
            "recall": 0.6576878612716763,
            "f1": 0.7378318789024142,
            "support": 60550
        },
        "MACRO_AVERAGE": {
            "precision": 0.8043702809544941,
            "recall": 0.634122092935016,
            "f1": 0.7091715956399902,
            "support": 60550
        }
    }
}
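
The figures above are internally consistent, and the aggregation scheme can be recovered from them: each per-class f1 is the harmonic mean 2PR/(P+R) of its precision and recall, the MACRO precision and recall are unweighted means over the seven classes, the MACRO f1 is the harmonic mean of those two macro values (not the mean of the per-class f1 scores), and the total support of 60550 is the sum of the per-class supports. A minimal Python sketch that re-derives these aggregates, assuming the JSON above is saved locally as answer_metrics.json (a hypothetical file name):

# Sanity-check sketch for the metrics above. Assumes the JSON has been
# saved locally as "answer_metrics.json" (a hypothetical file name).
import json
import math

with open("answer_metrics.json") as fh:
    report = json.load(fh)

detail = report["detail result"]
classes = [name for name in detail if not name.endswith("_AVERAGE")]

# Each per-class f1 is the harmonic mean of its precision and recall.
for name in classes:
    m = detail[name]
    f1 = 2 * m["precision"] * m["recall"] / (m["precision"] + m["recall"])
    assert math.isclose(f1, m["f1"], rel_tol=1e-9), name

# MACRO precision/recall are unweighted means over the seven classes;
# MACRO f1 is the harmonic mean of those two macro values.
macro_p = sum(detail[c]["precision"] for c in classes) / len(classes)
macro_r = sum(detail[c]["recall"] for c in classes) / len(classes)
assert math.isclose(macro_p, report["MACRO precision"], rel_tol=1e-9)
assert math.isclose(macro_r, report["MACRO recall"], rel_tol=1e-9)
assert math.isclose(2 * macro_p * macro_r / (macro_p + macro_r),
                    report["MACRO f1"], rel_tol=1e-9)

# MICRO f1 is likewise the harmonic mean of MICRO precision and recall,
# and the total support (60550) is the sum of the per-class supports.
micro_p, micro_r = report["MICRO precision"], report["MICRO recall"]
assert math.isclose(2 * micro_p * micro_r / (micro_p + micro_r),
                    report["MICRO f1"], rel_tol=1e-9)
assert sum(detail[c]["support"] for c in classes) == detail["MICRO_AVERAGE"]["support"]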