|
{
    "config_general": {
        "start_date": "2024-04-02T00-29-33.604027",
        "start_time": 1712017774.6061072,
        "end_time": 1712027572.1964915,
        "total_evaluation_time_seconds": 9797.590384244919,
        "has_chat_template": true,
        "chat_type": "system_user_assistant",
        "n_gpus": 1,
        "accelerate_num_process": null,
        "model_sha": "8ffe1eb804ec3a4aa9010e798db8420bd1b6faab",
        "model_dtype": "bfloat16",
        "model_memory_footprint": 13678166016,
        "model_num_parameters": 6738415616,
        "model_is_loaded_in_4bit": null,
        "model_is_loaded_in_8bit": null,
        "model_is_quantized": null,
        "model_device": "cuda:0",
        "batch_size": 16,
        "max_length": 2560,
        "max_ctx_length": 2528,
        "max_gen_toks": 32,
        "model_name": "allenai/tulu-2-dpo-7b",
        "job_id": 330,
        "model_id": "allenai/tulu-2-dpo-7b_eval_request_False_bfloat16_Original",
        "model_base_model": "",
        "model_weight_type": "Original",
        "model_revision": "main",
        "model_private": false,
        "model_type": "💬 : chat models (RLHF, DPO, IFT, ...)",
        "model_architectures": "LlamaForCausalLM",
        "submitted_time": "2024-03-05T16:37:20Z",
        "lm_eval_model_type": "huggingface",
        "eval_version": "1.1.0"
    },
    "results": {
        "all_grouped_average": 0.63949781885722,
        "all_grouped_npm": 0.47383474887492505,
        "all_grouped": {
            "enem_challenge": 0.5150454863540938,
            "bluex": 0.44089012517385257,
            "oab_exams": 0.3826879271070615,
            "assin2_rte": 0.882693297611393,
            "assin2_sts": 0.6797107988195017,
            "faquad_nli": 0.6767812942008486,
            "hatebr_offensive": 0.8485191831468404,
            "portuguese_hate_speech": 0.6881970837369898,
            "tweetsentbr": 0.6409551735643995
        },
        "all": {
            "harness|enem_challenge|enem_challenge|None|3": 0.5150454863540938,
            "harness|bluex|bluex|None|3": 0.44089012517385257,
            "harness|oab_exams|oab_exams|None|3": 0.3826879271070615,
            "harness|assin2_rte|assin2_rte|None|15": 0.882693297611393,
            "harness|assin2_sts|assin2_sts|None|15": 0.6797107988195017,
            "harness|faquad_nli|faquad_nli|None|15": 0.6767812942008486,
            "harness|hatebr_offensive|hatebr_offensive|None|25": 0.8485191831468404,
            "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.6881970837369898,
            "harness|tweetsentbr|tweetsentbr|None|25": 0.6409551735643995
        },
        "harness|enem_challenge|enem_challenge|None|3": {
            "acc,all": 0.5150454863540938,
            "acc,exam_id__2022": 0.5413533834586466,
            "acc,exam_id__2016": 0.5371900826446281,
            "acc,exam_id__2009": 0.4956521739130435,
            "acc,exam_id__2011": 0.5042735042735043,
            "acc,exam_id__2014": 0.5229357798165137,
            "acc,exam_id__2010": 0.5128205128205128,
            "acc,exam_id__2012": 0.49137931034482757,
            "acc,exam_id__2016_2": 0.5121951219512195,
            "acc,exam_id__2023": 0.5407407407407407,
            "acc,exam_id__2013": 0.49074074074074076,
            "acc,exam_id__2015": 0.5126050420168067,
            "acc,exam_id__2017": 0.5086206896551724,
            "main_score": 0.5150454863540938
        },
        "harness|bluex|bluex|None|3": {
            "acc,all": 0.44089012517385257,
            "acc,exam_id__UNICAMP_2024": 0.7111111111111111,
            "acc,exam_id__UNICAMP_2020": 0.4727272727272727,
            "acc,exam_id__UNICAMP_2022": 0.5128205128205128,
            "acc,exam_id__USP_2020": 0.5,
            "acc,exam_id__USP_2019": 0.45,
            "acc,exam_id__USP_2018": 0.35185185185185186,
            "acc,exam_id__USP_2021": 0.3076923076923077,
            "acc,exam_id__USP_2022": 0.42857142857142855,
            "acc,exam_id__UNICAMP_2018": 0.24074074074074073,
            "acc,exam_id__UNICAMP_2019": 0.4,
            "acc,exam_id__UNICAMP_2021_1": 0.391304347826087,
            "acc,exam_id__USP_2024": 0.5853658536585366,
            "acc,exam_id__USP_2023": 0.4318181818181818,
            "acc,exam_id__UNICAMP_2021_2": 0.4117647058823529,
            "acc,exam_id__UNICAMP_2023": 0.5116279069767442,
            "main_score": 0.44089012517385257
        },
        "harness|oab_exams|oab_exams|None|3": {
            "acc,all": 0.3826879271070615,
            "acc,exam_id__2010-02": 0.45,
            "acc,exam_id__2012-08": 0.4625,
            "acc,exam_id__2017-24": 0.45,
            "acc,exam_id__2017-22": 0.4625,
            "acc,exam_id__2012-06": 0.35,
            "acc,exam_id__2012-06a": 0.375,
            "acc,exam_id__2013-11": 0.35,
            "acc,exam_id__2012-07": 0.3625,
            "acc,exam_id__2016-19": 0.46153846153846156,
            "acc,exam_id__2015-17": 0.34615384615384615,
            "acc,exam_id__2014-14": 0.3375,
            "acc,exam_id__2013-10": 0.325,
            "acc,exam_id__2016-21": 0.425,
            "acc,exam_id__2014-15": 0.44871794871794873,
            "acc,exam_id__2011-03": 0.30303030303030304,
            "acc,exam_id__2013-12": 0.425,
            "acc,exam_id__2016-20a": 0.3125,
            "acc,exam_id__2018-25": 0.4125,
            "acc,exam_id__2011-05": 0.4375,
            "acc,exam_id__2015-16": 0.3625,
            "acc,exam_id__2015-18": 0.35,
            "acc,exam_id__2014-13": 0.35,
            "acc,exam_id__2016-20": 0.425,
            "acc,exam_id__2011-04": 0.3375,
            "acc,exam_id__2012-09": 0.36363636363636365,
            "acc,exam_id__2010-01": 0.32941176470588235,
            "acc,exam_id__2017-23": 0.325,
            "main_score": 0.3826879271070615
        },
        "harness|assin2_rte|assin2_rte|None|15": {
            "f1_macro,all": 0.882693297611393,
            "acc,all": 0.8827614379084967,
            "main_score": 0.882693297611393
        },
        "harness|assin2_sts|assin2_sts|None|15": {
            "pearson,all": 0.6797107988195017,
            "mse,all": 0.8555351307189543,
            "main_score": 0.6797107988195017
        },
        "harness|faquad_nli|faquad_nli|None|15": {
            "f1_macro,all": 0.6767812942008486,
            "acc,all": 0.7230769230769231,
            "main_score": 0.6767812942008486
        },
        "harness|hatebr_offensive|hatebr_offensive|None|25": {
            "f1_macro,all": 0.8485191831468404,
            "acc,all": 0.8485714285714285,
            "main_score": 0.8485191831468404
        },
        "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": {
            "f1_macro,all": 0.6881970837369898,
            "acc,all": 0.7226792009400705,
            "main_score": 0.6881970837369898
        },
        "harness|tweetsentbr|tweetsentbr|None|25": {
            "f1_macro,all": 0.6409551735643995,
            "acc,all": 0.6681592039800995,
            "main_score": 0.6409551735643995
        }
    },
    "config_tasks": {
        "harness|enem_challenge|enem_challenge": "LM Harness task",
        "harness|bluex|bluex": "LM Harness task",
        "harness|oab_exams|oab_exams": "LM Harness task",
        "harness|assin2_rte|assin2_rte": "LM Harness task",
        "harness|assin2_sts|assin2_sts": "LM Harness task",
        "harness|faquad_nli|faquad_nli": "LM Harness task",
        "harness|hatebr_offensive|hatebr_offensive": "LM Harness task",
        "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task",
        "harness|tweetsentbr|tweetsentbr": "LM Harness task"
    },
    "versions": {
        "all": 0,
        "harness|enem_challenge|enem_challenge": 1.1,
        "harness|bluex|bluex": 1.1,
        "harness|oab_exams|oab_exams": 1.5,
        "harness|assin2_rte|assin2_rte": 1.1,
        "harness|assin2_sts|assin2_sts": 1.1,
        "harness|faquad_nli|faquad_nli": 1.1,
        "harness|hatebr_offensive|hatebr_offensive": 1.0,
        "harness|portuguese_hate_speech|portuguese_hate_speech": 1.0,
        "harness|tweetsentbr|tweetsentbr": 1.0
    },
    "summary_tasks": {
        "harness|enem_challenge|enem_challenge|None|3": {
            "sample_size": 1429,
            "truncated": 1,
            "non_truncated": 1428,
            "padded": 0,
            "non_padded": 1429,
            "fewshots_truncated": 1,
            "mean_seq_length": 1538.9881035689293,
            "min_seq_length": 1286,
            "max_seq_length": 2578,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 3.0,
            "mean_effective_fewshot_size": 2.9993002099370187
        },
        "harness|bluex|bluex|None|3": {
            "sample_size": 719,
            "truncated": 0,
            "non_truncated": 719,
            "padded": 0,
            "non_padded": 719,
            "fewshots_truncated": 0,
            "mean_seq_length": 1659.7426981919332,
            "min_seq_length": 1293,
            "max_seq_length": 2419,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 3.0,
            "mean_effective_fewshot_size": 3.0
        },
        "harness|oab_exams|oab_exams|None|3": {
            "sample_size": 2195,
            "truncated": 0,
            "non_truncated": 2195,
            "padded": 0,
            "non_padded": 2195,
            "fewshots_truncated": 0,
            "mean_seq_length": 1308.4145785876992,
            "min_seq_length": 1053,
            "max_seq_length": 1790,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 3.0,
            "mean_effective_fewshot_size": 3.0
        },
        "harness|assin2_rte|assin2_rte|None|15": {
            "sample_size": 2448,
            "truncated": 0,
            "non_truncated": 2448,
            "padded": 0,
            "non_padded": 2448,
            "fewshots_truncated": 0,
            "mean_seq_length": 1490.9889705882354,
            "min_seq_length": 1468,
            "max_seq_length": 1557,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 15.0,
            "mean_effective_fewshot_size": 15.0
        },
        "harness|assin2_sts|assin2_sts|None|15": {
            "sample_size": 2448,
            "truncated": 0,
            "non_truncated": 2448,
            "padded": 0,
            "non_padded": 2448,
            "fewshots_truncated": 0,
            "mean_seq_length": 1690.9889705882354,
            "min_seq_length": 1668,
            "max_seq_length": 1757,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 15.0,
            "mean_effective_fewshot_size": 15.0
        },
        "harness|faquad_nli|faquad_nli|None|15": {
            "sample_size": 650,
            "truncated": 0,
            "non_truncated": 650,
            "padded": 0,
            "non_padded": 650,
            "fewshots_truncated": 0,
            "mean_seq_length": 1684.1184615384616,
            "min_seq_length": 1632,
            "max_seq_length": 1791,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 15.0,
            "mean_effective_fewshot_size": 15.0
        },
        "harness|hatebr_offensive|hatebr_offensive|None|25": {
            "sample_size": 1400,
            "truncated": 0,
            "non_truncated": 1400,
            "padded": 0,
            "non_padded": 1400,
            "fewshots_truncated": 0,
            "mean_seq_length": 1598.9178571428572,
            "min_seq_length": 1575,
            "max_seq_length": 1845,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 25.0,
            "mean_effective_fewshot_size": 25.0
        },
        "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": {
            "sample_size": 851,
            "truncated": 0,
            "non_truncated": 851,
            "padded": 0,
            "non_padded": 851,
            "fewshots_truncated": 0,
            "mean_seq_length": 2087.801410105758,
            "min_seq_length": 2053,
            "max_seq_length": 2131,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 25.0,
            "mean_effective_fewshot_size": 25.0
        },
        "harness|tweetsentbr|tweetsentbr|None|25": {
            "sample_size": 2010,
            "truncated": 0,
            "non_truncated": 2010,
            "padded": 0,
            "non_padded": 2010,
            "fewshots_truncated": 0,
            "mean_seq_length": 1862.6845771144278,
            "min_seq_length": 1841,
            "max_seq_length": 1980,
            "max_ctx_length": 2528,
            "max_gen_toks": 32,
            "mean_original_fewshots_size": 25.0,
            "mean_effective_fewshot_size": 25.0
        }
    },
    "summary_general": {
        "truncated": 1,
        "non_truncated": 14149,
        "padded": 0,
        "non_padded": 14150,
        "fewshots_truncated": 1
    }
}
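
For reference, the top-level "all_grouped_average" is the unweighted mean of the nine task scores in "results.all_grouped" (each task's "main_score" feeds the average; the per-exam breakdowns only feed the task scores). "all_grouped_npm" appears to be a baseline-normalized variant of the same aggregate, but recomputing it would need each task's random-baseline floor, which this report does not record, so it is not checked here. A minimal Python sketch that recomputes the plain average, assuming the report above is saved locally under the hypothetical filename results.json:

import json

# Load the evaluation report (hypothetical local filename).
with open("results.json", encoding="utf-8") as f:
    report = json.load(f)

# "all_grouped" maps each of the nine benchmark tasks to its main score.
task_scores = report["results"]["all_grouped"]
recomputed = sum(task_scores.values()) / len(task_scores)

# The stored aggregate matches the plain mean up to floating-point rounding.
assert abs(recomputed - report["results"]["all_grouped_average"]) < 1e-9
print(f"{recomputed:.14f}")  # 0.63949781885722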