{
"results": {
"hendrycksTest-abstract_algebra": {
"acc": 0.28,
"acc_stderr": 0.04512608598542129,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542129
},
"hendrycksTest-anatomy": {
"acc": 0.42962962962962964,
"acc_stderr": 0.04276349494376599,
"acc_norm": 0.42962962962962964,
"acc_norm_stderr": 0.04276349494376599
},
"hendrycksTest-astronomy": {
"acc": 0.47368421052631576,
"acc_stderr": 0.04063302731486671,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.04063302731486671
},
"hendrycksTest-business_ethics": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.47547169811320755,
"acc_stderr": 0.030735822206205615,
"acc_norm": 0.47547169811320755,
"acc_norm_stderr": 0.030735822206205615
},
"hendrycksTest-college_biology": {
"acc": 0.375,
"acc_stderr": 0.04048439222695598,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04048439222695598
},
"hendrycksTest-college_chemistry": {
"acc": 0.42,
"acc_stderr": 0.04960449637488584,
"acc_norm": 0.42,
"acc_norm_stderr": 0.04960449637488584
},
"hendrycksTest-college_computer_science": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"hendrycksTest-college_mathematics": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"hendrycksTest-college_medicine": {
"acc": 0.4046242774566474,
"acc_stderr": 0.03742461193887248,
"acc_norm": 0.4046242774566474,
"acc_norm_stderr": 0.03742461193887248
},
"hendrycksTest-college_physics": {
"acc": 0.21568627450980393,
"acc_stderr": 0.04092563958237654,
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237654
},
"hendrycksTest-computer_security": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"hendrycksTest-conceptual_physics": {
"acc": 0.34893617021276596,
"acc_stderr": 0.031158522131357787,
"acc_norm": 0.34893617021276596,
"acc_norm_stderr": 0.031158522131357787
},
"hendrycksTest-econometrics": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512322
},
"hendrycksTest-electrical_engineering": {
"acc": 0.4482758620689655,
"acc_stderr": 0.04144311810878152,
"acc_norm": 0.4482758620689655,
"acc_norm_stderr": 0.04144311810878152
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.291005291005291,
"acc_stderr": 0.02339382650048487,
"acc_norm": 0.291005291005291,
"acc_norm_stderr": 0.02339382650048487
},
"hendrycksTest-formal_logic": {
"acc": 0.23015873015873015,
"acc_stderr": 0.037649508797906045,
"acc_norm": 0.23015873015873015,
"acc_norm_stderr": 0.037649508797906045
},
"hendrycksTest-global_facts": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"hendrycksTest-high_school_biology": {
"acc": 0.432258064516129,
"acc_stderr": 0.028181739720019413,
"acc_norm": 0.432258064516129,
"acc_norm_stderr": 0.028181739720019413
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.3054187192118227,
"acc_stderr": 0.03240661565868408,
"acc_norm": 0.3054187192118227,
"acc_norm_stderr": 0.03240661565868408
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"hendrycksTest-high_school_european_history": {
"acc": 0.5757575757575758,
"acc_stderr": 0.03859268142070265,
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.03859268142070265
},
"hendrycksTest-high_school_geography": {
"acc": 0.4797979797979798,
"acc_stderr": 0.035594435655639196,
"acc_norm": 0.4797979797979798,
"acc_norm_stderr": 0.035594435655639196
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.6010362694300518,
"acc_stderr": 0.03533999094065696,
"acc_norm": 0.6010362694300518,
"acc_norm_stderr": 0.03533999094065696
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.4128205128205128,
"acc_stderr": 0.024962683564331803,
"acc_norm": 0.4128205128205128,
"acc_norm_stderr": 0.024962683564331803
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.26666666666666666,
"acc_stderr": 0.02696242432507384,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.02696242432507384
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.40756302521008403,
"acc_stderr": 0.03191863374478465,
"acc_norm": 0.40756302521008403,
"acc_norm_stderr": 0.03191863374478465
},
"hendrycksTest-high_school_physics": {
"acc": 0.33112582781456956,
"acc_stderr": 0.038425817186598696,
"acc_norm": 0.33112582781456956,
"acc_norm_stderr": 0.038425817186598696
},
"hendrycksTest-high_school_psychology": {
"acc": 0.5082568807339449,
"acc_stderr": 0.021434399918214327,
"acc_norm": 0.5082568807339449,
"acc_norm_stderr": 0.021434399918214327
},
"hendrycksTest-high_school_statistics": {
"acc": 0.32407407407407407,
"acc_stderr": 0.03191923445686186,
"acc_norm": 0.32407407407407407,
"acc_norm_stderr": 0.03191923445686186
},
"hendrycksTest-high_school_us_history": {
"acc": 0.5588235294117647,
"acc_stderr": 0.034849415144292316,
"acc_norm": 0.5588235294117647,
"acc_norm_stderr": 0.034849415144292316
},
"hendrycksTest-high_school_world_history": {
"acc": 0.6455696202531646,
"acc_stderr": 0.031137304297185805,
"acc_norm": 0.6455696202531646,
"acc_norm_stderr": 0.031137304297185805
},
"hendrycksTest-human_aging": {
"acc": 0.4080717488789238,
"acc_stderr": 0.03298574607842822,
"acc_norm": 0.4080717488789238,
"acc_norm_stderr": 0.03298574607842822
},
"hendrycksTest-human_sexuality": {
"acc": 0.45038167938931295,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.45038167938931295,
"acc_norm_stderr": 0.04363643698524779
},
"hendrycksTest-international_law": {
"acc": 0.5867768595041323,
"acc_stderr": 0.04495087843548408,
"acc_norm": 0.5867768595041323,
"acc_norm_stderr": 0.04495087843548408
},
"hendrycksTest-jurisprudence": {
"acc": 0.42592592592592593,
"acc_stderr": 0.0478034362693679,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.0478034362693679
},
"hendrycksTest-logical_fallacies": {
"acc": 0.4294478527607362,
"acc_stderr": 0.03889066619112722,
"acc_norm": 0.4294478527607362,
"acc_norm_stderr": 0.03889066619112722
},
"hendrycksTest-machine_learning": {
"acc": 0.25892857142857145,
"acc_stderr": 0.04157751539865629,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.04157751539865629
},
"hendrycksTest-management": {
"acc": 0.5631067961165048,
"acc_stderr": 0.04911147107365777,
"acc_norm": 0.5631067961165048,
"acc_norm_stderr": 0.04911147107365777
},
"hendrycksTest-marketing": {
"acc": 0.5470085470085471,
"acc_stderr": 0.03261099873098618,
"acc_norm": 0.5470085470085471,
"acc_norm_stderr": 0.03261099873098618
},
"hendrycksTest-medical_genetics": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"hendrycksTest-miscellaneous": {
"acc": 0.5696040868454662,
"acc_stderr": 0.01770586877629239,
"acc_norm": 0.5696040868454662,
"acc_norm_stderr": 0.01770586877629239
},
"hendrycksTest-moral_disputes": {
"acc": 0.43641618497109824,
"acc_stderr": 0.02670054542494368,
"acc_norm": 0.43641618497109824,
"acc_norm_stderr": 0.02670054542494368
},
"hendrycksTest-moral_scenarios": {
"acc": 0.2581005586592179,
"acc_stderr": 0.014635185616527836,
"acc_norm": 0.2581005586592179,
"acc_norm_stderr": 0.014635185616527836
},
"hendrycksTest-nutrition": {
"acc": 0.5065359477124183,
"acc_stderr": 0.028627470550556054,
"acc_norm": 0.5065359477124183,
"acc_norm_stderr": 0.028627470550556054
},
"hendrycksTest-philosophy": {
"acc": 0.4887459807073955,
"acc_stderr": 0.028390897396863533,
"acc_norm": 0.4887459807073955,
"acc_norm_stderr": 0.028390897396863533
},
"hendrycksTest-prehistory": {
"acc": 0.47530864197530864,
"acc_stderr": 0.027786800931427436,
"acc_norm": 0.47530864197530864,
"acc_norm_stderr": 0.027786800931427436
},
"hendrycksTest-professional_accounting": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028121636040639893,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028121636040639893
},
"hendrycksTest-professional_law": {
"acc": 0.333116036505867,
"acc_stderr": 0.012037930451512052,
"acc_norm": 0.333116036505867,
"acc_norm_stderr": 0.012037930451512052
},
"hendrycksTest-professional_medicine": {
"acc": 0.3492647058823529,
"acc_stderr": 0.028959755196824852,
"acc_norm": 0.3492647058823529,
"acc_norm_stderr": 0.028959755196824852
},
"hendrycksTest-professional_psychology": {
"acc": 0.4068627450980392,
"acc_stderr": 0.019873802005061177,
"acc_norm": 0.4068627450980392,
"acc_norm_stderr": 0.019873802005061177
},
"hendrycksTest-public_relations": {
"acc": 0.4818181818181818,
"acc_stderr": 0.04785964010794916,
"acc_norm": 0.4818181818181818,
"acc_norm_stderr": 0.04785964010794916
},
"hendrycksTest-security_studies": {
"acc": 0.4775510204081633,
"acc_stderr": 0.03197694118713672,
"acc_norm": 0.4775510204081633,
"acc_norm_stderr": 0.03197694118713672
},
"hendrycksTest-sociology": {
"acc": 0.5771144278606966,
"acc_stderr": 0.034932317774212816,
"acc_norm": 0.5771144278606966,
"acc_norm_stderr": 0.034932317774212816
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.64,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.64,
"acc_norm_stderr": 0.048241815132442176
},
"hendrycksTest-virology": {
"acc": 0.42168674698795183,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.03844453181770917
},
"hendrycksTest-world_religions": {
"acc": 0.5847953216374269,
"acc_stderr": 0.03779275945503201,
"acc_norm": 0.5847953216374269,
"acc_norm_stderr": 0.03779275945503201
}
},
"versions": {
"hendrycksTest-abstract_algebra": 1,
"hendrycksTest-anatomy": 1,
"hendrycksTest-astronomy": 1,
"hendrycksTest-business_ethics": 1,
"hendrycksTest-clinical_knowledge": 1,
"hendrycksTest-college_biology": 1,
"hendrycksTest-college_chemistry": 1,
"hendrycksTest-college_computer_science": 1,
"hendrycksTest-college_mathematics": 1,
"hendrycksTest-college_medicine": 1,
"hendrycksTest-college_physics": 1,
"hendrycksTest-computer_security": 1,
"hendrycksTest-conceptual_physics": 1,
"hendrycksTest-econometrics": 1,
"hendrycksTest-electrical_engineering": 1,
"hendrycksTest-elementary_mathematics": 1,
"hendrycksTest-formal_logic": 1,
"hendrycksTest-global_facts": 1,
"hendrycksTest-high_school_biology": 1,
"hendrycksTest-high_school_chemistry": 1,
"hendrycksTest-high_school_computer_science": 1,
"hendrycksTest-high_school_european_history": 1,
"hendrycksTest-high_school_geography": 1,
"hendrycksTest-high_school_government_and_politics": 1,
"hendrycksTest-high_school_macroeconomics": 1,
"hendrycksTest-high_school_mathematics": 1,
"hendrycksTest-high_school_microeconomics": 1,
"hendrycksTest-high_school_physics": 1,
"hendrycksTest-high_school_psychology": 1,
"hendrycksTest-high_school_statistics": 1,
"hendrycksTest-high_school_us_history": 1,
"hendrycksTest-high_school_world_history": 1,
"hendrycksTest-human_aging": 1,
"hendrycksTest-human_sexuality": 1,
"hendrycksTest-international_law": 1,
"hendrycksTest-jurisprudence": 1,
"hendrycksTest-logical_fallacies": 1,
"hendrycksTest-machine_learning": 1,
"hendrycksTest-management": 1,
"hendrycksTest-marketing": 1,
"hendrycksTest-medical_genetics": 1,
"hendrycksTest-miscellaneous": 1,
"hendrycksTest-moral_disputes": 1,
"hendrycksTest-moral_scenarios": 1,
"hendrycksTest-nutrition": 1,
"hendrycksTest-philosophy": 1,
"hendrycksTest-prehistory": 1,
"hendrycksTest-professional_accounting": 1,
"hendrycksTest-professional_law": 1,
"hendrycksTest-professional_medicine": 1,
"hendrycksTest-professional_psychology": 1,
"hendrycksTest-public_relations": 1,
"hendrycksTest-security_studies": 1,
"hendrycksTest-sociology": 1,
"hendrycksTest-us_foreign_policy": 1,
"hendrycksTest-virology": 1,
"hendrycksTest-world_religions": 1
},
"config": {
"model": "sparseml",
"model_args": "pretrained=/cache/shubhra/models/platypus_dolphin/cerebras/spft-cerebras_llama2_sparse70_platypus_dolphin_KDFalse_GCTrue_LR1e-4_E4_quant_smooth8,trust_remote_code=True,dtype=bfloat16",
"num_fewshot": 5,
"batch_size": "8",
"batch_sizes": [],
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}