bloomz-560m / evaluation_val / mlsum_es / slim.limited=3000.model=350mt0_xp3capmixnewcodelonglossseqglobal_step1750.task=mlsum_es.templates=palm_prompt.fewshot=0.batchsize=16.seed=1234.timestamp=2022-10-08T13:34:03.json
{
"results": [
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"bleu": 3.619325486335941,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"bleu_stderr": 0.179919419902639
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rouge1_precision": 0.24780663444349338,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rouge1_precision_stderr": 0.002507147956018597
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rouge1_recall": 0.27545135338282833,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rouge1_recall_stderr": 0.003003125501891925
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rouge1_fmeasure": 0.23676009275524143,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rouge1_fmeasure_stderr": 0.0021301319931734007
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rouge2_precision": 0.0640710564358735,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rouge2_precision_stderr": 0.0017900238806753247
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rouge2_recall": 0.07767444430106467,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rouge2_recall_stderr": 0.0021965535248787645
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rouge2_fmeasure": 0.06294952611868992,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rouge2_fmeasure_stderr": 0.0016498514297430393
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rougeL_precision": 0.1901311887539964,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rougeL_precision_stderr": 0.002107469317872449
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rougeL_recall": 0.2111905723569327,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rougeL_recall_stderr": 0.002518349475307121
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rougeL_fmeasure": 0.18117329279306474,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rougeL_fmeasure_stderr": 0.0017908424050208493
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rougeLsum_precision": 0.19470439311305227,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rougeLsum_precision_stderr": 0.002151332052117866
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rougeLsum_recall": 0.21735203759781344,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rougeLsum_recall_stderr": 0.0026235912805025272
},
{
"task_name": "mlsum_es",
"prompt_name": "palm_prompt",
"rougeLsum_fmeasure": 0.18538806816923076,
"dataset_path": "GEM/mlsum",
"dataset_name": "es",
"subset": "",
"rougeLsum_fmeasure_stderr": 0.0018064286752298999
}
],
"config": {
"model": "hf-causal",
"model_args": "pretrained=/gpfsscratch/rech/six/commun/commun/experiments/muennighoff/bloomckpt/350mt0/xp3capmixnewcodelonglossseqglobal_step1750,use_accelerate=True,tokenizer=/gpfsscratch/rech/six/commun/commun/experiments/muennighoff/bloomckpt/350mt0/xp3capmixnewcodelonglossseqglobal_step1750,dtype=float16",
"num_fewshot": 0,
"batch_size": 16,
"device": "cuda",
"use_cache": false,
"limit": 3000,
"bootstrap_iters": 10,
"seed": 1234
}
}
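
Each entry in "results" carries exactly one metric value plus its matching "*_stderr" bootstrap estimate, alongside repeated task metadata. A minimal sketch of reading such a slim results file and printing a metric summary, using only the standard library; the local filename "slim.json" is an assumption, substitute the actual path of the file above:

import json

# Keys that describe the task rather than report a metric.
META_KEYS = {"task_name", "prompt_name", "dataset_path", "dataset_name", "subset"}

with open("slim.json") as f:
    data = json.load(f)

print(f"task: {data['results'][0]['task_name']}, "
      f"prompt: {data['results'][0]['prompt_name']}, "
      f"limit: {data['config']['limit']}")

for result in data["results"]:
    for key, value in result.items():
        if key in META_KEYS or key.endswith("_stderr"):
            continue
        # Pair the metric with its bootstrap stderr from the same entry.
        stderr = result.get(f"{key}_stderr", float("nan"))
        print(f"{key}: {value:.4f} (stderr {stderr:.4f})")

Run against this file, the loop would print one line per metric (bleu, then the rouge1/rouge2/rougeL/rougeLsum precision, recall, and f-measure triples), each with its stderr from the 10 bootstrap iterations recorded in "config".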