# cluster_loras/22/mmlu/config.txt
# Parameters for EvaluationArguments:
# ==============================================================================
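# Summary (inferred from the bindings below): zero-shot MMLU evaluation of
# meta-llama/Llama-2-7b-hf with LoRA adapters, using the tulu chat format.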
EvaluationArguments.base_model_name_or_path = 'meta-llama/Llama-2-7b-hf'
EvaluationArguments.batch_size = 8
EvaluationArguments.benchmark = 'mmlu'
EvaluationArguments.chat_formatting_function = \
'src.eval.templates.create_prompt_with_tulu_chat_format'
EvaluationArguments.data_dir = '${DATA_CACHE}/eval/mmlu'
EvaluationArguments.hf_upload_name = None
EvaluationArguments.load_in_8bit = False
EvaluationArguments.n_instances = None
EvaluationArguments.no_cot = False
EvaluationArguments.num_shots = 0
EvaluationArguments.peft_type = 'lora'
EvaluationArguments.save_dir = \
'/network/scratch/m/mohammed.muqeeth/supergoose/exp_out/cluster_loras/22/mmlu'
EvaluationArguments.seed = 42
EvaluationArguments.upload_to_hf = None
EvaluationArguments.use_chat_format = True
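
# Usage sketch, kept as comments so this file stays a valid config. This
# assumes the file is parsed with gin-config and that EvaluationArguments is
# a @gin.configurable class; `run_eval` is a hypothetical entry point, not
# confirmed by this repo:
#
#   import gin
#   gin.parse_config_file('config.txt')  # binds the values above
#   args = EvaluationArguments()         # fields filled from this file
#   run_eval(args)                       # hypothetical evaluation driver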