#!/bin/sh
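# scripts/tune-mgtv-llama3_8b_en.sh
# Fine-tune Meta-Llama-3.1-8B-Instruct (LoRA SFT, bf16) on the MGTV
# logical-reasoning dataset with the p1_en and p2_en configs, then run evaluation.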
BASEDIR=$(dirname "$0")
cd "$BASEDIR/.." || exit 1
echo "Current Directory:"
pwd
BASEDIR=$(pwd)
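
# Log GPU, OS, CPU, and memory information for the training environment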
nvidia-smi
uname -a
cat /etc/os-release
lscpu
grep MemTotal /proc/meminfo
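
# Optional dependency setups (commented out; uncomment as needed):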
#pip install -r requirements.txt
#cd ../LLaMA-Factory && pip install -e .[torch,bitsandbytes] && cd $BASEDIR
#pip install transformers==4.41.2
#pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu121
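# Upgrade transformers to the latest release (Llama 3.1 requires a recent version)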
pip install -U transformers
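
# Dataset, model, and fine-tuning config selection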
export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
export MODEL_NAME=meta-llama/Meta-Llama-3.1-8B-Instruct
export MODEL_PREFIX=llama3-8b_lora_sft_bf16
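
# First fine-tuning pass: English prompt config p1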
export CONFIG_FILE=config/$MODEL_PREFIX-p1_en.yaml
echo "Tuning $MODEL_NAME with $CONFIG_FILE"
"$BASEDIR/scripts/tune-lf.sh" "$CONFIG_FILE"
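
# Second fine-tuning pass: English prompt config p2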
export CONFIG_FILE=config/$MODEL_PREFIX-p2_en.yaml
echo "Tuning $MODEL_NAME with $CONFIG_FILE"
"$BASEDIR/scripts/tune-lf.sh" "$CONFIG_FILE"
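
# Evaluate the fine-tuned model on the MGTV benchmark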
"$BASEDIR/scripts/eval-mgtv-llama3_8b_en.sh"