#!/bin/bash

# Convert a Hugging Face LLaMA checkpoint to Megatron format.
# The defaults below are machine-specific paths; each can be overridden
# with the corresponding command-line flag parsed further down.

megatron_model="/mnt/scratch-artemis/kshitij/LLAMA/latest_megatron_codebase/spgi_vox_mls_text_1b/megatron_model"
model_path="/mnt/scratch-artemis/kshitij/LLAMA/latest_megatron_codebase/spgi_vox_mls_text_1b/extended_non_uniform_model"
size="1"
repo="/mnt/scratch-artemis/kshitij/LLAMA/latest_megatron_codebase/multilinguality_megatron"

# Parse command-line options of the form --flag=value, overriding the defaults above.
for arg in "$@"
do
    case $arg in
        --help)
            echo "Usage: ./script.sh [OPTIONS]"
            echo "Options:"
            echo "  --megatron_model=PATH  Path to save the converted model."
            echo "  --model_path=PATH      Path of the HF directory of the model to be converted."
            echo "  --size=NUMBER          Billion parameters of the model."
            echo "  --repo=PATH            Path to the multilinguality_megatron repo."
            exit 0
            ;;
        --megatron_model=*)
            megatron_model="${arg#*=}"
            ;;
        --model_path=*)
            model_path="${arg#*=}"
            ;;
        --size=*)
            size="${arg#*=}"
            ;;
        --repo=*)
            repo="${arg#*=}"
            ;;
    esac
done
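# Note on the option parsing above: "${arg#*=}" strips the shortest prefix matching
# '*=' from $arg, i.e. everything up to and including the first '='. A minimal
# illustration with a hypothetical value (not part of the conversion flow):
#
#   example_arg="--size=7"
#   echo "${example_arg#*=}"   # prints: 7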

# Run the HF-to-Megatron weight conversion script from the repo.
python "$repo/weights_conversion/hf_to_megatron.py" llama \
    --size="$size" \
    --out="$megatron_model" \
    --cache-dir="$model_path" \
    --model-path="$model_path"
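
# Example invocation (the paths below are placeholders; substitute your own HF
# checkpoint directory, output directory, and clone of the conversion repo):
#
#   ./script.sh \
#       --model_path=/path/to/hf_model \
#       --megatron_model=/path/to/output/megatron_model \
#       --size=1 \
#       --repo=/path/to/multilinguality_megatron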