#!/bin/bash
# Deploy a continued-pretraining TinyLlama checkpoint every $step_size training steps
# by rewriting Megatron's latest_checkpointed_iteration.txt and then calling deploy.sh.

step_size=670
model_type=llama2
ckpt_path=/mnt/data/shared/multilingual_llm/experiments_megatron/continue_pretraining_tinyllama_all_20B/mc4_parallel_synth_checkpoints/
name=flavio

for i in {1..10}; do
    # Iteration count of the i-th checkpoint (deployed as ..._${i}b_tokens).
    iter=$((i * step_size))
    echo "$iter"
    # Point Megatron at the desired checkpoint: it reads this file to decide
    # which iteration to load.
    echo "$iter" > "$ckpt_path/latest_checkpointed_iteration.txt"
    bash deploy.sh \
        -p "$ckpt_path" \
        -v "32000" \
        -m "tiny_llama_${name}_${i}b_tokens" \
        -t "$model_type"
        # -f /mnt/data/bpop/multilinguality_tower/extended-models/llama-2-7b-hf-merged-multi-32k-meaninit
done

# Kept for reference: manually pin a specific iteration instead of looping.
#echo "6350" > $ckpt_path/latest_checkpointed_iteration.txt
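
# Optional hardening (sketch): before calling deploy.sh, one could check that a
# checkpoint directory actually exists for the target iteration and skip it
# otherwise. The iter_XXXXXXX naming is an assumption based on Megatron-LM's
# default checkpoint layout; adjust the pattern if this run uses a different one.
#
#   iter_dir=$(printf "%siter_%07d" "$ckpt_path" "$iter")
#   if [ ! -d "$iter_dir" ]; then
#       echo "warning: no checkpoint directory for iteration $iter, skipping" >&2
#       continue
#   fi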