Muennighoff committed
Commit 110b61c
Parent(s): f661612
Add script

Files changed: to_meg_ru.slurm (+110, -0)

to_meg_ru.slurm (ADDED)
@@ -0,0 +1,110 @@
#!/bin/bash
#SBATCH --ntasks=1                   # number of MP tasks
#SBATCH --nodes=1
#SBATCH --cpus-per-task=64           # number of cores per task
#SBATCH --hint=nomultithread         # we get physical cores, not logical
#SBATCH --time=20:00:00              # maximum execution time (HH:MM:SS)
#SBATCH --output=%x-%j.out           # output file name
#SBATCH --account=project_462000119
#SBATCH --partition=small

set -x -e

#source $six_ALL_CCFRWORK/start-tr13f-6B3-ml-t0
source /scratch/project_462000119/muennighoff/nov-2022-bettercom/venv/bin/activate
export HF_DATASETS_OFFLINE=1
export TRANSFORMERS_OFFLINE=1

#MEGATRON_DEEPSPEED_REPO=$six_ALL_CCFRWORK/code/tr13f-6B3-ml-t0/Megatron-DeepSpeed
MEGATRON_DEEPSPEED_REPO=/scratch/project_462000119/muennighoff/nov-2022-mtf/Megatron-DeepSpeed

TOKENIZER_PATH="bigscience/tokenizer"

LANGS=(
ak
ar
as
bm
bn
ca
code
en
es
eu
fon
fr
gu
hi
id
ig
ki
kn
lg
ln
ml
mr
ne
nso
ny
or
pa
pt
rn
rw
sn
st
sw
ta
te
tn
ts
tum
tw
ur
vi
wo
xh
yo
zh
zu
)

# Override the full list above: this run only processes Russian.
LANGS=(
ru
)

#DATA_PATH=/gpfswork/rech/six/commun/bigscience-training/jsonls/xp3cappedmixednewcodelong
#OUTPUT=/gpfswork/rech/six/commun/bigscience-training/xp3cappedmixednewcodelong
DATA_PATH=/scratch/project_462000119/muennighoff/nov-2022-mtf/xp3ru/ru
OUTPUT=/scratch/project_462000119/muennighoff/nov-2022-mtf/xp3rumegds

mkdir -p $OUTPUT

for LANG in "${LANGS[@]}"; do
    cd $DATA_PATH
    # Merge
    cat *.jsonl > merged_dups_$LANG.jsonl
    # Drop duplicates (~1G / 37G for en) + Shuffle
    sort -u merged_dups_$LANG.jsonl | shuf > merged_$LANG.jsonl
    cd $MEGATRON_DEEPSPEED_REPO
    # Binarize the "inputs" field
    python tools/preprocess_data.py \
        --input $DATA_PATH/merged_$LANG.jsonl \
        --output-prefix $OUTPUT/xp3_$LANG \
        --dataset-impl mmap \
        --json-key inputs \
        --tokenizer-type PretrainedFromHF \
        --tokenizer-name-or-path $TOKENIZER_PATH \
        --workers 60
    # Binarize the "targets" field
    python tools/preprocess_data.py \
        --input $DATA_PATH/merged_$LANG.jsonl \
        --output-prefix $OUTPUT/xp3_$LANG \
        --dataset-impl mmap \
        --json-key targets \
        --tokenizer-type PretrainedFromHF \
        --tokenizer-name-or-path $TOKENIZER_PATH \
        --append-eod \
        --prepend-space \
        --workers 60
done
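For reference, a minimal sketch of how a job like this is usually launched and monitored on a SLURM cluster; the job name and the output-file naming below are assumptions based on standard SLURM and Megatron-DeepSpeed defaults, not part of this commit:

# Submit the preprocessing job; by default SLURM uses the script file name as the job name,
# so %x-%j.out should resolve to to_meg_ru.slurm-<jobid>.out (assumed default naming).
sbatch to_meg_ru.slurm
squeue -u $USER                          # check that the job is queued or running
tail -f to_meg_ru.slurm-<jobid>.out      # follow the live log

# With the usual Megatron-DeepSpeed preprocess_data.py naming, the binarized datasets
# should appear as $OUTPUT/xp3_ru_inputs_document.{bin,idx} and
# $OUTPUT/xp3_ru_targets_document.{bin,idx}.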