#!/bin/bash
#SBATCH --job-name=tasky             # job name
#SBATCH --ntasks=1                   # number of MP tasks
#SBATCH --nodes=1
#SBATCH --cpus-per-task=40           # number of cores per task
#SBATCH --hint=nomultithread         # we get physical cores not logical
#SBATCH --time=20:00:00              # maximum execution time (HH:MM:SS)
#SBATCH --output=%x-%j.out           # output file name (%x = job name, %j = job id)
#SBATCH --account=ajs@cpu            # allocation account
#SBATCH --partition=cpu_p1           # CPU partition
#SBATCH --qos=qos_cpu-t3             # QoS tier matching the 20h time limit

set -x -e  # trace each command and abort on the first error

# Environment and paths for the tr13f-6B3-ml-t0 run
source $six_ALL_CCFRWORK/start-tr13f-6B3-ml-t0
MEGATRON_DEEPSPEED_REPO=$six_ALL_CCFRWORK/code/tr13f-6B3-ml-t0/Megatron-DeepSpeed
TOKENIZER_PATH="bigscience/tokenizer"
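
# Optional sanity check (a sketch, not part of the original script): confirm the
# tokenizer resolves before committing 20 hours of compute. Assumes the
# `transformers` library is available in the sourced environment; on clusters
# whose compute nodes lack internet access, run this once on a login node instead.
# python -c "from transformers import AutoTokenizer; AutoTokenizer.from_pretrained('$TOKENIZER_PATH')"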

# Merge the per-shard JSONL files, drop exact duplicate lines with sort -u,
# and shuffle the result (assumes the job is launched from the tasky/ data
# directory that the absolute --input path below points at)
cat c4tasky_*.jsonl > merged_dups_c4tasky.jsonl
sort -u merged_dups_c4tasky.jsonl | shuf > merged_c4tasky.jsonl
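
# Optional check (a sketch, not in the original script): report line counts
# before and after deduplication to see how many duplicates were dropped
wc -l merged_dups_c4tasky.jsonl merged_c4tasky.jsonl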

cd $MEGATRON_DEEPSPEED_REPO
# Binarize the merged JSONL into Megatron's mmap indexed-dataset format
python tools/preprocess_data.py \
    --input /gpfswork/rech/six/commun/code/turku/tasky/merged_c4tasky.jsonl \
    --output-prefix /gpfswork/rech/six/commun/code/turku/tasky/tasky_bloom \
    --dataset-impl mmap \
    --json-key text \
    --tokenizer-type PretrainedFromHF \
    --tokenizer-name-or-path $TOKENIZER_PATH \
    --workers 35
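
# The run above should leave binarized artifacts next to the output prefix,
# assuming Megatron-DeepSpeed's usual <prefix>_<json-key>_document.{bin,idx} naming:
ls -lh /gpfswork/rech/six/commun/code/turku/tasky/tasky_bloom_text_document.*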