global {
    ducttape_output=/mnt/data/shared/multilingual_llm/experiments_megatron/wikipedia_llama2_all_10B_extend32
    repo=/mnt/data/jpombal/multilinguality_megatron

    external_model_dir=/mnt/data/shared/multilingual_llm/experiments_megatron/continue_pretraining_llama2_all_10B_extend32/checkpoints
    model_path=/mnt/data/bpop/multilinguality_tower/extended-models/llama-2-7b-hf-merged-multi-32k-meaninit
    tokenizer_path=/mnt/data/bpop/multilinguality_tower/extended-models/llama-2-7b-hf-merged-multi-32k-meaninit/tokenizer.model 

    train_language=(TrainLanguage: en de fr es it nl pt ru zh ko)
    
    # per-language filtering threshold (presumably a perplexity cutoff,
    # used together with min_perplexity below)
    threshold=(TrainLanguage:
                en=516
                es=275
                de=611
                fr=322
                nl=649
                pt=257
                it=332
                ru=334
                zh=2041
                ko=198
            )

    # per-language token counts, chosen so that the final number of tokens for each language is around 1B
    n_tokens=(TrainLanguage:
                en=1000000000
                es=833333330
                de=833333330
                fr=833333330
                nl=833333330
                pt=833333330
                it=833333330
                ru=500000000
                zh=13888888
                ko=250000000
            )
    
    dataset_path=(TrainLanguage: 
                en=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/en
                es=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/es
                de=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/de
                fr=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/fr
                nl=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/nl
                pt=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/pt             
                it=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/it
                ru=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/ru
                zh=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/zh
                ko=/mnt/data/shared/multilingual_llm/tower_llm_wikipedia/ko
            )

    # sampling weights, one per TrainLanguage branch (uniform mixture)
    mix="10 10 10 10 10 10 10 10 10 10"

    min_perplexity=50

    # model size in billions of parameters (7B or 13B)
    size=(Size: 7 13)

    log_interval=10
    save_interval=635
    eval_interval=635
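    # 6358 steps is roughly 10B tokens, assuming a 4096-token context and a
    # global batch of 384 sequences (see the batch-size note below), matching
    # the 10B in the experiment name; saving/evaluating every 635 steps is
    # roughly every 10% of training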
    train_steps=6358
    
    lr_scheduler=cosine
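    # 63 warmup steps is about 1% of training; the cosine schedule then
    # decays the learning rate from lr towards lr_min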
    warmup_steps=63
    lr=3e-5
    lr_min=3e-6
    weight_decay=0.1
    
    n_gpus=8
    gpu_ids=0,1,2,3,4,5,6,7
    tp=(TP: 1 2 3 4)
    pp=(PP: 1 2 3 4)
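    # assuming standard Megatron-LM semantics: data-parallel size =
    # n_gpus / (tp * pp) and global batch size = micro_batch_size *
    # grad_accum_steps * data-parallel size (4 * 12 * 8 = 384 when tp=pp=1)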
    micro_batch_size=4
    grad_accum_steps=12
    vocab_size=52672

    cpu_workers=16
    wandb_run_id="wikipedia_extend32"
    wikipedia=True
}