small
- __pycache__/tasks.cpython-38.pyc +0 -0
- finetune_small_byt5_sentencefix.gin +41 -0
- train_small.sh +12 -0
__pycache__/tasks.cpython-38.pyc
CHANGED
Binary files a/__pycache__/tasks.cpython-38.pyc and b/__pycache__/tasks.cpython-38.pyc differ
finetune_small_byt5_sentencefix.gin
ADDED
@@ -0,0 +1,41 @@
+from __gin__ import dynamic_registration
+import tasks
+
+import __main__ as train_script
+from t5.data import mixtures
+from t5x import models
+from t5x import partitioning
+from t5x import utils
+
+include "t5x/examples/t5/byt5/small.gin"
+include "t5x/configs/runs/finetune.gin"
+
+MIXTURE_OR_TASK_NAME = "sentencefix"
+TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
+TRAIN_STEPS = 1_100_000  # 1000000 pre-trained steps + 100000 fine-tuning steps.
+USE_CACHED_TASKS = False
+DROPOUT_RATE = 0.0
+RANDOM_SEED = 0
+
+# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
+# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
+# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
+# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
+LOSS_NORMALIZING_FACTOR = 193536
+INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/byt5/small/model.ckpt-1000000"
+
+train_script.train:
+  eval_period = 500
+  partitioner = @partitioning.ModelBasedPjitPartitioner()
+
+# `num_decodes` is equivalent to the beam size in beam search decoding.
+models.EncoderDecoderModel.predict_batch_with_aux.num_decodes = 4
+
+partitioning.ModelBasedPjitPartitioner.num_partitions = 2
+
+
+#from t5.models import mesh_transformer
+#import t5.models
+#mesh_transformer.learning_rate_schedules.constant_learning_rate.learning_rate = 0.0005
+#run.learning_rate_schedule = @learning_rate_schedules.constant_learning_rate
+
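The config imports a local `tasks` module (the recompiled `tasks.cpython-38.pyc` above), which must register the "sentencefix" task before gin resolves `MIXTURE_OR_TASK_NAME`. The commit does not include `tasks.py` itself, so the sketch below is only a guess at a minimal registration: the TFDS dataset name and the `source`/`target` feature keys are assumptions, while the byte-level vocabulary is what ByT5 requires. Note that `LOSS_NORMALIZING_FACTOR = 193536` matches the ByT5 formula in the comment exactly: 1024 * 189 = 193536.

# tasks.py -- hypothetical sketch; the actual tasks.py in this repo may differ.
import functools

import seqio
from t5.evaluation import metrics

# ByT5 consumes raw UTF-8 bytes, so both features use the byte vocabulary.
BYTE_VOCAB = seqio.ByteVocabulary()

seqio.TaskRegistry.add(
    "sentencefix",
    # The TFDS name and the key_map field names below are assumptions.
    source=seqio.TfdsDataSource(tfds_name="sentencefix:1.0.0"),
    preprocessors=[
        functools.partial(
            seqio.preprocessors.rekey,
            key_map={"inputs": "source", "targets": "target"},
        ),
        seqio.preprocessors.tokenize,
        seqio.preprocessors.append_eos_after_trim,
    ],
    output_features={
        "inputs": seqio.Feature(vocabulary=BYTE_VOCAB, add_eos=True),
        "targets": seqio.Feature(vocabulary=BYTE_VOCAB, add_eos=True),
    },
    metric_fns=[metrics.accuracy],
)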
train_small.sh
ADDED
@@ -0,0 +1,12 @@
+PROJECT_DIR=${HOME}"/models/multi-sentencefix-byt5"
+T5X_DIR="../../t5x"  # directory where the t5x repo is cloned.
+TFDS_DATA_DIR="gs://nb-t5x/corpus_multi_sentencefix_byt5"
+MODEL_DIR="gs://nb-t5x/small_model_multi_sentencefix_byt5"
+export PYTHONPATH=${PROJECT_DIR}
+
+python3 ${T5X_DIR}/t5x/train.py \
+  --gin_search_paths=${PROJECT_DIR} \
+  --gin_file="finetune_small_byt5_sentencefix.gin" \
+  --gin.MODEL_DIR="'${MODEL_DIR}'" \
+  --tfds_data_dir=${TFDS_DATA_DIR}
+
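Note the nested quoting in --gin.MODEL_DIR="'${MODEL_DIR}'": t5x passes each --gin.X override to gin as a Python expression, so string values need inner quotes that survive shell expansion. Any binding from the .gin file can be overridden the same way; the extra TRAIN_STEPS flag in the variant below is illustrative only and not part of this commit.

python3 ${T5X_DIR}/t5x/train.py \
  --gin_search_paths=${PROJECT_DIR} \
  --gin_file="finetune_small_byt5_sentencefix.gin" \
  --gin.MODEL_DIR="'${MODEL_DIR}'" \
  --gin.TRAIN_STEPS=1_050_000 \
  --tfds_data_dir=${TFDS_DATA_DIR}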