some new v4 stuff
__pycache__/tasks_v4.cpython-38.pyc
ADDED
Binary file (1.89 kB)
finetune_large_mt5_sentencefix_v4.gin
ADDED
@@ -0,0 +1,41 @@
+from __gin__ import dynamic_registration
+import tasks_v4
+
+import __main__ as train_script
+from t5.data import mixtures
+from t5x import models
+from t5x import partitioning
+from t5x import utils
+
+include "t5x/examples/t5/mt5/large.gin"
+include "t5x/configs/runs/finetune.gin"
+
+MIXTURE_OR_TASK_NAME = "sentencefix"
+TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
+TRAIN_STEPS = 1_100_000  # 1000000 pre-trained steps + 100000 fine-tuning steps.
+USE_CACHED_TASKS = False
+DROPOUT_RATE = 0.0
+RANDOM_SEED = 0
+
+# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
+# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
+# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
+# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
+#LOSS_NORMALIZING_FACTOR = 234496
+INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/mt5_large/checkpoint_1000000"
+
+train_script.train:
+  eval_period = 500
+  partitioner = @partitioning.ModelBasedPjitPartitioner()
+
+# `num_decodes` is equivalent to the beam size in beam search decoding.
+models.EncoderDecoderModel.predict_batch_with_aux.num_decodes = 4
+
+partitioning.ModelBasedPjitPartitioner.num_partitions = 2
+
+
+#from t5.models import mesh_transformer
+#import t5.models
+#mesh_transformer.learning_rate_schedules.constant_learning_rate.learning_rate = 0.0005
+#run.learning_rate_schedule = @learning_rate_schedules.constant_learning_rate
+
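Both new configs import tasks_v4 purely for its side effect: registering the "sentencefix" task with seqio so that MIXTURE_OR_TASK_NAME can resolve. The actual tasks_v4.py is not shown in this commit, so the following is only a plausible sketch of what it might contain; the TSV data source, file paths, and preprocessor choices are assumptions, while the vocabulary path is the public one used by the mT5 checkpoints.

# tasks_v4.py -- plausible sketch only; the real file is not part of this
# diff. Data paths and the TSV format are hypothetical placeholders.
import functools

import seqio
from t5.data import preprocessors

# Public sentencepiece vocabulary used by the mT5 checkpoints.
VOCAB = seqio.SentencePieceVocabulary(
    "gs://t5-data/vocabs/mc4.250000.100extra/sentencepiece.model")

seqio.TaskRegistry.add(
    "sentencefix",  # must match MIXTURE_OR_TASK_NAME in the gin files
    source=seqio.TextLineDataSource(
        split_to_filepattern={
            "train": "/data/sentencefix.train.tsv",      # hypothetical path
            "validation": "/data/sentencefix.dev.tsv",   # hypothetical path
        }),
    preprocessors=[
        # Split each "input<TAB>target" line into a feature dict.
        functools.partial(
            preprocessors.parse_tsv, field_names=["inputs", "targets"]),
        seqio.preprocessors.tokenize,
        seqio.preprocessors.append_eos_after_trim,
    ],
    output_features={
        "inputs": seqio.Feature(vocabulary=VOCAB, add_eos=True),
        "targets": seqio.Feature(vocabulary=VOCAB, add_eos=True),
    },
    metric_fns=[],
)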
finetune_mt5_sentencefix_v4.gin
CHANGED
@@ -1,5 +1,5 @@
 from __gin__ import dynamic_registration
-import
+import tasks_v4
 
 import __main__ as train_script
 from t5.data import mixtures
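This one-line fix matters beyond syntax: the dangling `import` was a gin parse error, and with `from __gin__ import dynamic_registration` the task module must be imported inside the gin file so its registration side effect runs before the task name is looked up. A rough sketch of the lookup that happens at startup (not t5x's literal code):

import seqio
import tasks_v4  # noqa: F401 -- registration is an import side effect

# Raises "No Task or Mixture found" if tasks_v4 was never imported.
task = seqio.get_mixture_or_task("sentencefix")
ds = task.get_dataset(
    sequence_length={"inputs": 256, "targets": 256}, split="train")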
finetune_small_mt5_sentencefix_v4.gin
ADDED
@@ -0,0 +1,41 @@
+from __gin__ import dynamic_registration
+import tasks_v4
+
+import __main__ as train_script
+from t5.data import mixtures
+from t5x import models
+from t5x import partitioning
+from t5x import utils
+
+include "t5x/examples/t5/mt5/small.gin"
+include "t5x/configs/runs/finetune.gin"
+
+MIXTURE_OR_TASK_NAME = "sentencefix"
+TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
+TRAIN_STEPS = 1_100_000  # 1000000 pre-trained steps + 100000 fine-tuning steps.
+USE_CACHED_TASKS = False
+DROPOUT_RATE = 0.0
+RANDOM_SEED = 0
+
+# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
+# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
+# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
+# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
+#LOSS_NORMALIZING_FACTOR = 234496
+INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/mt5_small/checkpoint_1000000"
+
+train_script.train:
+  eval_period = 500
+  partitioner = @partitioning.ModelBasedPjitPartitioner()
+
+# `num_decodes` is equivalent to the beam size in beam search decoding.
+models.EncoderDecoderModel.predict_batch_with_aux.num_decodes = 4
+
+partitioning.ModelBasedPjitPartitioner.num_partitions = 2
+
+
+#from t5.models import mesh_transformer
+#import t5.models
+#mesh_transformer.learning_rate_schedules.constant_learning_rate.learning_rate = 0.0005
+#run.learning_rate_schedule = @learning_rate_schedules.constant_learning_rate
+
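A quick arithmetic check on the constants shared by both new configs (plain Python, not part of the commit):

# TRAIN_STEPS: the public mT5 checkpoints end at step 1,000,000, so a total
# of 1_100_000 steps means 100,000 fine-tuning steps.
assert 1_000_000 + 100_000 == 1_100_000

# LOSS_NORMALIZING_FACTOR for mT5 per the comment in the configs:
# pretraining batch_size * target_token_length.
assert 1024 * 229 == 234496  # matches the commented-out value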
tasksv4.py → tasks_v4.py
RENAMED
File without changes