even smaller batch size
finetune_large_mt5_sentencefix_v4_16.gin
CHANGED
@@ -23,7 +23,7 @@ RANDOM_SEED = 0
 # `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
 #LOSS_NORMALIZING_FACTOR = 234496
 INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/mt5_large/checkpoint_1000000"
-BATCH_SIZE =
+BATCH_SIZE = 256

 #train_script.train:
 # eval_period = 500
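
For context, a minimal sketch of how the BATCH_SIZE macro edited above is typically consumed in a t5x fine-tuning run. The train/utils.DatasetConfig binding below is an assumption based on the standard t5x finetune.gin, not part of this commit; it illustrates why changing the single macro value to 256 lowers the global batch size (and memory use) of the whole run.

# Sketch (assumed downstream binding, not in this diff): the BATCH_SIZE macro
# is referenced by the training dataset config, so the one value set in this
# file determines the global batch size used for each training step.
from t5x import utils

train/utils.DatasetConfig:
  split = 'train'
  batch_size = %BATCH_SIZE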