# ul2-t5x / mlarge_full.gin
# Provenance: repo "pere", commit f7ee990 ("large models").
# Continued pre-training config: mT5-Large architecture, resuming from the
# public mT5 checkpoint (see INITIAL_CHECKPOINT_PATH below).
include 't5x/examples/t5/mt5/large.gin'
include 'pretrain_cont.gin'
#include 't5x/configs/runs/pretrain.gin'
#include 't5x/configs/runs/finetune.gin'
# Register necessary SeqIO Tasks/Mixtures.
import t5.data.mixtures
import tasks
# Mixture/task must be supplied on the command line (gin.REQUIRED placeholder).
MIXTURE_OR_TASK_NAME = %gin.REQUIRED
TASK_FEATURE_LENGTHS = {"inputs": 512, "targets": 512}
TRAIN_STEPS = 1_500_000
DROPOUT_RATE = 0.0 # Changed from the default since T5-1.1 recommends this.
# Earlier checkpoint choices, kept for reference:
#INITIAL_CHECKPOINT_PATH = "gs://nb-t5x-us-central2/norwegian_NCC_plus_English_t5x_base/checkpoint_1500000"
#INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/t5_1_1_base/checkpoint_1000000"
# Resume from the public mT5-Large checkpoint at step 1,000,000.
INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/mt5_large/checkpoint_1000000"
# No model parallelism; presumably data-parallel only — verify against hardware setup.
PjitPartitioner.num_partitions = 1
# NOTE(review): the comment on the next binding says "half of the original" but
# the value is 1 — confirm the intended learning rate.
# NOTE(review): gin scoped bindings normally indent the parameter line under the
# colon header; confirm this parses as intended (or use the dotted form
# utils.create_learning_rate_scheduler.base_learning_rate = ...).
utils.create_learning_rate_scheduler:
base_learning_rate = 1 # Continued training; original comment claimed "half of the original" — see NOTE above.