---
nr_frozen_epochs: 0.3
keep_embeddings_frozen: true
optimizer: AdamW
warmup_steps: 0
encoder_learning_rate: 1.0e-06
learning_rate: 1.5e-05
layerwise_decay: 0.95
encoder_model: XLM-RoBERTa
pretrained_model: microsoft/infoxlm-large
pool: avg
layer: mix
layer_transformation: sparsemax
layer_norm: false
loss: mse
dropout: 0.1
batch_size: 1
train_data:
- /content/COMET/data/train.csv
validation_data:
- /content/COMET/data/val.csv
class_identifier: unified_metric
load_pretrained_weights: true
local_files_only: false
sent_layer: mix
word_layer: 24
hidden_sizes:
- 3072
- 1024
activations: Tanh
final_activation: null
input_segments:
- mt
- src
word_level_training: false
loss_lambda: 0.65
error_labels:
- minor
- major
cross_entropy_weights: null