{
"architectures": [
"Seq2SeqCrossFormer"
],
"bos_token_id": 1,
"d_ff": 128,
"d_model": 64,
"dropout": 0.1,
"eos_token_id": 2,
"model_size": 563074,
"model_type": "custom_transformer",
"n_heads": 8,
"n_layers": 1,
"pad_token_id": 0,
"router_dim": 10,
"sequence_length": 256,
"source_sequence_dimension": 70,
"target_sequence_dimension": 306,
"torch_dtype": "float32",
"transformers_version": "4.48.0",
"vocab_size_src": 258,
"vocab_size_tgt": 258
}