{ "activation_dropout": 0.1, "activation_function": "gelu", "architectures": [ "MarianMTModel" ], "attention_dropout": 0.1, "d_model": 768, "decoder_attention_heads": 12, "decoder_ffn_dim": 3072, "decoder_layerdrop": 0.0, "decoder_layers": 8, "decoder_start_token_id": 0, "decoder_vocab_size": 32000, "dropout": 0.1, "dtype": "float32", "encoder_attention_heads": 12, "encoder_ffn_dim": 3072, "encoder_layerdrop": 0.0, "encoder_layers": 8, "eos_token_id": 3, "forced_eos_token_id": null, "init_std": 0.02, "is_encoder_decoder": true, "max_position_embeddings": 512, "model_type": "marian", "num_hidden_layers": 8, "pad_token_id": 0, "scale_embedding": false, "share_encoder_decoder_embeddings": true, "transformers_version": "4.57.3", "use_cache": true, "vocab_size": 32000 }