{
  "model_type": "moe_translation",
  "task": "translation",
  "architectures": [
    "MoETranslationModel"
  ],
  "source_languages": [
    "fr",
    "hi",
    "bn"
  ],
  "target_language": "en",
  "vocab_size": 32000,
  "d_model": 512,
  "nhead": 8,
  "num_experts": 4,
  "num_layers": 6,
  "max_seq_len": 256,
  "training": {
    "stage": "stage2_translation_finetuning",
    "epochs_completed": 3,
    "best_val_loss": 3.8832832201203304,
    "train_loss": 3.95300766737378,
    "token_accuracy": 35.949352649289914,
    "perplexity": 48.583457946777344
  },
  "framework": "pytorch",
  "tokenizer": "sentencepiece",
  "base_model": "arka7/moe-multilingual-translator"
}