{
  "meta-llama/Llama-4-Scout-17B-16E-Instruct": [
    { "task": "text-generation", "instance_type": "trn1", "batch_size": 1, "sequence_length": 4096, "num_cores": 16, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn1", "batch_size": 4, "sequence_length": 4096, "num_cores": 16, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn1", "batch_size": 1, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn1", "batch_size": 4, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn1", "batch_size": 8, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 1, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 4, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 8, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 16, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 32, "sequence_length": 4096, "num_cores": 32, "auto_cast_type": "bf16" }
  ],
  "meta-llama/Llama-4-Maverick-17B-128E-Instruct": [
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 1, "sequence_length": 4096, "num_cores": 64, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 4, "sequence_length": 4096, "num_cores": 64, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 8, "sequence_length": 4096, "num_cores": 64, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 16, "sequence_length": 4096, "num_cores": 64, "auto_cast_type": "bf16" },
    { "task": "text-generation", "instance_type": "trn2", "batch_size": 32, "sequence_length": 4096, "num_cores": 64, "auto_cast_type": "bf16" }
  ]
}