{ "architectures": [ "SoraForSLM" ], "dtype": "float32", "hidden_size": 512, "max_position_embeddings": 512, "model_type": "sora_slm", "auto_map": { "AutoConfig": "configuration_sora.SoraConfig", "AutoModelForCausalLM": "modeling_sora.SoraForSLM" }, "num_heads": 8, "num_layers": 8, "transformers_version": "5.0.0", "use_cache": false, "vocab_size": 2628 }