{
  "architectures": [
    "Hui_CausalLM_Model"
  ],
  "cfg": {
    "context_length": 1024,
    "drop_rate": 0.1,
    "emb_dim": 768,
    "n_heads": 12,
    "n_layers": 12,
    "qkv_bias": false,
    "vocab_size": 50257
  },
  "model_type": "hui_causallm",
  "torch_dtype": "float32",
  "transformers_version": "4.49.0"
}