{ "architectures": [ "CustomTagalogLLM" ], "dim": 512, "dropout": 0.1, "dtype": "float32", "head_dim": 64, "max_seq_len": 256, "model_type": "custom_gqa_swiglu_llm", "multiple_of": 256, "n_heads": 8, "n_kv_heads": 4, "n_layers": 6, "transformers_version": "4.57.2", "vocab_size": 50257 }