humanvprojectceo/Astro-HumanV-Nano-Instruct
astro_nano_final / config.json (commit dc0dac2)
{
"activation_backend": "silu",
"architectures": [
"HumanVForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.1,
"attn_backend": "gqa_matmul",
"attn_implementation": "eager",
"bos_token_id": 151643,
"dtype": "float32",
"eos_token_id": 151643,
"head_dim": 64,
"hidden_act": "silu",
"hidden_dropout": 0.1,
"hidden_size": 512,
"initializer_range": 0.02,
"intermediate_size": 2048,
"kv_cache_dtype": "auto",
"layer_types": [
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention"
],
"max_position_embeddings": 2048,
"max_window_layers": 0,
"mlp_bias": false,
"model_type": "humanv",
"norm_backend": "rmsnorm",
"num_attention_heads": 8,
"num_hidden_layers": 12,
"num_key_value_heads": 4,
"pad_token_id": 151643,
"resid_dropout": 0.1,
"rms_norm_eps": 1e-05,
"rope_parameters": {
"rope_theta": 10000.0,
"rope_type": "default"
},
"rope_partial_rotary_factor": 1.0,
"selective_rope_scale": 1.0,
"sliding_window": null,
"sparse_attention_impl": "local_global_block",
"sparse_attention_window": 256,
"sparse_block_size": 64,
"sparse_global_block_stride": 4,
"sparse_global_num_blocks": 2,
"sparse_local_num_blocks": 4,
"sparse_prefill_chunk_blocks": 16,
"transformers_version": "5.0.0.dev0",
"use_cache": false,
"use_selective_rope": false,
"use_sliding_window": false,
"use_sparse_attention": true,
"vocab_size": 151665
}
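
Since `model_type` is `humanv`, this architecture does not ship with transformers itself, so loading it presumably requires `trust_remote_code=True`. A minimal sketch of reading the config and sanity-checking a few of the fields above; the repo id is taken from the page header, and the `trust_remote_code` requirement is an assumption:

```python
# Minimal sketch: load the config shown above via transformers' AutoConfig.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "humanvprojectceo/Astro-HumanV-Nano-Instruct",
    trust_remote_code=True,  # assumed: "humanv" is a custom model_type
)

# Sanity-check a few values against the JSON above.
assert config.hidden_size == 512
assert config.num_attention_heads == 8
assert config.num_key_value_heads == 4
assert config.num_hidden_layers == 12
```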
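The attention fields describe grouped-query attention (`attn_backend: "gqa_matmul"`): 8 query heads and 4 key/value heads of dimension 64, so 8 × 64 matches `hidden_size` 512 and each K/V head is shared by 2 query heads. A minimal PyTorch sketch of that head layout; the tensor names are illustrative, not the repo's actual code:

```python
# GQA shape walk-through for this config: 8 query heads share 4 KV heads.
import torch

batch, seq, hidden = 2, 16, 512
n_heads, n_kv_heads, head_dim = 8, 4, 64
groups = n_heads // n_kv_heads  # 2 query heads per KV head

q = torch.randn(batch, n_heads, seq, head_dim)
k = torch.randn(batch, n_kv_heads, seq, head_dim)
v = torch.randn(batch, n_kv_heads, seq, head_dim)

# Expand K/V across the groups so plain matmul attention applies.
k = k.repeat_interleave(groups, dim=1)  # -> (batch, 8, seq, 64)
v = v.repeat_interleave(groups, dim=1)

scores = q @ k.transpose(-2, -1) / head_dim**0.5
attn = torch.softmax(scores, dim=-1) @ v          # (batch, 8, seq, 64)
out = attn.transpose(1, 2).reshape(batch, seq, hidden)  # back to 512
```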
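The `sparse_*` fields point to a local+global block-sparse pattern (`sparse_attention_impl: "local_global_block"`), but the config alone does not pin down its exact semantics. One plausible reading, sketched as a boolean block mask: each query block attends causally to its 4 most recent key blocks of size 64, plus key blocks placed every 4th block, capped at 2 globals. All of this interpretation is an assumption, not the repo's implementation:

```python
# Hypothetical block-mask construction from the sparse_* fields above.
import torch

block_size = 64      # sparse_block_size
local_blocks = 4     # sparse_local_num_blocks
global_blocks = 2    # sparse_global_num_blocks
global_stride = 4    # sparse_global_block_stride

seq_len = 1024
n_blocks = seq_len // block_size

mask = torch.zeros(n_blocks, n_blocks, dtype=torch.bool)
global_ids = list(range(0, n_blocks, global_stride))[:global_blocks]
for qb in range(n_blocks):
    lo = max(0, qb - local_blocks + 1)
    mask[qb, lo : qb + 1] = True  # local causal window of blocks
    for g in global_ids:
        if g <= qb:               # keep the pattern causal
            mask[qb, g] = True

# mask[qb, kb] == True means query block qb may attend to key block kb.
```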