evo2-7b / config.json
{
  "name": "Evo 2 7B",
  "_name_or_path": "evo2_7b",
  "architectures": ["Evo2ForCausalLM"],
  "model_type": "evo2",
  "auto_map": {
    "AutoConfig": "configuration_evo2.Evo2Config",
    "AutoModelForCausalLM": "modeling_evo2.Evo2ForCausalLM",
    "AutoTokenizer": ["tokenizer.ByteTokenizer", null]
  },
  "vocab_size": 512,
  "hidden_size": 4096,
  "num_filters": 4096,
  "hcl_layer_idxs": [2, 6, 9, 13, 16, 20, 23, 27, 30],
  "hcm_layer_idxs": [1, 5, 8, 12, 15, 19, 22, 26, 29],
  "hcs_layer_idxs": [0, 4, 7, 11, 14, 18, 21, 25, 28],
  "attn_layer_idxs": [3, 10, 17, 24, 31],
  "hcm_filter_length": 128,
  "hcl_filter_groups": 4096,
  "hcm_filter_groups": 256,
  "hcs_filter_groups": 256,
  "hcs_filter_length": 7,
  "num_layers": 32,
  "short_filter_length": 3,
  "num_attention_heads": 32,
  "short_filter_bias": false,
  "eps": 0.000001,
  "state_size": 16,
  "rotary_emb_base": 100000000000,
  "rotary_emb_scaling_factor": 128,
  "use_interpolated_rotary_pos_emb": true,
  "make_vocab_size_divisible_by": 8,
  "inner_size_multiple_of": 16,
  "inner_mlp_size": 11264,
  "log_intermediate_values": false,
  "proj_groups": 1,
  "hyena_filter_groups": 1,
  "column_split_hyena": false,
  "column_split": true,
  "interleave": true,
  "evo2_style_activations": true,
  "model_parallel_size": 1,
  "pipe_parallel_size": 1,
  "tie_embeddings": true,
  "mha_out_proj_bias": true,
  "hyena_out_proj_bias": true,
  "hyena_flip_x1x2": false,
  "qkv_proj_bias": false,
  "use_fp8_input_projections": true,
  "max_seqlen": 1048576,
  "max_batch_size": 1,
  "final_norm": true,
  "use_flash_attn": false,
  "use_flash_rmsnorm": false,
  "use_flash_depthwise": false,
  "use_flashfft": false,
  "use_laughing_hyena": false,
  "inference_mode": true,
  "print_activations": false
}
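
For reference, a minimal sketch of loading this checkpoint with the `transformers` library. Because `auto_map` routes `AutoConfig` and `AutoModelForCausalLM` to the repo's custom `configuration_evo2.py` and `modeling_evo2.py` files, loading requires `trust_remote_code=True`. Note that the four layer-index lists partition all 32 layers: 9 short (`hcs`, filter length 7), 9 medium (`hcm`, filter length 128), and 9 long (`hcl`) hyena blocks, plus 5 attention blocks, and that `use_flash_attn` and the other `use_flash_*` flags are all `false` in this revision. The repo id and dtype below are placeholders, not values taken from this config:

```python
# A minimal loading sketch. Assumptions (not taken from config.json):
# the hub repo id is a placeholder, and bfloat16 is an illustrative
# dtype choice.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "<org-or-user>/evo2-7b"  # placeholder: the repo hosting this config.json

# trust_remote_code=True is required because auto_map routes the Auto*
# classes to the repo's own configuration_evo2.py / modeling_evo2.py.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.model_type)      # evo2
print(config.use_flash_attn)  # False -- flash attention disabled in this revision

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # assumed dtype; the config does not specify one
)
model.eval()  # the config already sets "inference_mode": true
```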