{
  "architectures": [
    "DbrxForCausalLM"
  ],
  "attn_config": {
    "attn_pdrop": 0.0,
    "clip_qkv": null,
    "kv_n_heads": 1,
    "model_type": "",
    "rope_theta": 10000.0
  },
  "d_model": 8,
  "dtype": "float32",
  "emb_pdrop": 0.0,
  "ffn_config": {
    "ffn_act_fn": {
      "name": "silu"
    },
    "ffn_hidden_size": 3584,
    "model_type": "",
    "moe_jitter_eps": null,
    "moe_loss_weight": 0.01,
    "moe_normalize_expert_weights": 1.0,
    "moe_num_experts": 4,
    "moe_top_k": 1
  },
  "initializer_range": 0.02,
  "intermediate_size": 32,
  "max_seq_len": 2048,
  "model_type": "dbrx",
  "n_heads": 4,
  "n_layers": 2,
  "num_key_value_heads": 2,
  "output_router_logits": false,
  "resid_pdrop": 0.0,
  "tie_word_embeddings": false,
  "transformers_version": "4.57.0",
  "use_cache": true,
  "vocab_size": 100280
}
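
For reference, a minimal sketch of turning this config into a model with the transformers library (the local filename "config.json" is an assumption about where the file above is saved):

from transformers import DbrxConfig, DbrxForCausalLM

# Parse the JSON above into a DbrxConfig; the nested "attn_config" and
# "ffn_config" dicts are converted to their sub-config classes.
# The path "config.json" is an assumption, not given by the file page.
config = DbrxConfig.from_json_file("config.json")

# "DbrxForCausalLM" matches the "architectures" entry above; this builds
# a tiny, randomly initialized model (d_model=8, n_layers=2), suitable
# only for testing, not for inference with pretrained weights.
model = DbrxForCausalLM(config)
print(f"parameters: {model.num_parameters():,}")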