unfavalen committed
Commit fc9206b · verified · 1 Parent(s): 4d5a069
Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "GptOssForCausalLM"
+    "LexaDeltaForCausalLM"
   ],
   "attention_bias": true,
   "attention_dropout": 0.0,
@@ -40,7 +40,7 @@
     "full_attention"
   ],
   "max_position_embeddings": 131072,
-  "model_type": "gpt_oss",
+  "model_type": "lexa_delta",
   "num_attention_heads": 64,
   "num_experts_per_tok": 4,
   "num_hidden_layers": 24,
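
This commit only renames the architecture string and model_type (gpt_oss → lexa_delta); the remaining hyperparameters are unchanged. A stock transformers install will not recognize the new model_type, so a checkpoint carrying this config must register custom classes before the Auto* factories can load it. A minimal sketch, assuming the LexaDelta* classes are thin wrappers around the existing GptOss classes (hypothetical names inferred from this diff, not the repo's actual code):

# Hedged sketch: make AutoConfig/AutoModelForCausalLM resolve the renamed
# "lexa_delta" model_type. GptOssConfig/GptOssForCausalLM ship with recent
# transformers releases; the LexaDelta* subclasses below are hypothetical
# stand-ins for whatever this repo actually implements.
from transformers import (
    AutoConfig,
    AutoModelForCausalLM,
    GptOssConfig,
    GptOssForCausalLM,
)

class LexaDeltaConfig(GptOssConfig):
    # Must match the "model_type" string written into config.json.
    model_type = "lexa_delta"

class LexaDeltaForCausalLM(GptOssForCausalLM):
    config_class = LexaDeltaConfig

# Register the pair so the Auto* factories can dispatch on "lexa_delta".
AutoConfig.register("lexa_delta", LexaDeltaConfig)
AutoModelForCausalLM.register(LexaDeltaConfig, LexaDeltaForCausalLM)

# After registration the checkpoint loads like any other model
# (replace the placeholder with the actual repo id or local path).
model = AutoModelForCausalLM.from_pretrained("path/to/lexa-delta-checkpoint")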