cryptonaut committed (verified)
Commit 676cdc6 · 1 parent: 0eead04

Update config.json

Files changed (1):
  1. config.json (+2 -2)
config.json CHANGED
@@ -2,7 +2,7 @@
   "H_cycles": 3,
   "L_cycles": 2,
   "architectures": [
-    "GemmaHRMForCausalLM"
+    "GemmaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -18,7 +18,7 @@
   "max_position_embeddings": 8192,
   "max_thoughts": 4,
   "merged_lm_and_talk_heads": false,
-  "model_type": "gemma_hrm",
+  "model_type": "gemma",
   "num_attention_heads": 8,
   "num_hidden_layers": 8,
   "num_key_value_heads": 8,