Sayan01 committed
Commit 9b8e9d5 · verified · 1 parent: 9bf608e

Upload Qwen2ForCausalLM

Files changed (1):
config.json (+0, -1)
config.json CHANGED
@@ -16,7 +16,6 @@
     "num_attention_heads": 16,
     "num_hidden_layers": 18,
     "num_key_value_heads": 2,
-    "output_hidden_states": false,
     "rms_norm_eps": 1e-06,
     "rope_scaling": null,
     "rope_theta": 1000000.0,