maxoul committed on
Commit
900a889
·
verified ·
1 Parent(s): c893412

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +9 -6
config.json CHANGED
@@ -1,12 +1,15 @@
1
  {
2
- "model_type": "splade",
3
- "auto_map": {
4
- "AutoConfig": "splade.SpladeConfig",
5
- "AutoModelForCausalLM": "splade.Splade"
6
- },
7
  "attn_implementation": "flash_attention_2",
 
8
  "bidirectional": true,
 
 
 
9
  "model_name_or_path": "Qwen/Qwen3-8B",
 
 
10
  "padding_side": "left",
11
- "transformers_version": "4.52.4"
 
12
  }
 
1
  {
2
+ "archi_type": "decoder",
 
 
 
 
3
  "attn_implementation": "flash_attention_2",
4
+ "attn_type": "causal",
5
  "bidirectional": true,
6
+ "lexical": false,
7
+ "lora": true,
8
+ "lora_r": 64,
9
  "model_name_or_path": "Qwen/Qwen3-8B",
10
+ "model_type": "splade",
11
+ "n_layers": null,
12
  "padding_side": "left",
13
+ "train_head": false,
14
+ "transformers_version": "4.53.3"
15
  }