maxoul committed on
Commit
4a11121
·
verified ·
1 Parent(s): 2211fc5

Create config.json

Browse files
Files changed (1) hide show
  1. config.json +19 -0
config.json ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "archi_type": "decoder",
3
+ "attn_implementation": "flash_attention_2",
4
+ "attn_type": "causal",
5
+ "bidirectional": true,
6
+ "lexical": false,
7
+ "lora": true,
8
+ "lora_r": 64,
9
+ "model_name_or_path": "Qwen/Qwen3-8B",
10
+ "model_type": "splade",
11
+ "n_layers": null,
12
+ "padding_side": "left",
13
+ "train_head": false,
14
+ "transformers_version": "4.53.3",
15
+ "auto_map": {
16
+ "AutoConfig": "splade.SpladeConfig",
17
+ "AutoModelForCausalLM": "splade.Splade"
18
+ }
19
+ }