{
  "archi_type": "decoder",
  "architectures": [
    "Splade"
  ],
  "attn_implementation": "flash_attention_2",
  "attn_type": "causal",
  "bidirectional": true,
  "lora": false,
  "lora_r": 0,
  "model_name_or_path": "Qwen/Qwen3-0.6B",
  "model_type": "splade",
  "n_layers": null,
  "padding_side": "left",
  "torch_dtype": "bfloat16",
  "train_head": false,
  "transformers_version": "4.53.3",
  "auto_map": {
    "AutoConfig": "splade.SpladeConfig",
    "AutoModelForCausalLM": "splade.Splade"
  }
}