{
  "archi_type": "decoder",
  "attn_implementation": "flash_attention_2",
  "attn_type": "causal",
  "bidirectional": true,
  "lexical": false,
  "lora": true,
  "lora_r": 64,
  "model_name_or_path": "Qwen/Qwen3-8B",
  "model_type": "splade",
  "n_layers": null,
  "padding_side": "left",
  "train_head": false,
  "transformers_version": "4.53.3",
  "auto_map": {
    "AutoConfig": "splade.SpladeConfig",
    "AutoModelForCausalLM": "splade.Splade"
  }
}