Azrail committed
Commit fbb3ef5 · verified · 1 Parent(s): 0907f49

Upload SmalLmForCausalLM

Files changed (1)
config.json +5 -1
config.json CHANGED
@@ -4,6 +4,10 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.1,
+  "auto_map": {
+    "AutoConfig": "config.SmalLmConfig",
+    "AutoModelForCausalLM": "model.SmalLmForCausalLM"
+  },
   "balancing_coef": 0.0001,
   "bos_token_id": 1,
   "embedding_dropout": 0.0,
@@ -38,7 +42,7 @@
   "sliding_window_attention": true,
   "sliding_window_context": 1024,
   "sliding_window_period": 4,
-  "static_residual": false,
+  "static_residual": true,
   "token_experts": 3,
   "torch_dtype": "float32",
   "transformers_version": "4.50.3",