Azrail committed on
Commit
2667be4
·
verified ·
1 Parent(s): 64c1cb3

Upload SmalLmForCausalLM

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -39,7 +39,7 @@
39
  "sliding_window_attention": true,
40
  "sliding_window_context": 1024,
41
  "sliding_window_period": 4,
42
- "static_residual": false,
43
  "token_experts": 3,
44
  "torch_dtype": "float32",
45
  "transformers_version": "4.50.3",
 
39
  "sliding_window_attention": true,
40
  "sliding_window_context": 1024,
41
  "sliding_window_period": 4,
42
+ "static_residual": true,
43
  "token_experts": 3,
44
  "torch_dtype": "float32",
45
  "transformers_version": "4.50.3",