Maxtimer97 committed on
Commit
ce81c56
·
1 Parent(s): a2f57c7

Added attn_imp in config

Browse files
Files changed (1) hide show
  1. configuration_chatglm.py +2 -0
configuration_chatglm.py CHANGED
@@ -17,6 +17,7 @@ class ChatGLMConfig(PretrainedConfig):
17
  kernel_size=64,
18
  kernel_stride=64,
19
  window_size=512,
 
20
  topk=16,
21
  init_blocks=1,
22
  local_blocks=2,
@@ -38,6 +39,7 @@ class ChatGLMConfig(PretrainedConfig):
38
  fp32_residual_connection=False,
39
  **kwargs
40
  ):
 
41
  self.num_layers = num_layers
42
  self.vocab_size = padded_vocab_size
43
  self.padded_vocab_size = padded_vocab_size
 
17
  kernel_size=64,
18
  kernel_stride=64,
19
  window_size=512,
20
+ attn_implementation="nsa",
21
  topk=16,
22
  init_blocks=1,
23
  local_blocks=2,
 
39
  fp32_residual_connection=False,
40
  **kwargs
41
  ):
42
+ self.attn_implementation = attn_implementation
43
  self.num_layers = num_layers
44
  self.vocab_size = padded_vocab_size
45
  self.padded_vocab_size = padded_vocab_size