mansaripo committed on
Commit
05b36fb
·
verified ·
1 Parent(s): a372444

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. config.json +0 -3
  2. configuration_cloverlm.py +0 -6
config.json CHANGED
@@ -23,9 +23,6 @@
23
  "num_blocks": 29,
24
  "num_hidden_layers": 29,
25
  "num_key_value_heads": 7,
26
- "quantization_config": {
27
- "quant_method": "quartet2"
28
- },
29
  "quartet_2_impl": "pseudoquant",
30
  "ratio": 4,
31
  "scale_type": "1/sqrt(d)",
 
23
  "num_blocks": 29,
24
  "num_hidden_layers": 29,
25
  "num_key_value_heads": 7,
 
 
 
26
  "quartet_2_impl": "pseudoquant",
27
  "ratio": 4,
28
  "scale_type": "1/sqrt(d)",
configuration_cloverlm.py CHANGED
@@ -23,7 +23,6 @@ class CloverLMConfig(PretrainedConfig):
23
  num_attention_heads=None,
24
  num_key_value_heads=None,
25
  head_dim=None,
26
- quantization_config=None,
27
  **kwargs,
28
  ):
29
  self.num_blocks = num_blocks
@@ -56,11 +55,6 @@ class CloverLMConfig(PretrainedConfig):
56
  else heads // ratio
57
  )
58
  self.head_dim = head_dim if head_dim is not None else d_head
59
- self.quantization_config = (
60
- quantization_config
61
- if quantization_config is not None
62
- else {"quant_method": "quartet2"}
63
- )
64
 
65
  kwargs.pop("tie_word_embeddings", None)
66
  super().__init__(
 
23
  num_attention_heads=None,
24
  num_key_value_heads=None,
25
  head_dim=None,
 
26
  **kwargs,
27
  ):
28
  self.num_blocks = num_blocks
 
55
  else heads // ratio
56
  )
57
  self.head_dim = head_dim if head_dim is not None else d_head
 
 
 
 
 
58
 
59
  kwargs.pop("tie_word_embeddings", None)
60
  super().__init__(