kgrabko committed on
Commit bd2bcd7 · verified · 1 Parent(s): fdc3bc6

Update config.json

Files changed (1)
  1. config.json +12 -6
config.json CHANGED
@@ -1,7 +1,8 @@
 {
   "architectures": [
-    "LlamaForCausalLM"
+    "JiRackTernaryModel"
   ],
+  "model_type": "jirack_ternary",
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
@@ -14,23 +15,28 @@
   "intermediate_size": 53248,
   "max_position_embeddings": 131072,
   "mlp_bias": false,
-  "model_type": "llama",
   "num_attention_heads": 128,
   "num_hidden_layers": 126,
-  "num_key_value_heads": 8,
+  "num_key_value_heads": 16,
   "pad_token_id": null,
   "pretraining_tp": 1,
+  "quantization_config": {
+    "quant_method": "bitnet",
+    "bits": 2,
+    "packing": "2bit_uint8",
+    "group_size": 128
+  },
   "rms_norm_eps": 1e-05,
-  "rope_parameters": {
+  "rope_scaling": {
     "factor": 8.0,
     "high_freq_factor": 4.0,
     "low_freq_factor": 1.0,
     "original_max_position_embeddings": 8192,
-    "rope_theta": 500000.0,
     "rope_type": "llama3"
   },
+  "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "transformers_version": "5.0.0",
   "use_cache": true,
   "vocab_size": 128256
-}
+}
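
The new model_type "jirack_ternary" is presumably not a built-in transformers architecture, so loading this repository would normally require the custom modeling code shipped with it (e.g. trust_remote_code=True). Independent of that, the changes in this commit (rope_parameters renamed to rope_scaling, rope_theta hoisted to the top level, num_key_value_heads raised from 8 to 16, and the new quantization_config block) can be verified from the raw JSON. Below is a minimal sanity-check sketch, assuming a local copy of the updated config.json; the expected values are copied directly from the diff above.

import json

# Check that a local config.json matches the layout introduced in this commit.
# Expected values are copied from the diff; the file path is an assumption.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["architectures"] == ["JiRackTernaryModel"]
assert cfg["model_type"] == "jirack_ternary"
assert cfg["num_key_value_heads"] == 16

# New quantization block: 2-bit "bitnet" weights packed into uint8, groups of 128.
q = cfg["quantization_config"]
assert q["quant_method"] == "bitnet" and q["bits"] == 2
assert q["packing"] == "2bit_uint8" and q["group_size"] == 128

# rope_parameters was renamed to rope_scaling; rope_theta now lives at the top level.
assert cfg["rope_scaling"]["rope_type"] == "llama3"
assert "rope_theta" not in cfg["rope_scaling"]
assert cfg["rope_theta"] == 500000.0

print("config.json matches the layout introduced in this commit")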