Minhdn committed
Commit 67c0284 · verified · 1 parent: af9d8e7

Upload folder using huggingface_hub

chat_template.jinja ADDED
@@ -0,0 +1,21 @@
+ {%- set ns = namespace(found_item=false) -%}
+ {%- for message in messages -%}
+ {%- if message['role'] == 'system' -%}
+ {%- set ns.found_item = true -%}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- if not ns.found_item -%}
+ {{'You are an AI programming assistant, utilizing the Deepseek Coder model, developed by Deepseek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer.\n'}}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if message['role'] == 'system' %}
+ {{ message['content'] }}
+ {%- else %}
+ {%- if message['role'] == 'user' %}
+ {{'### Instruction:\n' + message['content'] + '\n'}}
+ {%- else %}
+ {{'### Response:\n' + message['content'] + '\n<|EOT|>\n'}}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {{'### Response:\n'}}
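
For reference, this template can be exercised locally with plain jinja2, which mirrors what tokenizer.apply_chat_template does at generation time. A minimal sketch; the file path and example messages below are assumptions for illustration:

```python
# Render the uploaded chat template with jinja2 (assumed local file path).
from jinja2 import Template

with open("chat_template.jinja") as f:
    template = Template(f.read())

messages = [
    {"role": "user", "content": "Write a binary search in Python."},
]

# With no system message present, the template falls back to the default
# DeepSeek Coder system prompt, then emits "### Instruction:" /
# "### Response:" blocks and a trailing "### Response:\n" generation cue.
print(template.render(messages=messages))
```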
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 100000,
+   "eos_token_id": 100001,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 4096,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 30,
+   "num_key_value_heads": 32,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 10000,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.55.0",
+   "use_cache": true,
+   "vocab_size": 102400
+ }
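
The config describes a plain 30-layer Llama-style decoder (full multi-head attention, since num_key_value_heads equals num_attention_heads, with untied embeddings). As a sanity check, here is a minimal sketch, assuming the config.json above sits in the working directory, that derives the approximate parameter count from these dimensions:

```python
# Load the config with transformers and estimate the parameter count.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(".")  # reads ./config.json

embed = cfg.vocab_size * cfg.hidden_size                  # input embeddings
attn = 4 * cfg.hidden_size * cfg.hidden_size              # q/k/v/o projections
mlp = 3 * cfg.hidden_size * cfg.intermediate_size         # gate/up/down
total = 2 * embed + cfg.num_hidden_layers * (attn + mlp)  # untied lm_head
print(f"~{total / 1e9:.2f}B parameters")                  # ~6.91B
```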
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6d794a34fe60fcf2c35f97a8a3dd9692798c52c5d8971be1284fac2d2de6f9d
+ size 3978116712
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31280eb0444b8005810b03ed54678783f2a6c448e5e2f38c6fe0f839f99283f9
+ size 838860896
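
Both shard entries are Git LFS pointers rather than the tensors themselves. After downloading, a shard can be checked against the recorded oid and size; below is a minimal sketch for the second shard (the same check applies to the first, and the local path is an assumption):

```python
# Verify a downloaded shard against its Git LFS pointer (oid + size).
import hashlib
import os

def sha256_of(path, chunk=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            h.update(block)
    return h.hexdigest()

path = "model-00002-of-00002.safetensors"
assert os.path.getsize(path) == 838860896
assert sha256_of(path) == "31280eb0444b8005810b03ed54678783f2a6c448e5e2f38c6fe0f839f99283f9"
print("shard checksum OK")
```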
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors.index.json.meta.json ADDED
@@ -0,0 +1 @@
+ {"model.layers.0.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.0.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.0.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.0.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.0.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.0.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.0.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.1.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.1.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.1.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.1.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", 
"unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.1.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.1.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.1.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.2.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.2.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.2.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.2.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.2.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.2.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.2.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.3.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": 
"sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.3.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.3.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.3.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.3.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.3.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.3.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.4.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.4.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.4.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.4.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.4.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": 
[352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.4.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.4.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.5.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.5.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.5.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.5.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.5.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.5.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.5.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.6.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.6.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], 
"scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.6.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.6.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.6.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.6.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.6.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.7.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.7.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.7.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.7.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.7.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.7.mlp.up_proj": {"meta": 
{"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.7.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.8.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.8.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.8.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.8.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.8.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.8.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.8.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.9.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.9.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", 
"view_as_float": false}}, "model.layers.9.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.9.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.9.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.9.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.9.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.10.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.10.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.10.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.10.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.10.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.10.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", 
"compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.10.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.11.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.11.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.11.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.11.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.11.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.11.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.11.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.12.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.12.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.12.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": 
[131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.12.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.12.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.12.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.12.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.13.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.13.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.13.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.13.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.13.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.13.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.13.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], 
"scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.14.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.14.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.14.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.14.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.14.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.14.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.14.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.15.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.15.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.15.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, 
"model.layers.15.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.15.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.15.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.15.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.16.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.16.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.16.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.16.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.16.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.16.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.16.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", 
"unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.17.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.17.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.17.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.17.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.17.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.17.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.17.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.18.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.18.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.18.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.18.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": 
"4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.18.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.18.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.18.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.19.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.19.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.19.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.19.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.19.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.19.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.19.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.20.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], 
"awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.20.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.20.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.20.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.20.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.20.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.20.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.21.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.21.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.21.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.21.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.21.mlp.gate_proj": {"meta": {"nbits": 4, 
"group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.21.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.21.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.22.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.22.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.22.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.22.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.22.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.22.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.22.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.23.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": 
false}}, "model.layers.23.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.23.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.23.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.23.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.23.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.23.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.24.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.24.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.24.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.24.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.24.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": 
"torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.24.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.24.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.25.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.25.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.25.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.25.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.25.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.25.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.25.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.26.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.26.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, 
"packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.26.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.26.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.26.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.26.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.26.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.27.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.27.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.27.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.27.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.27.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.27.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 
1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.27.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.28.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.28.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.28.self_attn.v_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.28.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.28.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.28.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.28.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.29.self_attn.q_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.29.self_attn.k_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.29.self_attn.v_proj": {"meta": {"nbits": 4, 
"group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.29.self_attn.o_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 4096], "scale.shape": [131072, 1], "awq_scale": null, "zero.shape": [131072, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.29.mlp.gate_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.29.mlp.up_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [11008, 4096], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}, "model.layers.29.mlp.down_proj": {"meta": {"nbits": 4, "group_size": 128, "shape": [4096, 11008], "scale.shape": [352256, 1], "awq_scale": null, "zero.shape": [352256, 1], "axis": 1, "packing": "4bit_u8", "method": "sinq_quantAux", "compute_dtype": "torch.float16", "unpack_view_dtype": "torch.uint8", "view_as_float": false}}}
special_tokens_map.json ADDED
@@ -0,0 +1,17 @@
+ {
+ "bos_token": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<|end▁of▁sentence|>"
+ }
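Note that special_tokens_map.json defines no dedicated pad token: the EOS string doubles as padding. A quick check follows, with an illustrative local path; when fine-tuning, mask padded positions out of the loss, since pad and EOS share one id.

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # illustrative path
assert tok.pad_token == tok.eos_token  # both "<|end▁of▁sentence|>"

# Batch padding therefore fills with the EOS id.
batch = tok(["short", "a longer input"], padding=True, return_tensors="pt")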
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": null,
+ "added_tokens_decoder": {
+ "100000": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "100001": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin▁of▁sentence|>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|end▁of▁sentence|>",
+ "extra_special_tokens": {},
+ "legacy": true,
+ "model_max_length": 16384,
+ "pad_token": "<|end▁of▁sentence|>",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "LlamaTokenizerFast",
+ "unk_token": null,
+ "use_default_system_prompt": false
+ }
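With "add_bos_token": true and "add_eos_token": false, plain encoding prepends the BOS token (id 100000) and appends nothing; generation instead stops when the model emits EOS (id 100001), matching bos_token_id and eos_token_id in config.json. A small sanity check, again with an illustrative path:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # illustrative path
ids = tok("def add(a, b):").input_ids

assert ids[0] == tok.bos_token_id == 100000  # BOS prepended automatically
assert ids[-1] != tok.eos_token_id           # no EOS appended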