Upload Phi3ForCausalLM
- config.json +9 -13
- model.safetensors +2 -2
config.json
CHANGED
@@ -24,19 +24,15 @@
   "original_max_position_embeddings": 4096,
   "pad_token_id": 32000,
   "quantization_config": {
-    "
-    "
-    "
-    "
-    "
-    "
-    "
-    "
-    "
-    "llm_int8_threshold": 6.0,
-    "load_in_4bit": true,
-    "load_in_8bit": false,
-    "quant_method": "bitsandbytes"
+    "bits": 4,
+    "damp_percent": 0.1,
+    "dataset": "wikitext2",
+    "desc_act": false,
+    "group_size": 128,
+    "modules_in_block_to_quantize": null,
+    "quant_method": "gptq",
+    "sym": true,
+    "true_sequential": true
   },
   "resid_pdrop": 0.0,
   "rms_norm_eps": 1e-05,
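The new quantization_config block is what transformers' GPTQConfig serializes into config.json, replacing the previous bitsandbytes settings. Below is a minimal sketch of how a checkpoint with exactly these settings could be produced; the base model id microsoft/Phi-3-mini-4k-instruct is an assumption (original_max_position_embeddings of 4096 and pad_token_id 32000 are consistent with it, but the commit does not name the source checkpoint), and quantizing this way requires the optimum and auto-gptq packages.

```python
# Sketch: producing a GPTQ checkpoint whose config.json matches the diff above.
# Assumptions: base model id, output directory name.
from transformers import AutoModelForCausalLM, AutoTokenizer, GPTQConfig

base_id = "microsoft/Phi-3-mini-4k-instruct"  # assumed base checkpoint
tokenizer = AutoTokenizer.from_pretrained(base_id)

# Values mirror the committed quantization_config.
gptq_config = GPTQConfig(
    bits=4,
    dataset="wikitext2",     # calibration data used during quantization
    tokenizer=tokenizer,     # needed to tokenize the calibration dataset
    group_size=128,
    damp_percent=0.1,
    desc_act=False,
    sym=True,
    true_sequential=True,
)

# Quantizes the weights on load, then saves the 4-bit checkpoint.
model = AutoModelForCausalLM.from_pretrained(
    base_id,
    quantization_config=gptq_config,
    device_map="auto",
)
model.save_pretrained("phi3-gptq-4bit")
tokenizer.save_pretrained("phi3-gptq-4bit")
```

Loading the committed checkpoint itself needs none of this: AutoModelForCausalLM.from_pretrained on the repo picks the GPTQ settings up from config.json automatically, provided the GPTQ kernels are installed.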
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:8cc2aa9e1daa06456ef8061063e9bade3fd2103ffb3eadcc5370c9a2888dda7b
+ size 2279413824
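The safetensors file is stored through Git LFS, so the diff shows the pointer file rather than the weights: the blob's sha256 and its byte size. A quick sketch for checking a local download against this pointer (the local path is an assumption; the oid and size are taken from the new pointer above):

```python
# Verify a downloaded model.safetensors against the Git LFS pointer above.
import hashlib
import os

path = "model.safetensors"  # assumed local download location
expected_oid = "8cc2aa9e1daa06456ef8061063e9bade3fd2103ffb3eadcc5370c9a2888dda7b"
expected_size = 2279413824  # bytes (~2.3 GB), plausible for a 4-bit Phi-3-mini

assert os.path.getsize(path) == expected_size, "size mismatch"

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("local file matches the LFS pointer")
```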