Upload PhiForCausalLM
Files changed:
- config.json +2 -2
- model.safetensors +2 -2
config.json
CHANGED
@@ -27,8 +27,8 @@
     "llm_int8_has_fp16_weight": false,
     "llm_int8_skip_modules": null,
     "llm_int8_threshold": 6.0,
-    "load_in_4bit":
-    "load_in_8bit":
+    "load_in_4bit": true,
+    "load_in_8bit": false,
     "quant_method": "bitsandbytes"
   },
   "resid_pdrop": 0.1,
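The new values enable 4-bit bitsandbytes quantization for this checkpoint. As a minimal sketch (not part of this commit; the checkpoint id and output directory are placeholders, and saving 4-bit weights assumes a transformers/bitsandbytes version that supports 4-bit serialization), a quantization_config block like the one above is typically produced as follows:

import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Matches the new config.json: 4-bit on, 8-bit off.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    load_in_8bit=False,
)

# Placeholder checkpoint id; any PhiForCausalLM checkpoint would do.
model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-1_5",
    quantization_config=bnb_config,
    torch_dtype=torch.float16,
)

# save_pretrained writes the quantization_config block (including the
# llm_int8_* defaults and "quant_method": "bitsandbytes") into config.json.
model.save_pretrained("phi-4bit")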
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:2997903422f4d39a95b4beec496cfed55d2ca8aac12a360b5a3c82b38b299596
+size 1824834119
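model.safetensors is stored as a git-lfs pointer: the file tracked in the repo records only the blob's SHA-256 (oid) and byte size (~1.8 GB), not the weights themselves. A minimal sketch (an assumption, not part of this commit; the local path is a placeholder) for checking a downloaded copy against the new pointer:

import hashlib
import os

path = "model.safetensors"  # placeholder path to the downloaded file
expected_oid = "2997903422f4d39a95b4beec496cfed55d2ca8aac12a360b5a3c82b38b299596"
expected_size = 1824834119

# Hash in 1 MiB chunks to avoid loading the ~1.8 GB file into memory.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")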