Upload GptOssForCausalLM
- config.json +1 -1
- model.safetensors +2 -2
config.json
CHANGED
@@ -61,7 +61,7 @@
   "sliding_window": 128,
   "swiglu_limit": 7.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.55.4",
   "use_cache": true,
   "vocab_size": 201088
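
For context, the torch_dtype field in config.json is the dtype that transformers honors when a model is loaded with torch_dtype="auto"; after this commit the weights come up as bfloat16. A minimal sketch, assuming a placeholder repo id (the actual repository name is not shown on this page):

import torch
from transformers import AutoModelForCausalLM

# "auto" tells transformers to use the torch_dtype recorded in config.json,
# so with this change the parameters load as bfloat16 rather than float32.
model = AutoModelForCausalLM.from_pretrained(
    "your-org/gpt-oss-checkpoint",  # placeholder repo id, not the real one
    torch_dtype="auto",
)
print(model.dtype)  # expected: torch.bfloat16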
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7ce5c5057b6d6c258ecc5d5fec928ec1a175c6475625b524e5a4e2fba1fb02ea
+size 12890328
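
Since model.safetensors is stored under Git LFS, the commit only rewrites this pointer file; the oid is the SHA-256 of the actual weights blob. A short sketch to verify a downloaded copy against the pointer, assuming a local file path:

import hashlib

def sha256_of(path: str) -> str:
    # Stream the file in 1 MiB chunks so large checkpoints are not read into memory at once.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Assumed local path to the downloaded file.
digest = sha256_of("model.safetensors")
assert digest == "7ce5c5057b6d6c258ecc5d5fec928ec1a175c6475625b524e5a4e2fba1fb02ea"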