Upload Gemma2ForCausalLM
- config.json +1 -1
- generation_config.json +8 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +0 -0
config.json
CHANGED
@@ -42,7 +42,7 @@
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "sliding_window_size": 4096,
-  "torch_dtype": "
+  "torch_dtype": "float16",
   "transformers_version": "4.42.4",
   "use_cache": true,
   "vocab_size": 256000
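
The one-line change above pins the checkpoint dtype to float16 in config.json. As a minimal sketch of what that means for consumers (the repo id below is a placeholder, not this repository's actual name), transformers reads the field from config.json and, with torch_dtype="auto", loads the weights in that precision instead of upcasting to float32:

from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-org/your-gemma2-model"  # placeholder repo id, assumption

# config.torch_dtype mirrors the "torch_dtype" field committed above.
config = AutoConfig.from_pretrained(repo_id)
print(config.torch_dtype)  # torch.float16 after this commit

# torch_dtype="auto" tells from_pretrained to honor config.json's dtype
# rather than materializing the weights in float32.
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto")
print(model.dtype)  # torch.float16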
generation_config.json
ADDED
@@ -0,0 +1,8 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "cache_implementation": "hybrid",
+  "eos_token_id": 1,
+  "pad_token_id": 0,
+  "transformers_version": "4.42.4"
+}
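
The new generation_config.json supplies generate-time defaults: the special token ids and the "hybrid" KV cache that transformers uses for Gemma 2's mix of sliding-window and global attention layers. A minimal sketch of how those defaults surface, again with a placeholder repo id:

from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "your-org/your-gemma2-model"  # placeholder repo id, assumption

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto")

# The committed defaults are exposed on model.generation_config.
print(model.generation_config.cache_implementation)  # "hybrid"
print(model.generation_config.bos_token_id)          # 2
print(model.generation_config.eos_token_id)          # 1

# generate() picks these up implicitly: no explicit cache, eos, or pad
# arguments are needed, because they come from generation_config.json.
inputs = tokenizer("Hello", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))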
model-00001-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:afc137789537ef177dc95e50d2d5b26bb03a9f6a0b15fee950d79568d445df65
+size 4979101178
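
These three lines are a Git LFS pointer, not the weights themselves: the roughly 5 GB shard is stored out-of-band and addressed by its SHA-256. A minimal sketch of verifying a downloaded shard against the pointer, assuming the file already sits in the working directory:

import hashlib
import os

# Values copied from the LFS pointer above.
path = "model-00001-of-00002.safetensors"  # assumes a local download
expected_oid = "afc137789537ef177dc95e50d2d5b26bb03a9f6a0b15fee950d79568d445df65"
expected_size = 4979101178

assert os.path.getsize(path) == expected_size, "size mismatch"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("shard matches its LFS pointer")

The same check applies to the second shard below with its own oid and size.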
model-00002-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a64e22975c2ef87b3b5edcb38e59f20adfdf0cb094204ef6f3ef6e80d23c2200
+size 1151605131
model.safetensors.index.json
ADDED
The diff for this file is too large to render.
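
Although too large to render here, model.safetensors.index.json is just a JSON manifest: a "metadata" block with the total checkpoint size and a "weight_map" that assigns every tensor name to one of the two shards above. A minimal sketch of reading it (assumes a local copy; the tensor name at the end is illustrative, not verified against this checkpoint):

import json

with open("model.safetensors.index.json") as f:  # assumes a local download
    index = json.load(f)

# Total bytes across both shards; should equal the two LFS sizes combined.
print(index["metadata"]["total_size"])

# weight_map: {tensor name -> shard filename}; loaders use it to open only
# the shard(s) containing the tensors they need.
weight_map = index["weight_map"]
print(weight_map.get("model.embed_tokens.weight"))  # illustrative tensor name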