Upload folder using huggingface_hub
- README.md +4 -4
- config.json +1 -1
- mergekit_config.yml +2 -2
- model-00001-of-00001.safetensors +1 -1
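For reference, a commit like this one can be produced with the `upload_folder` helper from `huggingface_hub`. A minimal sketch; the repo id and local path below are placeholders, not values taken from this commit:

```python
from huggingface_hub import upload_folder

# Placeholder repo id and local directory -- not taken from this commit.
upload_folder(
    repo_id="your-username/your-merged-model",
    folder_path="./merged-model",
    commit_message="Upload folder using huggingface_hub",
)
```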
README.md
CHANGED
@@ -1,7 +1,7 @@
 ---
 base_model:
+- abc88767/2c75
 - rwh/s1_1712781243
-- abc88767/22c75
 library_name: transformers
 tags:
 - mergekit
@@ -20,8 +20,8 @@ This model was merged using the SLERP merge method.
 ### Models Merged
 
 The following models were included in the merge:
+* [abc88767/2c75](https://huggingface.co/abc88767/2c75)
 * [rwh/s1_1712781243](https://huggingface.co/rwh/s1_1712781243)
-* [abc88767/22c75](https://huggingface.co/abc88767/22c75)
 
 ### Configuration
 
@@ -33,10 +33,10 @@ slices:
 - sources:
   - model: rwh/s1_1712781243
     layer_range: [0, 24]
-  - model: abc88767/22c75
+  - model: abc88767/2c75
     layer_range: [0, 24]
 merge_method: slerp
-base_model: abc88767/22c75
+base_model: abc88767/2c75
 parameters:
   t:
   - filter: self_attn
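The SLERP method named above interpolates each pair of parameter tensors along a great circle rather than a straight line, with `t` choosing the blend point and the `filter: self_attn` entry allowing the attention weights to use a different `t` than the rest of the model. A minimal sketch of the underlying operation, not mergekit's actual implementation:

```python
import numpy as np

def slerp(t: float, a: np.ndarray, b: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    """Spherical linear interpolation between two weight tensors."""
    a_flat = a.ravel().astype(np.float64)
    b_flat = b.ravel().astype(np.float64)
    a_unit = a_flat / (np.linalg.norm(a_flat) + eps)
    b_unit = b_flat / (np.linalg.norm(b_flat) + eps)
    # Angle between the two tensors, clipped for numerical safety.
    omega = np.arccos(np.clip(np.dot(a_unit, b_unit), -1.0, 1.0))
    if np.sin(omega) < eps:
        # Nearly parallel tensors: fall back to plain linear interpolation.
        return ((1.0 - t) * a_flat + t * b_flat).reshape(a.shape)
    out = (np.sin((1.0 - t) * omega) * a_flat + np.sin(t * omega) * b_flat) / np.sin(omega)
    return out.reshape(a.shape)
```

With t=0 this returns the first tensor and with t=1 the second; intermediate values blend the two checkpoints layer by layer.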
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "abc88767/22c75",
+  "_name_or_path": "abc88767/2c75",
   "architectures": [
     "StableLmForCausalLM"
   ],
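Since config.json declares the `StableLmForCausalLM` architecture, the merged checkpoint loads through the standard transformers auto classes. A sketch with a placeholder repo id:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder repo id -- substitute the actual repository for this model.
repo = "your-username/your-merged-model"
model = AutoModelForCausalLM.from_pretrained(repo)
tokenizer = AutoTokenizer.from_pretrained(repo)
```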
mergekit_config.yml
CHANGED
@@ -3,10 +3,10 @@ slices:
 - sources:
   - model: rwh/s1_1712781243
     layer_range: [0, 24]
-  - model: abc88767/22c75
+  - model: abc88767/2c75
     layer_range: [0, 24]
 merge_method: slerp
-base_model: abc88767/22c75
+base_model: abc88767/2c75
 parameters:
   t:
   - filter: self_attn
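mergekit can consume a config like the one above either via its `mergekit-yaml` CLI or from Python. A sketch of the Python route, assuming a recent mergekit release with the `run_merge` entry point; the output path is a placeholder:

```python
import yaml
from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the merge recipe shown above and run it; "./merged" is a placeholder.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))
run_merge(merge_config, out_path="./merged", options=MergeOptions(copy_tokenizer=True))
```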
model-00001-of-00001.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:339df45ee76ecd981d85a14b75e1f3e037d2c55f180322cb4ebb17aba535880c
 size 3289069520
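The safetensors entry above is a Git LFS pointer file: the repository stores only the object id (the sha256 of the real weights file) and its size, 3289069520 bytes. A sketch of verifying a downloaded copy against that oid:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream a file through sha256 so large checkpoints need not fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            digest.update(block)
    return digest.hexdigest()

expected = "339df45ee76ecd981d85a14b75e1f3e037d2c55f180322cb4ebb17aba535880c"
assert sha256_of("model-00001-of-00001.safetensors") == expected
```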