Upload folder using huggingface_hub
- demo/README.md +6 -2
- demo/adapter_config.json +3 -3
- demo/adapter_model.safetensors +1 -1
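The commit title is the default message that huggingface_hub attaches when a local folder is pushed with HfApi.upload_folder. A minimal sketch of how a commit like this could be produced, assuming a placeholder repo id (the actual repository is not named on this page):

```python
# Sketch: pushing the local "demo" folder to the Hub with huggingface_hub.
# Assumes you are already authenticated (e.g. via `huggingface-cli login`);
# the repo id below is a placeholder, not the repository behind this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="demo",                       # local folder with README.md, adapter_config.json, ...
    repo_id="your-username/sft-custom-lora",  # hypothetical repo id
    repo_type="model",
    # commit_message defaults to "Upload folder using huggingface_hub"
)
```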
demo/README.md
CHANGED

@@ -1,12 +1,15 @@
 ---
 base_model: facebook/opt-350m
-library_name:
+library_name: peft
 model_name: sft-custom-lora
 tags:
--
+- base_model:adapter:facebook/opt-350m
+- lora
 - sft
+- transformers
 - trl
 licence: license
+pipeline_tag: text-generation
 ---
 
 # Model Card for sft-custom-lora
@@ -34,6 +37,7 @@ This model was trained with SFT.
 
 ### Framework versions
 
+- PEFT 0.18.1
 - TRL: 0.27.1
 - Transformers: 5.0.0
 - Pytorch: 2.8.0
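The metadata added to the card (library_name: peft, the adapter/lora/transformers tags, pipeline_tag, and the PEFT entry under Framework versions) is what a TRL SFT run emits when it is given a PEFT LoRA config. A minimal sketch of such a run, assuming a placeholder dataset and illustrative hyperparameters; only the base model, the output folder, and the LoRA target modules (see the adapter_config.json diff below) come from this commit:

```python
# Sketch: SFT of facebook/opt-350m with a LoRA adapter via TRL.
# Dataset choice and hyperparameters are illustrative, not taken from this commit.
from datasets import load_dataset
from peft import LoraConfig
from trl import SFTConfig, SFTTrainer

dataset = load_dataset("trl-lib/Capybara", split="train")  # placeholder dataset

peft_config = LoraConfig(
    r=16,           # rank is a guess; it is not recorded in the README diff
    lora_alpha=32,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],  # matches adapter_config.json
    task_type="CAUSAL_LM",
)

trainer = SFTTrainer(
    model="facebook/opt-350m",
    args=SFTConfig(output_dir="demo"),
    train_dataset=dataset,
    peft_config=peft_config,
)
trainer.train()
trainer.save_model("demo")  # writes README.md, adapter_config.json, adapter_model.safetensors
```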
demo/adapter_config.json
CHANGED

@@ -29,10 +29,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "
+    "v_proj",
     "o_proj",
-    "
-    "
+    "q_proj",
+    "k_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
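With the target_modules list written out as v_proj, o_proj, q_proj, and k_proj, the adapter loads on top of the base model in the usual PEFT way. A minimal sketch, assuming a placeholder repo id; passing the local demo folder instead would work the same way:

```python
# Sketch: loading the LoRA adapter on top of the base model with PEFT.
# "your-username/sft-custom-lora" is a placeholder; a local "demo" path also works.
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained("facebook/opt-350m")
model = PeftModel.from_pretrained(base, "your-username/sft-custom-lora")
tokenizer = AutoTokenizer.from_pretrained("facebook/opt-350m")

inputs = tokenizer("Hello, my name is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```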
demo/adapter_model.safetensors
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1dd421e2d02e36b8559baec7afe62af6d355a645f4c1369a0fe33699cd971092
 size 18894768
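Only the Git LFS pointer changes here: oid is the SHA-256 of the new adapter weights and size is their byte count, which is unchanged. A small sketch for checking a downloaded adapter_model.safetensors against this pointer, assuming a hypothetical local path:

```python
# Sketch: verifying a downloaded adapter_model.safetensors against the LFS pointer.
# The local path is hypothetical; the expected digest and size come from the pointer above.
import hashlib
import os

path = "demo/adapter_model.safetensors"
expected_oid = "1dd421e2d02e36b8559baec7afe62af6d355a645f4c1369a0fe33699cd971092"
expected_size = 18894768

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("pointer matches file contents")
```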