VSRV committed on
Commit
f9341da
·
verified ·
1 Parent(s): 5182b80

Upload folder using huggingface_hub

Browse files
README.md CHANGED
@@ -1,11 +1,13 @@
1
  ---
2
- base_model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
3
  library_name: peft
4
- pipeline_tag: text-generation
5
  tags:
6
- - base_model:adapter:TinyLlama/TinyLlama-1.1B-Chat-v1.0
7
  - lora
 
8
  - transformers
 
 
9
  ---
10
 
11
  # Model Card for Model ID
@@ -204,4 +206,4 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
204
  [More Information Needed]
205
  ### Framework versions
206
 
207
- - PEFT 0.17.0
 
1
  ---
2
+ base_model: unsloth/tinyllama-chat
3
  library_name: peft
 
4
  tags:
5
+ - base_model:adapter:unsloth/tinyllama-chat
6
  - lora
7
+ - sft
8
  - transformers
9
+ - trl
10
+ - unsloth
11
  ---
12
 
13
  # Model Card for Model ID
 
206
  [More Information Needed]
207
  ### Framework versions
208
 
209
+ - PEFT 0.17.1
adapter_config.json CHANGED
@@ -1,7 +1,10 @@
1
  {
2
  "alpha_pattern": {},
3
- "auto_mapping": null,
4
- "base_model_name_or_path": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
 
 
 
5
  "bias": "none",
6
  "corda_config": null,
7
  "eva_config": null,
@@ -29,7 +32,7 @@
29
  "q_proj"
30
  ],
31
  "target_parameters": null,
32
- "task_type": "CAUSAL_LM",
33
  "trainable_token_indices": null,
34
  "use_dora": false,
35
  "use_qalora": false,
 
1
  {
2
  "alpha_pattern": {},
3
+ "auto_mapping": {
4
+ "base_model_class": "LlamaForCausalLM",
5
+ "parent_library": "transformers.models.llama.modeling_llama"
6
+ },
7
+ "base_model_name_or_path": "unsloth/tinyllama-chat",
8
  "bias": "none",
9
  "corda_config": null,
10
  "eva_config": null,
 
32
  "q_proj"
33
  ],
34
  "target_parameters": null,
35
+ "task_type": null,
36
  "trainable_token_indices": null,
37
  "use_dora": false,
38
  "use_qalora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:59c6a7df8e70110aba8ab57f9e0fd4dd7fea46889c57fb8e00d10d68907405cc
3
  size 4517152
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9d671e9937b3969bcf4e56d474cd2f3204633a79b9918a8c2cf55495fc3cd58c
3
  size 4517152
special_tokens_map.json CHANGED
@@ -14,7 +14,7 @@
14
  "single_word": false
15
  },
16
  "pad_token": {
17
- "content": "</s>",
18
  "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
 
14
  "single_word": false
15
  },
16
  "pad_token": {
17
+ "content": "<unk>",
18
  "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
tokenizer.json CHANGED
@@ -12,9 +12,9 @@
12
  },
13
  "direction": "Right",
14
  "pad_to_multiple_of": null,
15
- "pad_id": 2,
16
  "pad_type_id": 0,
17
- "pad_token": "</s>"
18
  },
19
  "added_tokens": [
20
  {
 
12
  },
13
  "direction": "Right",
14
  "pad_to_multiple_of": null,
15
+ "pad_id": 0,
16
  "pad_type_id": 0,
17
+ "pad_token": "<unk>"
18
  },
19
  "added_tokens": [
20
  {
tokenizer_config.json CHANGED
@@ -34,7 +34,7 @@
34
  "extra_special_tokens": {},
35
  "legacy": false,
36
  "model_max_length": 2048,
37
- "pad_token": "</s>",
38
  "padding_side": "right",
39
  "sp_model_kwargs": {},
40
  "tokenizer_class": "LlamaTokenizer",
 
34
  "extra_special_tokens": {},
35
  "legacy": false,
36
  "model_max_length": 2048,
37
+ "pad_token": "<unk>",
38
  "padding_side": "right",
39
  "sp_model_kwargs": {},
40
  "tokenizer_class": "LlamaTokenizer",