stk5 committed
Commit bb4fd8c · verified · 1 Parent(s): 9573ac8

Add files using upload-large-folder tool

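For reference, commits like this one are typically produced with the huggingface_hub client named in the commit message; a minimal sketch, where the repo id and local path are placeholders rather than values taken from this commit:

from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="<user>/<repo>",           # placeholder, not from this commit
    folder_path="./local-checkpoint",  # placeholder local folder of shards
    repo_type="model",
)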
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:101cdbb4077db22723914c5c74bc94e1cabf23e80e37607c95e0c713264482a2
+ oid sha256:f395a4a5628812411a2cefaf3ae2c53b04a5543fd376f904a7a44fd266af5a1e
  size 4976698672
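Each block above is a Git LFS pointer (version, oid, size), so a downloaded shard can be checked against the new pointer; a minimal sketch using only the Python standard library, with the digest and size taken from the updated pointer:

import hashlib
import os

# Expected values from the updated LFS pointer above.
EXPECTED_OID = "f395a4a5628812411a2cefaf3ae2c53b04a5543fd376f904a7a44fd266af5a1e"
EXPECTED_SIZE = 4976698672
PATH = "model-00001-of-00004.safetensors"

def sha256_of(path, chunk_size=1 << 20):
    # Stream in 1 MiB chunks so a ~5 GB shard never has to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"
assert sha256_of(PATH) == EXPECTED_OID, "sha256 mismatch"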
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:965369d6c0e4da9c55b87ed2667be536bb4aad5f7cb629e47b617e9f88cbde09
+ oid sha256:a891683959c7f58b06bb50127e7e231a156503ed5b8d2f705c47a1d44613b234
  size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:49b004851aeec693c177902fc5dc0e591d85aa22f891c14995ffb8d1d2847f4c
+ oid sha256:33c5875340a13f7e57a602e7549e95f35058e1539930aa39ed1fc48d2cd4edc8
  size 4915916176
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:384a7e7c676f7be2e5d2e8449c508be9b00e5b18c5b3c39ebc626e96b3f4b988
- size 17210019
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
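Unlike the model shards, both the oid and the size of tokenizer.json change (17210019 → 17209920 bytes), so its content was actually edited rather than merely re-uploaded; a quick sanity check that the replacement still parses, assuming the tokenizers library is available:

from tokenizers import Tokenizer

# Load the updated tokenizer.json directly and confirm it deserializes.
tok = Tokenizer.from_file("tokenizer.json")
print(tok.get_vocab_size())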
tokenizer_config.json CHANGED
@@ -2053,7 +2053,6 @@
  "chat_template": "{{- bos_token }}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{{messages[0]['content']|trim}}{{- \"<|start_header_id|>user<|end_header_id|>\\n\\n\" -}}\n{{messages[1]['content']|trim}}<|eot_id|>{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- if messages[2] is defined %}\n{{messages[2]['content']|trim}}<|eot_id|>{%- endif %}{%- endif %}\n",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|eot_id|>",
- "max_length": 2048,
  "model_input_names": [
    "input_ids",
    "attention_mask"
@@ -2061,8 +2060,5 @@
  "model_max_length": 131072,
  "pad_token": "<|finetune_right_pad_id|>",
  "padding_side": "right",
- "stride": 0,
- "tokenizer_class": "PreTrainedTokenizerFast",
- "truncation_side": "right",
- "truncation_strategy": "longest_first"
+ "tokenizer_class": "PreTrainedTokenizerFast"
  }
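The kept chat_template and the trimmed config can be exercised end to end; a minimal sketch with transformers, where the repo id is a placeholder and the keys removed above (max_length, stride, and the truncation settings) are supplied per call instead of living in tokenizer_config.json:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("<user>/<repo>")  # placeholder repo id

messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

# The dropped keys are ordinary call-time arguments, not config entries:
encoded = tokenizer(prompt, max_length=2048, truncation="longest_first", stride=0)
print(encoded["input_ids"][:10])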