Maziko committed on
Commit
352116b
·
verified ·
1 Parent(s): e849166

Upload folder using huggingface_hub

Browse files
added_tokens.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "</think>": 151668,
3
  "</tool_call>": 151658,
4
- "</tool_response>": 151666,
5
  "<think>": 151667,
6
  "<tool_call>": 151657,
7
  "<tool_response>": 151665,
@@ -25,4 +25,4 @@
25
  "<|vision_end|>": 151653,
26
  "<|vision_pad|>": 151654,
27
  "<|vision_start|>": 151652
28
- }
 
1
  {
2
  "</think>": 151668,
3
  "</tool_call>": 151658,
4
+ "</tool_response>": 151666,
5
  "<think>": 151667,
6
  "<tool_call>": 151657,
7
  "<tool_response>": 151665,
 
25
  "<|vision_end|>": 151653,
26
  "<|vision_pad|>": 151654,
27
  "<|vision_start|>": 151652
28
+ }
config.json CHANGED
@@ -62,8 +62,8 @@
62
  "sliding_window": null,
63
  "tie_word_embeddings": true,
64
  "torch_dtype": "bfloat16",
65
- "transformert_version": "4.55.4",
66
  "use_cache": false,
67
  "use_sliding_window": false,
68
  "vocab_size": 151936
69
- }
 
62
  "sliding_window": null,
63
  "tie_word_embeddings": true,
64
  "torch_dtype": "bfloat16",
65
+ "transformers_version": "4.55.4",
66
  "use_cache": false,
67
  "use_sliding_window": false,
68
  "vocab_size": 151936
69
+ }
generation_config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "do_sample": true,
3
  "eos_token_id": [
4
- 151745,
5
  151643
6
  ],
7
  "pad_token_id": 151643,
@@ -9,4 +9,4 @@
9
  "top_k": 20,
10
  "top_p": 0.8,
11
  "transformers_version": "4.55.4"
12
- }
 
1
  {
2
  "do_sample": true,
3
  "eos_token_id": [
4
+ 151645,
5
  151643
6
  ],
7
  "pad_token_id": 151643,
 
9
  "top_k": 20,
10
  "top_p": 0.8,
11
  "transformers_version": "4.55.4"
12
+ }
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_parameters": 4411424266,
4
  "total_size": 8822848512
5
  },
6
  "weight_map": {
@@ -404,4 +404,4 @@
404
  "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
405
  "model.norm.weight": "model-00001-of-00002.safetensors"
406
  }
407
- }
 
1
  {
2
  "metadata": {
3
+ "total_parameters": 4411424256,
4
  "total_size": 8822848512
5
  },
6
  "weight_map": {
 
404
  "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
405
  "model.norm.weight": "model-00001-of-00002.safetensors"
406
  }
407
+ }
special_tokens_map.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "additional_special_tokens": [
3
- "<|im_start}>",
4
  "<|im_end|>",
5
  "<|object_ref_start|>",
6
  "<|object_ref_end|>",
@@ -28,4 +28,4 @@
28
  "rstrip": false,
29
  "single_word": false
30
  }
31
- }
 
1
  {
2
  "additional_special_tokens": [
3
+ "<|im_start|>",
4
  "<|im_end|>",
5
  "<|object_ref_start|>",
6
  "<|object_ref_end|>",
 
28
  "rstrip": false,
29
  "single_word": false
30
  }
31
+ }
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5de371d782636ee4ded00369134a9a9ab8bd99512d4888e621c3f893f477339d
3
- size 11422654
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:23777c5162632763d59cafcea5342a5d774089a3322758c04eb12e757f8f1cd8
3
+ size 11422655
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "add_bos_token": false,
3
- "add_prefix_space": galse,
4
  "added_tokens_decoder": {
5
  "151643": {
6
  "content": "<|endoftext|>",
@@ -236,4 +236,4 @@
236
  "split_special_tokens": false,
237
  "tokenizer_class": "Qwen2Tokenizer",
238
  "unk_token": null
239
- }
 
1
  {
2
  "add_bos_token": false,
3
+ "add_prefix_space": false,
4
  "added_tokens_decoder": {
5
  "151643": {
6
  "content": "<|endoftext|>",
 
236
  "split_special_tokens": false,
237
  "tokenizer_class": "Qwen2Tokenizer",
238
  "unk_token": null
239
+ }
vocab.json CHANGED
The diff for this file is too large to render. See raw diff