DatPySci committed on
Commit c065dbd · verified · 1 Parent(s): e516cd9

upload sft

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. .gitattributes +32 -0
  2. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/data.pt +3 -0
  3. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_0.pt +3 -0
  4. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_1.pt +3 -0
  5. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_2.pt +3 -0
  6. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_3.pt +3 -0
  7. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/fsdp_config.json +4 -0
  8. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/added_tokens.json +24 -0
  9. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/chat_template.jinja +54 -0
  10. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/config.json +66 -0
  11. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/generation_config.json +14 -0
  12. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/merges.txt +0 -0
  13. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/special_tokens_map.json +31 -0
  14. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/tokenizer.json +3 -0
  15. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/tokenizer_config.json +207 -0
  16. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/vocab.json +0 -0
  17. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_0.pt +3 -0
  18. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_1.pt +3 -0
  19. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_2.pt +3 -0
  20. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_3.pt +3 -0
  21. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_0.pt +3 -0
  22. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_1.pt +3 -0
  23. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_2.pt +3 -0
  24. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_3.pt +3 -0
  25. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/data.pt +3 -0
  26. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_0.pt +3 -0
  27. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_1.pt +3 -0
  28. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_2.pt +3 -0
  29. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_3.pt +3 -0
  30. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/fsdp_config.json +4 -0
  31. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/added_tokens.json +24 -0
  32. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/chat_template.jinja +54 -0
  33. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/config.json +66 -0
  34. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/generation_config.json +14 -0
  35. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/merges.txt +0 -0
  36. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/special_tokens_map.json +31 -0
  37. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/tokenizer.json +3 -0
  38. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/tokenizer_config.json +207 -0
  39. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/vocab.json +0 -0
  40. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_0.pt +3 -0
  41. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_1.pt +3 -0
  42. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_2.pt +3 -0
  43. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_3.pt +3 -0
  44. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_0.pt +3 -0
  45. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_1.pt +3 -0
  46. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_2.pt +3 -0
  47. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_3.pt +3 -0
  48. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_248/data.pt +3 -0
  49. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_248/extra_state_world_size_4_rank_0.pt +3 -0
  50. SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_248/extra_state_world_size_4_rank_1.pt +3 -0
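
The checkpoints above are laid out one directory per training step (global_step_62 through global_step_496) and per run (s1k_8/16/32/64). A minimal sketch of fetching a single step directory with huggingface_hub; the repo id below is a placeholder, since the commit page does not name the repo:

from huggingface_hub import snapshot_download

# "DatPySci/placeholder-repo" is hypothetical -- substitute the real Hub repo.
local_dir = snapshot_download(
    repo_id="DatPySci/placeholder-repo",
    allow_patterns=["SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/*"],
)
print(local_dir)
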
.gitattributes CHANGED
@@ -285,3 +285,35 @@ gen_outputs/step512/lora_polaris_rollout_sz16_rank64_step512_adamw/olympiad-benc
 gen_outputs/step512/lora_polaris_rollout_sz16_rank8_step512_adamw/math-500_t0.6_p0.95_n192-MNT4096.jsonl filter=lfs diff=lfs merge=lfs -text
 gen_outputs/step512/lora_polaris_rollout_sz16_rank8_step512_adamw/minerva_t0.6_p0.95_n192-MNT4096.jsonl filter=lfs diff=lfs merge=lfs -text
 gen_outputs/step512/lora_polaris_rollout_sz16_rank8_step512_adamw/olympiad-bench_t0.6_p0.95_n192-MNT4096.jsonl filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_248/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_310/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_372/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_434/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_496/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_62/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_124/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_186/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_248/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_310/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_372/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_434/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_496/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_32/global_step_62/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_124/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_186/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_248/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_310/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_372/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_434/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_496/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_64/global_step_62/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_124/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_186/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_248/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_310/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_372/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_434/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_496/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ SFT/Qwen2.5-3B-Instruct-s1k_8/global_step_62/huggingface/tokenizer.json filter=lfs diff=lfs merge=lfs -text
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/data.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e7991d21c35f64c9de0ce6a751cc59b2b23238aa9be9c0394afb730fb15f629
+ size 1947
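
Every binary file in this commit is stored as a Git LFS pointer like the one above: three lines giving the spec version, a sha256 object id, and the byte size. A small dependency-free sketch for reading those fields:

def parse_lfs_pointer(path):
    # The pointer format is exactly: "version <url>", "oid sha256:<hex>", "size <n>".
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    algo, _, digest = fields["oid"].partition(":")
    return {"algo": algo, "digest": digest, "size": int(fields["size"])}

# parse_lfs_pointer("SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/data.pt")
# -> {"algo": "sha256", "digest": "7e7991d2...", "size": 1947}
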
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b95aed0b0b8a3a3ff630c7ca917d0927346cb7a8d4bd7be10c204872c7ca06e3
+ size 15141
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:faa19870d8728af9bf7bdc1e05bd8754a614f6a79345b88f03c494c890d4eb1a
+ size 15077
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de98d66e499dc109dbfaa5646f3e744761428cfebed012309181feb822f6c8dc
+ size 15077
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/extra_state_world_size_4_rank_3.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4aba77a369e6081fe8d254c799238d05375d18064d17066ea0f9df26909957d1
+ size 15141
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/fsdp_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "FSDP_version": 2,
+ "world_size": 4
+ }
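
fsdp_config.json records that the checkpoint was written by FSDP version 2 over 4 ranks, matching the *_world_size_4_rank_{0..3}.pt shards in this directory. A hedged sketch of locating and loading the raw shards; how to merge them back into one state dict is framework-specific and not shown here:

import json
import torch

step_dir = "SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124"
with open(f"{step_dir}/fsdp_config.json") as f:
    cfg = json.load(f)

# weights_only=False because these are arbitrary pickled checkpoints;
# only do this for files from a trusted source.
shards = [
    torch.load(
        f"{step_dir}/model_world_size_{cfg['world_size']}_rank_{r}.pt",
        map_location="cpu",
        weights_only=False,
    )
    for r in range(cfg["world_size"])
]
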
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
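
added_tokens.json pins the ChatML, tool-call, FIM, and vision tokens to fixed ids in the 151643-151664 range. A quick sketch cross-checking it against the tokenizer shipped in the same huggingface/ folder (assuming that folder has been downloaded locally):

import json
from transformers import AutoTokenizer

hf_dir = "SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface"
tok = AutoTokenizer.from_pretrained(hf_dir)
with open(f"{hf_dir}/added_tokens.json") as f:
    for token, expected_id in json.load(f).items():
        assert tok.convert_tokens_to_ids(token) == expected_id
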
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/chat_template.jinja ADDED
@@ -0,0 +1,54 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- messages[0]['content'] }}
+ {%- else %}
+ {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
+ {%- endif %}
+ {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
+ {%- else %}
+ {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
+ {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {{- '<|im_start|>' + message.role }}
+ {%- if message.content %}
+ {{- '\n' + message.content }}
+ {%- endif %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if tool_call.function is defined %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '\n<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {{- tool_call.arguments | tojson }}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- message.content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
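
This is the stock Qwen2.5 ChatML template: a system turn (falling back to the default Qwen persona), optional <tools> function signatures, and each message framed by <|im_start|>/<|im_end|>. A minimal sketch of rendering it through the tokenizer:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface"
)
text = tok.apply_chat_template(
    [{"role": "user", "content": "What is 2 + 2?"}],
    tokenize=False,
    add_generation_prompt=True,  # appends '<|im_start|>assistant\n'
)
print(text)
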
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/config.json ADDED
@@ -0,0 +1,66 @@
+ {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "dtype": "float32",
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 32768,
+ "max_window_layers": 70,
+ "model_type": "qwen2",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "transformers_version": "4.57.6",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
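
This config is the Qwen2.5-3B architecture (36 layers, hidden size 2048, 16 query heads with 2 KV heads, tied embeddings) saved in float32. A back-of-the-envelope parameter count from these fields, ignoring biases and norm weights:

h, inter, layers, vocab, heads, kv_heads = 2048, 11008, 36, 151936, 16, 2

head_dim = h // heads                                  # 128
attn = h * h + 2 * h * (head_dim * kv_heads) + h * h   # q, k, v, o projections
mlp = 3 * h * inter                                    # gate, up, down projections
total = vocab * h + layers * (attn + mlp)              # tied embeddings: no lm_head
print(f"~{total / 1e9:.2f}B parameters")               # ~3.09B

At 4 bytes per float32 parameter that is roughly 12.3 GB, consistent with the four ~3.1 GB model_world_size_4_rank_*.pt shards further down.
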
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.57.6"
+ }
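
generation_config.json carries the sampling defaults (temperature 0.7, top-p 0.8, top-k 20, repetition penalty 1.05). They can be inspected without loading any weights:

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(
    "SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface"
)
print(gen_cfg.temperature, gen_cfg.top_p, gen_cfg.top_k, gen_cfg.repetition_penalty)
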
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/tokenizer_config.json ADDED
@@ -0,0 +1,207 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
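
A short sanity check that a loaded tokenizer reflects this config (ChatML eos, endoftext pad, and the 131072-token model_max_length):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface"
)
assert tok.eos_token == "<|im_end|>"
assert tok.pad_token == "<|endoftext|>"
assert tok.model_max_length == 131072
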
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/huggingface/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:58e14fea3a397fdbd8223cf51ed778799c6a17207b63060e9705ee559f75d1f5
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7c524e5189dd87a6e19fd7ea2bc073f9b571ecd592425b63b0d691946996387
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5ee3c734ffc6462266d395f6927873cbcfdf7719ebe67dac2e537c14788270d
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/model_world_size_4_rank_3.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44b40ac0d5d19fb5d02746c2ee7b5ae902bbfcd3713c1344123315a40b863978
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61d7a2807934299df9d6fda58a2d434fb8bd9a0b356a1a1d6c270dfc15083508
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2321e877afaf3454bc0b69a105eddb31b9ef514c54dd84060dc707cfa43b797a
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef450d561879b411a8361faf2bdf79d2221038213d577ae43e96101888e99d30
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_124/optim_world_size_4_rank_3.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dcce869b166667a1d78f643de3d503e16653a557b55d6df235d6b721d1783f87
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/data.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22fe19de974e34fe94fbad6fe98c6a2d054b2cba21415f03443cd8df12031f25
+ size 1947
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c32a2e9ea54d4fd1ce5f1ac3f95d7d0d2eacc0a95091a566e3d22264a80fa24
+ size 15141
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8c78e11edaa5958131cad5cb425fdbc444b258f9ad903e27e631db8c819e4f9
+ size 15077
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8709d74f3f3f128067c4694d804857e86de45c7a69e2af0791c0447f7bbe84f
+ size 15077
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/extra_state_world_size_4_rank_3.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5a00c929423ed583901cf7c0782e7dce2e3e0faad1dff626cd853c6e5d707007
+ size 15141
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/fsdp_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "FSDP_version": 2,
+ "world_size": 4
+ }
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/chat_template.jinja ADDED
@@ -0,0 +1,54 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- messages[0]['content'] }}
+ {%- else %}
+ {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
+ {%- endif %}
+ {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
+ {%- else %}
+ {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
+ {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {{- '<|im_start|>' + message.role }}
+ {%- if message.content %}
+ {{- '\n' + message.content }}
+ {%- endif %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if tool_call.function is defined %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '\n<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {{- tool_call.arguments | tojson }}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- message.content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/config.json ADDED
@@ -0,0 +1,66 @@
+ {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "dtype": "float32",
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 32768,
+ "max_window_layers": 70,
+ "model_type": "qwen2",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "transformers_version": "4.57.6",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.57.6"
+ }
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/tokenizer_config.json ADDED
@@ -0,0 +1,207 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/huggingface/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da08b8fc0edab11c14570ca3efea89d0ace6bfce0599e15d2d6a0b80c0444201
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec6dcf6aad4c4fa2c55267634a5b26b97ad729f4b6c38d41a54722f3033f61b8
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4f38c644ec59e724ab7a40a50be10f2d20fe3ff539d6c446c64aa8871c34c2e
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/model_world_size_4_rank_3.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:adc92961c1e93974f49fe93aa1e149d0cd000a4d03a2d0609ebdd7de8b5e0582
+ size 3116579099
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6b03c5f92908cacca6885c72b9e4071c4645393b92bb1807955274cf6f1f0a1
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b970e021af911d687fe53428c15ef6f1e4301642c44cf98fa16f2abaa3b50ad7
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4775ad7618b7f0d2c24977d74f971081820af1d18254bc18db26ab2d4d412389
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_186/optim_world_size_4_rank_3.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f3a2707ac2ce0f4e9d2c9cf0bc8d2e872bb0e46947ebc6cdf80b2c373d1a516
+ size 60448572
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_248/data.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b636a1959196af9424c5a03e6df2e00a22133cbf53c6c66a263db0867dc1bcaf
+ size 1947
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_248/extra_state_world_size_4_rank_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c208afed273bc5abec29ef1bfba9817186ae3bff518963ba7e02fa6e6f2301ea
+ size 15141
SFT/Qwen2.5-3B-Instruct-s1k_16/global_step_248/extra_state_world_size_4_rank_1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f41f34ec212b1095de3c2a21106eeca44455a086c67334363dc47eb2d6194ed6
+ size 15077