Blancy committed on
Commit
d6ec070
·
verified ·
1 Parent(s): 01a01db

Training in progress, epoch 0

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "Qwen/Qwen2.5-1.5B-Instruct",
3
  "architectures": [
4
  "Qwen2ForCausalLM"
5
  ],
@@ -23,7 +23,7 @@
23
  "tie_word_embeddings": true,
24
  "torch_dtype": "bfloat16",
25
  "transformers_version": "4.49.0",
26
- "use_cache": true,
27
  "use_sliding_window": false,
28
  "vocab_size": 151936
29
  }
 
1
  {
2
+ "_name_or_path": "/dengwenbo/224015062/open-r1-main/data/Qwen2.5-1.5B-Open-R1-Distill",
3
  "architectures": [
4
  "Qwen2ForCausalLM"
5
  ],
 
23
  "tie_word_embeddings": true,
24
  "torch_dtype": "bfloat16",
25
  "transformers_version": "4.49.0",
26
+ "use_cache": false,
27
  "use_sliding_window": false,
28
  "vocab_size": 151936
29
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:175a0465357f8f85e18cf2af55d5468ab756f79a919f1eb81fd5d3a9be44b8e6
3
  size 3554214752
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8ed11b891ce188a84c33064c8e9118704a03168043400f9ccd0c216c075fc07f
3
  size 3554214752
special_tokens_map.json CHANGED
@@ -22,7 +22,7 @@
22
  "single_word": false
23
  },
24
  "pad_token": {
25
- "content": "<|endoftext|>",
26
  "lstrip": false,
27
  "normalized": false,
28
  "rstrip": false,
 
22
  "single_word": false
23
  },
24
  "pad_token": {
25
+ "content": "<|im_end|>",
26
  "lstrip": false,
27
  "normalized": false,
28
  "rstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5eee858c5123a4279c3e1f7b81247343f356ac767940b2692a928ad929543214
3
- size 11422063
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:63a2951d5edfa5cc0a2346ef872f8c77a2920274cfc3b503b04e3799104dee80
3
+ size 11422060
tokenizer_config.json CHANGED
@@ -201,7 +201,7 @@
201
  "errors": "replace",
202
  "extra_special_tokens": {},
203
  "model_max_length": 131072,
204
- "pad_token": "<|endoftext|>",
205
  "split_special_tokens": false,
206
  "tokenizer_class": "Qwen2Tokenizer",
207
  "unk_token": null
 
201
  "errors": "replace",
202
  "extra_special_tokens": {},
203
  "model_max_length": 131072,
204
+ "pad_token": "<|im_end|>",
205
  "split_special_tokens": false,
206
  "tokenizer_class": "Qwen2Tokenizer",
207
  "unk_token": null
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:137cd08454ccd0a5edffb59f99f53f77ebc0d9d2a9d31ef17675ac85147d839f
3
  size 7992
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c6ae1b0edde2fb221a1a75daac02aad68b90a4413c3a441c1bef158e2ad0fe9c
3
  size 7992