Commit c210be0 (verified), committed by DatPySci
Parent(s): fcd4313

upload evolm

Note: this view is limited to 50 files because the commit contains too many changes.

Files changed (50):
  1. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/chat_template.jinja +4 -0
  2. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/config.json +30 -0
  3. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/generation_config.json +6 -0
  4. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/model.safetensors +3 -0
  5. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/special_tokens_map.json +30 -0
  6. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/tokenizer.json +0 -0
  7. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/tokenizer.model +3 -0
  8. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/tokenizer_config.json +44 -0
  9. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/chat_template.jinja +4 -0
  10. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/config.json +30 -0
  11. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/generation_config.json +6 -0
  12. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/model.safetensors +3 -0
  13. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/special_tokens_map.json +30 -0
  14. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/tokenizer.json +0 -0
  15. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/tokenizer.model +3 -0
  16. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/tokenizer_config.json +44 -0
  17. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/chat_template.jinja +4 -0
  18. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/config.json +30 -0
  19. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/generation_config.json +6 -0
  20. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/model.safetensors +3 -0
  21. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/special_tokens_map.json +30 -0
  22. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/tokenizer.json +0 -0
  23. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/tokenizer.model +3 -0
  24. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/tokenizer_config.json +44 -0
  25. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/chat_template.jinja +4 -0
  26. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/config.json +30 -0
  27. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/generation_config.json +6 -0
  28. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/model.safetensors +3 -0
  29. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/special_tokens_map.json +30 -0
  30. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/tokenizer.json +0 -0
  31. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/tokenizer.model +3 -0
  32. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/tokenizer_config.json +44 -0
  33. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/chat_template.jinja +4 -0
  34. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/config.json +30 -0
  35. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/generation_config.json +6 -0
  36. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/model.safetensors +3 -0
  37. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/special_tokens_map.json +30 -0
  38. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/tokenizer.json +0 -0
  39. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/tokenizer.model +3 -0
  40. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/tokenizer_config.json +44 -0
  41. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/chat_template.jinja +4 -0
  42. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/config.json +30 -0
  43. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/generation_config.json +6 -0
  44. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/model.safetensors +3 -0
  45. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/special_tokens_map.json +30 -0
  46. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/tokenizer.json +0 -0
  47. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/tokenizer.model +3 -0
  48. evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/tokenizer_config.json +44 -0
  49. evolm-1b/EvoLM-1B-CPT-160BT-omega-GRPO/global_step_100/actor_hf/chat_template.jinja +4 -0
  50. evolm-1b/EvoLM-1B-CPT-160BT-omega-GRPO/global_step_100/actor_hf/config.json +30 -0
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
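
This template renders a plain Human/Assistant transcript: an optional system message on its own line, `Human: <content>` followed by `Assistant:` for user turns, and the reply plus `</s>` for assistant turns. A minimal rendering sketch, assuming the checkpoint has been downloaded to a local path (the path is illustrative):

```python
from transformers import AutoTokenizer

# Illustrative local path to one exported actor checkpoint.
ckpt = "evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf"
tokenizer = AutoTokenizer.from_pretrained(ckpt)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# You are a helpful assistant.
# Human: What is 2 + 2?
# Assistant:
```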
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 4896,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": false,
+  "vocab_size": 32000
+}
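
The config describes a roughly 1B-parameter Llama-style decoder: 22 layers, hidden size 2048, 32 attention heads with 4 key/value heads (grouped-query attention), a 2048-token context, and a 32000-entry vocabulary. `use_cache` is false here because that is the training-time setting; inference can re-enable it. A minimal loading sketch under the same illustrative path as above:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

ckpt = "evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf"
tokenizer = AutoTokenizer.from_pretrained(ckpt)
# config.json stores the weights as bfloat16; load in the same precision.
model = AutoModelForCausalLM.from_pretrained(ckpt, torch_dtype=torch.bfloat16)
model.eval()

prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Name a prime number."}], tokenize=False
)
inputs = tokenizer(prompt, return_tensors="pt")
# Re-enable the KV cache for generation (use_cache is false in config.json).
out = model.generate(**inputs, max_new_tokens=32, use_cache=True)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```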
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.57.6"
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90ad2192aa671e304577dc6b8095586cc75be12aa0ea06853de401464b23d4a7
+size 2001152552
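
This is a Git LFS pointer, not the weights themselves: git stores only the spec version, the SHA-256 of the real file, and its size (2,001,152,552 bytes, consistent with a ~1B-parameter model at 2 bytes per parameter in bfloat16). One way to materialize the actual file is `huggingface_hub` (the repo id below is an assumption; `git lfs pull` after cloning works as well):

```python
from huggingface_hub import hf_hub_download

# Hypothetical repo id for the repository this commit belongs to.
path = hf_hub_download(
    repo_id="DatPySci/evolm",
    filename="evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/model.safetensors",
)
print(path)  # local cache path to the resolved safetensors file
```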
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/tokenizer.json ADDED
(diff too large to render)
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
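
The tokenizer is a SentencePiece `LlamaTokenizer` that prepends BOS but does not append EOS, and it reuses `</s>` (id 2) as the pad token, matching `pad_token_id` in config.json. A quick sanity check, again under the illustrative local path:

```python
from transformers import AutoTokenizer

ckpt = "evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_100/actor_hf"
tok = AutoTokenizer.from_pretrained(ckpt)

ids = tok("Hello").input_ids
assert ids[0] == tok.bos_token_id                 # add_bos_token: true
assert ids[-1] != tok.eos_token_id                # add_eos_token: false
assert tok.pad_token_id == tok.eos_token_id == 2  # pad reuses </s>
print(tok.special_tokens_map)
```

The remaining diffs repeat the same set of files for the other checkpoints, so only the LFS hashes of the weight files differ.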
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 4896,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": false,
+  "vocab_size": 32000
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.57.6"
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ab135c2fe517d6e44b7a3f4e511615848d3fe7fa4c8105f83201edfc5a6d7e9
+size 2001152552
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/tokenizer.json ADDED
(diff too large to render)
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_150/actor_hf/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 4896,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": false,
+  "vocab_size": 32000
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.57.6"
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ce7676aa913e67821a1e25b65d9cc67823dade89cd8897b6681cf3d3c097a6c
+size 2001152552
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/tokenizer.json ADDED
(diff too large to render)
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_200/actor_hf/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 4896,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": false,
+  "vocab_size": 32000
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.57.6"
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f00e4afebb6917bc56464d543490950b3264a7df8f653a56860bbc222b83696f
+size 2001152552
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/tokenizer.json ADDED
(diff too large to render)
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_250/actor_hf/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 4896,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": false,
+  "vocab_size": 32000
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.57.6"
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:606c8aa4c856fc7e633f0c7be600f1a0c1991b350c2d0fc23368ae6d6b36ee5c
+size 2001152552
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/tokenizer.json ADDED
(diff too large to render)
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_300/actor_hf/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 4896,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": false,
+  "vocab_size": 32000
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.57.6"
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c9a517f645fe2f808229dcca35f349273137cd1a049d4b395932452d8aec214
+size 2001152552
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/tokenizer.json ADDED
(diff too large to render)
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
evolm-1b/EvoLM-1B-CPT-160BT-evolm-GRPO/global_step_50/actor_hf/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
evolm-1b/EvoLM-1B-CPT-160BT-omega-GRPO/global_step_100/actor_hf/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message + '
+' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '
+Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '</s>' + '
+' }}{% endif %}{% endfor %}
evolm-1b/EvoLM-1B-CPT-160BT-omega-GRPO/global_step_100/actor_hf/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dtype": "bfloat16",
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 4896,
+  "max_position_embeddings": 2048,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.6",
+  "use_cache": false,
+  "vocab_size": 32000
+}