notzero committed
Commit c76a6cd · verified · 1 parent: b8195b1

Trained with Unsloth

Files changed (3):
  1. config.json +4 -6
  2. generation_config.json +10 -3
  3. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,16 +1,15 @@
 {
-  "_name_or_path": "notzero/modelcombinedgrpo",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "eos_token_id": 151643,
+  "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 1536,
   "initializer_range": 0.02,
   "intermediate_size": 8960,
   "max_position_embeddings": 32768,
-  "max_window_layers": 28,
+  "max_window_layers": 21,
   "model_type": "qwen2",
   "num_attention_heads": 12,
   "num_hidden_layers": 28,
@@ -22,11 +21,10 @@
   "sliding_window": null,
   "tie_word_embeddings": true,
   "torch_dtype": "float16",
-  "transformers_version": "4.47.1",
+  "transformers_version": "4.51.0",
   "unsloth_fixed": true,
-  "unsloth_version": "2025.1.8",
+  "unsloth_version": "2025.3.19",
   "use_cache": true,
-  "use_mrope": false,
   "use_sliding_window": false,
   "vocab_size": 151936
 }
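
For context, the key functional change here is the EOS token: in the Qwen2 vocabulary, 151643 is <|endoftext|> and 151645 is <|im_end|>, so the model now stops at the chat turn delimiter. A minimal sketch of checking the updated values with transformers, assuming this repo matches the notzero/modelcombinedgrpo id from the removed _name_or_path field (an assumption, since that field was dropped in this commit):

from transformers import AutoConfig

# Hypothetical repo id for illustration; substitute the actual repo.
repo_id = "notzero/modelcombinedgrpo"

config = AutoConfig.from_pretrained(repo_id)
print(config.eos_token_id)       # 151645 (<|im_end|>) after this commit
print(config.max_window_layers)  # 21 after this commit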
generation_config.json CHANGED
@@ -1,8 +1,15 @@
 {
   "bos_token_id": 151643,
-  "eos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
   "max_length": 32768,
-  "max_new_tokens": 2048,
   "pad_token_id": 151654,
-  "transformers_version": "4.47.1"
+  "repetition_penalty": 1.1,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
+  "transformers_version": "4.51.0"
 }
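
The new defaults turn on sampling (temperature 0.7, top_p 0.8, top_k 20, repetition_penalty 1.1) and let generation stop on either <|im_end|> (151645) or <|endoftext|> (151643). transformers reads generation_config.json automatically, so a plain generate() call picks all of this up; a minimal sketch, again assuming the hypothetical repo id above:

from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "notzero/modelcombinedgrpo"  # hypothetical repo id, as above
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

inputs = tokenizer("Explain git LFS in one sentence.", return_tensors="pt")

# No sampling arguments are passed, so generate() falls back to the repo's
# generation_config.json: do_sample=True, temperature=0.7, top_p=0.8,
# top_k=20, repetition_penalty=1.1, stopping on either id in [151645, 151643].
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))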
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4d5fb9902208f8a890cc41a679146841c7410cd03d670e95b6c3fa2c5ea5a69a
+oid sha256:753ac440c367a62abbeaec15e65fb5a50819325e2b6340d8f792b10e4652c145
 size 3087542290
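
Only the LFS pointer changes here; the weights themselves are a new object of the same size, about 3.1 GB. A minimal sketch, standard library only, for verifying a downloaded pytorch_model.bin against the new pointer's sha256:

import hashlib

EXPECTED_SHA256 = "753ac440c367a62abbeaec15e65fb5a50819325e2b6340d8f792b10e4652c145"
EXPECTED_SIZE = 3087542290  # bytes, from the pointer file

digest = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    # Hash in 1 MiB chunks so the ~3.1 GB file is never fully in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED_SHA256, "sha256 mismatch with LFS pointer"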