notzero committed
Commit fb83e93 · 1 Parent(s): a8c7923

Trained with Unsloth

Files changed (2):
  1. config.json +3 -4
  2. generation_config.json +9 -2
config.json CHANGED
@@ -1,16 +1,16 @@
 {
-  "_name_or_path": "unsloth/qwen2.5-1.5b",
+  "_name_or_path": "unsloth/qwen2.5-1.5b-instruct",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "eos_token_id": 151643,
+  "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 1536,
   "initializer_range": 0.02,
   "intermediate_size": 8960,
   "max_position_embeddings": 32768,
-  "max_window_layers": 28,
+  "max_window_layers": 21,
   "model_type": "qwen2",
   "num_attention_heads": 12,
   "num_hidden_layers": 28,
@@ -26,7 +26,6 @@
   "unsloth_fixed": true,
   "unsloth_version": "2025.1.8",
   "use_cache": true,
-  "use_mrope": false,
   "use_sliding_window": false,
   "vocab_size": 151936
 }
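
The substantive change here is the EOS token: in the Qwen2.5 vocabulary, id 151643 is `<|endoftext|>` (the base model's end token), while 151645 is `<|im_end|>`, the turn-end token used by the instruct chat template, matching the switch of `_name_or_path` to the instruct repo. A minimal sketch to confirm the two ids against the tokenizer (it assumes access to the `unsloth/qwen2.5-1.5b-instruct` repo named in the diff):

```python
from transformers import AutoTokenizer

# Resolve the two EOS candidates against the Qwen2.5 vocabulary.
# Repo id taken from the new "_name_or_path" above.
tok = AutoTokenizer.from_pretrained("unsloth/qwen2.5-1.5b-instruct")

print(tok.convert_ids_to_tokens([151643, 151645]))
# Expected for Qwen2.5: ['<|endoftext|>', '<|im_end|>']
```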
generation_config.json CHANGED
@@ -1,8 +1,15 @@
 {
   "bos_token_id": 151643,
-  "eos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
   "max_length": 32768,
-  "max_new_tokens": 2048,
   "pad_token_id": 151654,
+  "repetition_penalty": 1.1,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
   "transformers_version": "4.47.1"
 }
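
Beyond widening the stop condition to both `<|im_end|>` and `<|endoftext|>`, the new file turns on sampling and drops the old `max_new_tokens: 2048` cap, leaving only the 32768 `max_length` limit. A minimal sketch of the defaults `transformers` will pick up from this file, with the values inlined from the diff (the model repo id is not shown here, so the config is constructed directly rather than fetched):

```python
from transformers import GenerationConfig

# The post-commit generation_config.json, inlined for illustration.
cfg = GenerationConfig(
    bos_token_id=151643,
    do_sample=True,                  # sampling instead of greedy decoding
    eos_token_id=[151645, 151643],   # stop on <|im_end|> or <|endoftext|>
    max_length=32768,
    pad_token_id=151654,
    repetition_penalty=1.1,
    temperature=0.7,
    top_k=20,
    top_p=0.8,
)
print(cfg.eos_token_id)  # [151645, 151643]
```

With `do_sample=true`, calls to `model.generate()` that don't override these fields will apply the temperature/top-k/top-p/repetition-penalty settings by default.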