EuroswarmsInstitute committed (verified)
Commit 7543264 · Parent: 4142335

Rollback to pre-MCQ full fine-tune

Files changed (3):
  1. config.json +2 -2
  2. generation_config.json +1 -0
  3. model.safetensors +2 -2
config.json CHANGED
```diff
@@ -3,7 +3,8 @@
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "dtype": "float32",
+  "bos_token_id": 151643,
+  "dtype": "bfloat16",
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 1536,
@@ -45,7 +46,6 @@
   "num_attention_heads": 12,
   "num_hidden_layers": 28,
   "num_key_value_heads": 2,
-  "pad_token_id": 151643,
   "rms_norm_eps": 1e-06,
   "rope_parameters": {
     "rope_theta": 1000000.0,
```
generation_config.json CHANGED
```diff
@@ -1,4 +1,5 @@
 {
+  "bos_token_id": 151643,
   "do_sample": true,
   "eos_token_id": [
     151645,
```
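The same BOS id is now mirrored in generation_config.json, which `generate()` consults via `GenerationConfig`. A short sketch under the same placeholder repo id as above:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("EuroswarmsInstitute/<model>")  # placeholder id

print(gen_cfg.bos_token_id)  # 151643, added in this commit
print(gen_cfg.do_sample)     # True (unchanged)
print(gen_cfg.eos_token_id)  # list beginning with 151645; truncated in the diff above
```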
model.safetensors CHANGED
```diff
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:299d3434b7055f0939d1a23c209d6b3fc889f13486f30c9c9bd6aa463731ef74
-size 3554389592
+oid sha256:9dcbe6f21b10abd806761f576293109e17a3af10db5d2a4c14cf95d0448caab2
+size 3087467144
```
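The weights file itself is an LFS pointer swap: a new content hash and a smaller payload (3,087,467,144 bytes, consistent with roughly 1.5B parameters at 2 bytes each in bfloat16, down from 3,554,389,592 bytes). A sketch for verifying a local download against this pointer; the file path is an assumption:

```python
import hashlib
import os

# Values taken from the LFS pointer in this commit.
EXPECTED_OID = "9dcbe6f21b10abd806761f576293109e17a3af10db5d2a4c14cf95d0448caab2"
EXPECTED_SIZE = 3_087_467_144

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so large checkpoints fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk_size), b""):
            digest.update(block)
    return digest.hexdigest()

path = "model.safetensors"  # assumed local download path
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch vs. LFS pointer"
assert sha256_of(path) == EXPECTED_OID, "sha256 mismatch vs. LFS pointer"
print("model.safetensors matches this commit's LFS pointer")
```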