SoyoKaze83 committed · Commit ed521f2 (verified) · 1 parent: b8ed1af

Saving checkpoint: rougeL_1

Files changed (3)
  1. config.json +5 -4
  2. generation_config.json +0 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -8,7 +8,10 @@
   "attn_logit_softcapping": null,
   "bos_token_id": 2,
   "cache_implementation": "hybrid",
-  "eos_token_id": 106,
+  "eos_token_id": [
+    1,
+    106
+  ],
   "final_logit_softcapping": null,
   "head_dim": 256,
   "hidden_activation": "gelu_pytorch_tanh",
@@ -55,10 +58,8 @@
   "rope_scaling": null,
   "rope_theta": 1000000,
   "sliding_window": 512,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float32",
   "transformers_version": "4.55.0",
-  "unsloth_fixed": true,
-  "unsloth_version": "2025.8.4",
   "use_cache": true,
   "vocab_size": 262148
 }
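
The config change above switches eos_token_id from the single id 106 to the list [1, 106], so generation stops on either token, and records torch_dtype as float32, i.e. the weights are now stored at full precision. A minimal loading sketch, assuming a hypothetical repo id (not taken from this commit) and an optional cast back to bfloat16 to keep the previous memory footprint:

# Loading sketch. Assumptions: the repo id below is a placeholder, and the
# stored float32 weights are cast back to bfloat16 at load time.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "SoyoKaze83/checkpoint-rougeL_1"  # hypothetical repo id
tok = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # config stores float32; override on load
)
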
generation_config.json CHANGED
@@ -6,7 +6,6 @@
     1,
     106
   ],
-  "max_length": 32768,
   "pad_token_id": 0,
   "top_k": 64,
   "top_p": 0.95,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5ba7dbb236d8ec020c8e0878ab58d6a8942da2ca746e14bc3fb3987852ccca81
-size 1999820424
+oid sha256:6a2d5efd8741d2a64d023aacd70ef8d99767b68e155ce195a8c48825837d5230
+size 3999601392
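
The LFS pointer roughly doubles in size (about 2.0 GB to about 4.0 GB), which lines up with the dtype change from bfloat16 (2 bytes per parameter) to float32 (4 bytes per parameter) for a model of roughly one billion parameters. A quick arithmetic check, assuming the size change comes from the dtype switch alone:

# Back-of-the-envelope check (assumption: dtype change explains the full delta).
old_size = 1_999_820_424   # bytes, previous safetensors (bfloat16)
new_size = 3_999_601_392   # bytes, new safetensors (float32)
print(old_size / 2)        # ~1.0e9 parameters
print(new_size / 4)        # ~1.0e9 parameters -> same model, wider dtype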