Commit 7edb613 · verified · 1 Parent(s): 042cb58
runfar committed: Model save
README.md CHANGED
@@ -4,8 +4,8 @@ library_name: transformers
  model_name: MyGemmaNPC
  tags:
  - generated_from_trainer
- - trl
  - sft
+ - trl
  licence: license
  ---

@@ -34,11 +34,11 @@ This model was trained with SFT.

  ### Framework versions

- - TRL: 0.21.0
- - Transformers: 4.55.2
+ - TRL: 0.25.1
+ - Transformers: 4.57.1
  - Pytorch: 2.8.0+cu126
  - Datasets: 4.0.0
- - Tokenizers: 0.21.4
+ - Tokenizers: 0.22.1

  ## Citations

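The card above states the model was trained with SFT via TRL. A minimal sketch of how such a checkpoint is typically produced with TRL's `SFTTrainer`; the base checkpoint and dataset below are placeholders, not details taken from this commit — only the library stack comes from the card.

```python
# Minimal TRL SFT sketch. The base model and dataset are placeholders;
# they are not named anywhere in this commit.
from datasets import load_dataset
from trl import SFTConfig, SFTTrainer

dataset = load_dataset("trl-lib/Capybara", split="train")  # placeholder dataset

training_args = SFTConfig(output_dir="MyGemmaNPC", report_to="tensorboard")
trainer = SFTTrainer(
    model="google/gemma-3-270m-it",  # placeholder base checkpoint
    args=training_args,
    train_dataset=dataset,
)
trainer.train()
trainer.save_model()  # writes model.safetensors, config.json, etc. into output_dir
```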
config.json CHANGED
@@ -7,6 +7,7 @@
  "attention_dropout": 0.0,
  "attn_logit_softcapping": null,
  "bos_token_id": 2,
+ "dtype": "bfloat16",
  "eos_token_id": 1,
  "final_logit_softcapping": null,
  "head_dim": 256,
@@ -46,8 +47,7 @@
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": 512,
- "torch_dtype": "bfloat16",
- "transformers_version": "4.55.2",
+ "transformers_version": "4.57.1",
  "use_bidirectional_attention": false,
  "use_cache": true,
  "vocab_size": 262144
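The config diff swaps the serialized `torch_dtype` key for `dtype` and bumps `transformers_version`, matching the field rename in recent Transformers releases. A small loading sketch under that convention; the repo id `runfar/MyGemmaNPC` is an assumption assembled from the commit author and the card's `model_name`.

```python
# Sketch of loading this checkpoint; the repo id below is assumed, not taken from the diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "runfar/MyGemmaNPC"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, dtype=torch.bfloat16)
# Older Transformers releases spell the same argument torch_dtype instead of dtype.
```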
generation_config.json CHANGED
@@ -1,11 +1,13 @@
  {
+ "bos_token_id": 2,
  "cache_implementation": "hybrid",
  "do_sample": true,
  "eos_token_id": [
  1,
  106
  ],
+ "pad_token_id": 0,
  "top_k": 64,
  "top_p": 0.95,
- "transformers_version": "4.55.2"
+ "transformers_version": "4.57.1"
  }
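The generation config now carries `bos_token_id` and `pad_token_id` alongside the saved sampling settings (`do_sample`, `top_k=64`, `top_p=0.95`). A sketch of generating with exactly those saved settings, using the same assumed repo id as above.

```python
# Sketch: sample with the settings stored in generation_config.json.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

model_id = "runfar/MyGemmaNPC"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, dtype=torch.bfloat16)

gen_config = GenerationConfig.from_pretrained(model_id)  # reads generation_config.json
inputs = tokenizer("Hello there!", return_tensors="pt")
outputs = model.generate(**inputs, generation_config=gen_config, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```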
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6971e0fcfb2e49f3c08881e2678bbab74b498ab85ae998d8127eb02de687e8a7
+ oid sha256:6b10557428c98c0639986a03e71013efb7cdb38b40983d9048aa485cef784e3c
  size 536223056
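model.safetensors is stored as a Git LFS pointer, so only the sha256 oid changes here while the size stays at 536223056 bytes. A small sketch for checking a downloaded copy against the new pointer.

```python
# Sketch: verify a locally downloaded model.safetensors against this commit's LFS pointer.
import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so large checkpoints are not loaded into memory."""
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected_oid = "6b10557428c98c0639986a03e71013efb7cdb38b40983d9048aa485cef784e3c"
local = Path("model.safetensors")
assert local.stat().st_size == 536223056, "size does not match the LFS pointer"
assert sha256_of(local) == expected_oid, "sha256 does not match the LFS pointer"
```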
runs/Nov17_04-52-33_c7152112ee2b/events.out.tfevents.1763355178.c7152112ee2b.662.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ffce53cf89dbd7086f6ab2a6f205ce1c9d12eb0868aab7edae1e64cc4f358be
+ size 18179
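The added `runs/...tfevents...` file is a TensorBoard event log from the training run. A sketch of reading it locally, assuming the `tensorboard` package is installed; the `train/loss` tag is an assumption about what the Trainer logged, not something read from this commit.

```python
# Sketch: list the scalar tags in the new event file and dump one of them.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Nov17_04-52-33_c7152112ee2b")
acc.Reload()
print(acc.Tags()["scalars"])             # discover what was actually logged
for event in acc.Scalars("train/loss"):  # tag name assumed; pick one printed above
    print(event.step, event.value)
```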
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4a547a735c7fd40d7bd0269a55b8d9df651a090d67e98b197749f735ede46e81
- size 6225
+ oid sha256:ef33d4548b9b56b75cab391b5e1bb34b5bdee04ccfc806333212d4685d865a4e
+ size 6289
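training_args.bin is a pickled training-arguments object written by the Trainer, which is why its size can shift when library versions change. A sketch of inspecting it, with the caveat that unpickling executes code from the file and needs compatible transformers/trl versions installed.

```python
# Sketch: inspect the pickled training arguments (only for files you trust).
import torch

args = torch.load("training_args.bin", weights_only=False)  # pickled TrainingArguments/SFTConfig
print(type(args).__name__)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```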