datasysdev committed (verified)
Commit beeb133 · 1 Parent(s): 0ff1382

Upload Gemma3ForCausalLM

Files changed (3):
  1. config.json +3 -5
  2. generation_config.json +1 -2
  3. model.safetensors +1 -1
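
Since this commit changes decoding behavior (see the eos_token_id change in config.json below), downstream code may want to pin a revision instead of tracking main. A minimal sketch, using a placeholder repo id since the repository name does not appear on this page:

    from transformers import AutoModelForCausalLM

    # Placeholder repo id; only the commit hash (beeb133) is shown on this page.
    model = AutoModelForCausalLM.from_pretrained(
        "your-org/your-gemma3-model",
        revision="beeb133",  # pin to this commit (or its parent, 0ff1382)
    )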
config.json CHANGED
@@ -7,7 +7,8 @@
   "attention_dropout": 0.0,
   "attn_logit_softcapping": null,
   "bos_token_id": 2,
-  "eos_token_id": 106,
+  "dtype": "bfloat16",
+  "eos_token_id": 1,
   "final_logit_softcapping": null,
   "head_dim": 256,
   "hidden_activation": "gelu_pytorch_tanh",
@@ -46,10 +47,7 @@
   "rope_scaling": null,
   "rope_theta": 1000000.0,
   "sliding_window": 512,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.4",
-  "unsloth_fixed": true,
-  "unsloth_version": "2025.9.2",
+  "transformers_version": "4.56.0",
   "use_bidirectional_attention": false,
   "use_cache": true,
   "vocab_size": 262144
generation_config.json CHANGED
@@ -6,9 +6,8 @@
     1,
     106
   ],
-  "max_length": 32768,
   "pad_token_id": 0,
   "top_k": 64,
   "top_p": 0.95,
-  "transformers_version": "4.55.4"
+  "transformers_version": "4.56.0"
 }
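
Two behavioral changes land here and in config.json above: the config-level eos_token_id moves from 106 to 1 (the stop list here still contains both ids, presumably under eos_token_id), and the 32768 max_length default is dropped, so generate() falls back to the library default length unless the caller sets a cap. A usage sketch under the same placeholder-repo assumption:

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    repo = "your-org/your-gemma3-model"  # placeholder, not named on this page
    tok = AutoTokenizer.from_pretrained(repo)
    model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype=torch.bfloat16)

    inputs = tok("The capital of France is", return_tensors="pt")
    # With "max_length" gone from generation_config.json, set the cap explicitly.
    out = model.generate(**inputs, max_new_tokens=64)
    print(tok.decode(out[0], skip_special_tokens=True))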
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ea64c6221cbfbf981167c763645d9bb0c5cea6d5d0af560c9f845af00e983256
+oid sha256:d14ffaa3b52b9bc34bc7652ee8978cdbf842dfb3e8b3cbd9ccafcc2fdaa86e72
 size 536223056
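
The LFS pointer's hash changes while the size stays at 536223056 bytes, consistent with the same tensor layout re-saved alongside the metadata edits. A quick integrity check for a downloaded copy, assuming model.safetensors is in the working directory:

    import hashlib

    # Expected digest, taken from the new LFS pointer in this commit.
    EXPECTED = "d14ffaa3b52b9bc34bc7652ee8978cdbf842dfb3e8b3cbd9ccafcc2fdaa86e72"

    h = hashlib.sha256()
    with open("model.safetensors", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            h.update(chunk)

    assert h.hexdigest() == EXPECTED, "checksum mismatch"
    print("OK:", h.hexdigest())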