TheMockingJay1013 committed on
Commit
d2e2331
·
verified ·
1 Parent(s): fe42ae7

Upload Gemma3ForCausalLM

Browse files
Files changed (3) hide show
  1. config.json +5 -5
  2. generation_config.json +4 -4
  3. model.safetensors +2 -2
config.json CHANGED
@@ -5,9 +5,9 @@
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "attn_logit_softcapping": null,
8
- "bos_token_id": 262145,
9
  "cache_implementation": "hybrid",
10
- "eos_token_id": 262146,
11
  "final_logit_softcapping": null,
12
  "head_dim": 256,
13
  "hidden_activation": "gelu_pytorch_tanh",
@@ -19,7 +19,7 @@
19
  "num_attention_heads": 4,
20
  "num_hidden_layers": 26,
21
  "num_key_value_heads": 1,
22
- "pad_token_id": 262146,
23
  "query_pre_attn_scalar": 256,
24
  "rms_norm_eps": 1e-06,
25
  "rope_local_base_freq": 10000,
@@ -28,7 +28,7 @@
28
  "sliding_window": 512,
29
  "sliding_window_pattern": 6,
30
  "torch_dtype": "float32",
31
- "transformers_version": "4.50.3",
32
  "use_cache": true,
33
- "vocab_size": 262147
34
  }
 
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "attn_logit_softcapping": null,
8
+ "bos_token_id": 2,
9
  "cache_implementation": "hybrid",
10
+ "eos_token_id": 1,
11
  "final_logit_softcapping": null,
12
  "head_dim": 256,
13
  "hidden_activation": "gelu_pytorch_tanh",
 
19
  "num_attention_heads": 4,
20
  "num_hidden_layers": 26,
21
  "num_key_value_heads": 1,
22
+ "pad_token_id": 0,
23
  "query_pre_attn_scalar": 256,
24
  "rms_norm_eps": 1e-06,
25
  "rope_local_base_freq": 10000,
 
28
  "sliding_window": 512,
29
  "sliding_window_pattern": 6,
30
  "torch_dtype": "float32",
31
+ "transformers_version": "4.51.0",
32
  "use_cache": true,
33
+ "vocab_size": 262144
34
  }
generation_config.json CHANGED
@@ -1,8 +1,8 @@
1
  {
2
  "_from_model_config": true,
3
- "bos_token_id": 262145,
4
  "cache_implementation": "hybrid",
5
- "eos_token_id": 262146,
6
- "pad_token_id": 262146,
7
- "transformers_version": "4.50.3"
8
  }
 
1
  {
2
  "_from_model_config": true,
3
+ "bos_token_id": 2,
4
  "cache_implementation": "hybrid",
5
+ "eos_token_id": 1,
6
+ "pad_token_id": 0,
7
+ "transformers_version": "4.51.0"
8
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:04cc17b1036011501d7d65f258ed920b45d89b588d5c02b655e12b05df7fac17
3
- size 3999596784
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:16e4e9fc91ce3d3c5b262cfb7a93865a1bddfbea32cdb6f5ef3f83c5788e559f
3
+ size 3999582960