Commit b53a06b (verified)
qgallouedec (HF Staff) committed
Parent(s): ee159b7

Upload Qwen3ForCausalLM

Files changed (3):
  1. config.json +6 -2
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -9,6 +9,10 @@
   "hidden_size": 128,
   "initializer_range": 0.02,
   "intermediate_size": 32,
+  "layer_types": [
+    "full_attention",
+    "full_attention"
+  ],
   "max_position_embeddings": 32768,
   "max_window_layers": 28,
   "model_type": "qwen3",
@@ -18,10 +22,10 @@
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 10000.0,
-  "sliding_window": 4096,
+  "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.52.0.dev0",
+  "transformers_version": "4.55.0.dev0",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151669
generation_config.json CHANGED
@@ -1,4 +1,4 @@
 {
   "_from_model_config": true,
-  "transformers_version": "4.52.0.dev0"
+  "transformers_version": "4.55.0.dev0"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:50f7cd163bbdb08f613374e155b1a0806ff63c5af257a6de49ede65aed5eccfd
+oid sha256:4d4b8cd5c11e1bb7b8e8d05693a56af8bbf8ad4d1ff9fd90565299004376b746
 size 156987560
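
Since only the LFS pointer's oid changed (the size is identical), a quick sketch for verifying that a downloaded weights file matches the new pointer; "model.safetensors" here is a placeholder local path:

# Sketch: hash the local file and compare against the oid in the new pointer.
import hashlib

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:  # placeholder path
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)

expected = "4d4b8cd5c11e1bb7b8e8d05693a56af8bbf8ad4d1ff9fd90565299004376b746"
assert h.hexdigest() == expected, "checksum mismatch with LFS pointer"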