HassanShehata committed on
Commit d58fdfa · verified · 1 Parent(s): b362ce4

Upload 10 files

config.json ADDED
@@ -0,0 +1,62 @@
+ {
+   "architectures": [
+     "Qwen3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "eos_token_id": 151645,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 1024,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_types": [
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 40960,
+   "max_window_layers": 28,
+   "model_type": "qwen3",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 8,
+   "pad_token_id": 151654,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 1000000,
+   "sliding_window": null,
+   "tie_word_embeddings": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.55.2",
+   "unsloth_fixed": true,
+   "unsloth_version": "2025.8.5",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "vocab_size": 151936
+ }
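
The config above describes a 28-layer Qwen3 causal LM (hidden size 1024, 16 attention heads, 8 KV heads, bfloat16 weights). A minimal sketch of loading it with transformers follows; the repo id is a hypothetical placeholder, since this commit does not name one, and a local directory containing these uploaded files works the same way.

```python
# Minimal sketch: load the model described by config.json above.
# "HassanShehata/<repo-name>" is a placeholder, not the actual repo id.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

model_path = "HassanShehata/<repo-name>"  # hypothetical; or a local directory

config = AutoConfig.from_pretrained(model_path)
print(config.model_type)          # "qwen3"
print(config.num_hidden_layers)   # 28
print(config.hidden_size)         # 1024

model = AutoModelForCausalLM.from_pretrained(
    model_path,
    torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16"
)
```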
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "max_length": 40960,
+   "pad_token_id": 151654,
+   "temperature": 0.6,
+   "top_k": 20,
+   "top_p": 0.95,
+   "transformers_version": "4.55.2"
+ }
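
generation_config.json sets the sampling defaults (do_sample with temperature 0.6, top_p 0.95, top_k 20). A minimal sketch of generating with those values follows, assuming the `model` and `model_path` from the previous sketch; the prompt and the max_new_tokens cap are illustrative only.

```python
# Minimal sketch: generate with the sampling defaults from generation_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(model_path)
inputs = tokenizer("Give a one-line summary of RMSNorm.", return_tensors="pt")

outputs = model.generate(
    **inputs,
    do_sample=True,       # "do_sample": true
    temperature=0.6,      # "temperature": 0.6
    top_p=0.95,           # "top_p": 0.95
    top_k=20,             # "top_k": 20
    max_new_tokens=256,   # illustrative cap; the config's max_length is 40960
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```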
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff076179031a4551d8ba4c2cfc91c934707dfbc5ca7687f6dd8e53ea84d93d5a
+ size 1192135096
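
model.safetensors is stored via Git LFS, so the repository only holds the pointer above (sha256 oid and byte size). A minimal sketch of checking a downloaded copy against that pointer follows; the local file path is an assumption.

```python
# Minimal sketch: verify a downloaded model.safetensors against the LFS pointer.
import hashlib
from pathlib import Path

path = Path("model.safetensors")  # hypothetical local path
expected_sha256 = "ff076179031a4551d8ba4c2cfc91c934707dfbc5ca7687f6dd8e53ea84d93d5a"
expected_size = 1192135096

h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert path.stat().st_size == expected_size, "size mismatch"
assert h.hexdigest() == expected_sha256, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```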
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
- size 11422654
+ oid sha256:bd92abe10737896583ba3e2e9826e986f3806ecc1446d7c06b8dc5b72de9e99f
+ size 11422639
tokenizer_config.json CHANGED
@@ -195,7 +195,7 @@
    "special": false
  },
  "151667": {
-   "content": "<think>",
+   "content": "",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
@@ -203,7 +203,7 @@
    "special": false
  },
  "151668": {
-   "content": "</think>",
+   "content": "",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
@@ -233,7 +233,7 @@
  "extra_special_tokens": {},
  "model_max_length": 40960,
  "pad_token": "<|vision_pad|>",
- "padding_side": "right",
+ "padding_side": "left",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null