Vanbitcase committed
Commit e22fdc3 · verified · 1 Parent(s): 0a6f529

Upload 2 files

Files changed (2)
  1. config.json +131 -0
  2. generation_config.json +14 -0
config.json ADDED
@@ -0,0 +1,131 @@
+ {
+   "architectures": [
+     "Qwen2VLForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 1536,
+   "image_token_id": 151655,
+   "initializer_range": 0.02,
+   "intermediate_size": 8960,
+   "max_position_embeddings": 32768,
+   "max_window_layers": 28,
+   "model_type": "qwen2_vl",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 2,
+   "pad_token_id": 151654,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sliding_window": 32768,
+   "text_config": {
+     "_name_or_path": "Qwen/Qwen2-VL-2B-Instruct",
+     "architectures": [
+       "Qwen2VLForConditionalGeneration"
+     ],
+     "attention_dropout": 0.0,
+     "eos_token_id": 151645,
+     "hidden_act": "silu",
+     "hidden_size": 1536,
+     "image_token_id": null,
+     "initializer_range": 0.02,
+     "intermediate_size": 8960,
+     "layer_types": [
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention"
+     ],
+     "max_position_embeddings": 32768,
+     "max_window_layers": 28,
+     "model_type": "qwen2_vl_text",
+     "num_attention_heads": 12,
+     "num_hidden_layers": 28,
+     "num_key_value_heads": 2,
+     "pad_token_id": 151654,
+     "rms_norm_eps": 1e-06,
+     "rope_scaling": {
+       "mrope_section": [
+         16,
+         24,
+         24
+       ],
+       "rope_type": "default",
+       "type": "default"
+     },
+     "rope_theta": 1000000.0,
+     "sliding_window": null,
+     "tie_word_embeddings": true,
+     "torch_dtype": "bfloat16",
+     "unsloth_fixed": true,
+     "use_cache": true,
+     "use_sliding_window": false,
+     "video_token_id": null,
+     "vision_end_token_id": 151653,
+     "vision_start_token_id": 151652,
+     "vision_token_id": 151654,
+     "vocab_size": 151936
+   },
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.55.2",
+   "unsloth_fixed": true,
+   "unsloth_version": "2025.8.8",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "video_token_id": 151656,
+   "vision_config": {
+     "depth": 32,
+     "embed_dim": 1280,
+     "hidden_act": "quick_gelu",
+     "hidden_size": 1536,
+     "in_channels": 3,
+     "in_chans": 3,
+     "initializer_range": 0.02,
+     "mlp_ratio": 4,
+     "model_type": "qwen2_vl",
+     "num_heads": 16,
+     "patch_size": 14,
+     "spatial_merge_size": 2,
+     "spatial_patch_size": 14,
+     "temporal_patch_size": 2,
+     "torch_dtype": "bfloat16"
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vocab_size": 151936
+ }
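
A minimal sketch of how a transformers checkpoint carrying this config.json is typically loaded. The repo id below is the base checkpoint named in text_config._name_or_path, not necessarily the repository this commit was pushed to; the snippet is an illustration, not part of the upload.

import torch
from transformers import AutoConfig, Qwen2VLForConditionalGeneration

# Assumption: using the base checkpoint from "_name_or_path";
# substitute the id of the repository this commit belongs to.
repo_id = "Qwen/Qwen2-VL-2B-Instruct"

# Parse the config alone to inspect the architecture described above.
config = AutoConfig.from_pretrained(repo_id)
print(config.model_type)           # "qwen2_vl"
print(config.num_hidden_layers)    # 28 text layers
print(config.vision_config.depth)  # 32 vision transformer blocks

# Load the full vision-language model in bfloat16, matching "torch_dtype".
model = Qwen2VLForConditionalGeneration.from_pretrained(
    repo_id, torch_dtype=torch.bfloat16
)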
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "max_length": 32768,
+   "pad_token_id": 151654,
+   "temperature": 0.01,
+   "top_k": 1,
+   "top_p": 0.001,
+   "transformers_version": "4.55.2"
+ }
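
As a hedged illustration, the sampling settings above map one-to-one onto a transformers GenerationConfig. The values below simply mirror the file; the model and inputs objects referenced in the final comment are assumed to come from a loading step like the one sketched after config.json.

from transformers import GenerationConfig

# Mirror of generation_config.json above: near-greedy decoding
# (temperature 0.01, top_k 1, top_p 0.001) even though do_sample is true.
gen_config = GenerationConfig(
    bos_token_id=151643,
    eos_token_id=[151645, 151643],
    pad_token_id=151654,
    do_sample=True,
    temperature=0.01,
    top_k=1,
    top_p=0.001,
    max_length=32768,
)

# model.generate(**inputs, generation_config=gen_config) applies these settings;
# from_pretrained() also picks them up automatically when generation_config.json
# sits in the same repository as the weights.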