xcreates committed on
Commit
143c979
·
verified ·
1 Parent(s): 4932a8c

Upload complete model

Browse files
Files changed (1) hide show
  1. vision_language_encoder/config.json +61 -0
vision_language_encoder/config.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "GlmImageForConditionalGeneration"
4
+ ],
5
+ "image_start_token_id": 16384,
6
+ "image_end_token_id": 16385,
7
+ "image_token_id": 167855,
8
+ "model_type": "glm_image",
9
+ "text_config": {
10
+ "attention_dropout": 0.0,
11
+ "eos_token_id": 16385,
12
+ "pad_token_id": 167841,
13
+ "hidden_act": "silu",
14
+ "hidden_size": 4096,
15
+ "initializer_range": 0.02,
16
+ "intermediate_size": 13696,
17
+ "max_position_embeddings": 131072,
18
+ "model_type": "glm_image_text",
19
+ "num_attention_heads": 32,
20
+ "num_hidden_layers": 40,
21
+ "num_key_value_heads": 2,
22
+ "rms_norm_eps": 1e-05,
23
+ "dtype": "bfloat16",
24
+ "rope_parameters": {
25
+ "rope_theta": 10000,
26
+ "rope_type": "default",
27
+ "mrope_section": [
28
+ 8,
29
+ 12,
30
+ 12
31
+ ],
32
+ "partial_rotary_factor": 0.5
33
+ },
34
+ "use_cache": true,
35
+ "vision_vocab_size": 16512,
36
+ "vocab_size": 168064
37
+ },
38
+ "transformers_version": "5.0.0.dev0",
39
+ "vision_config": {
40
+ "attention_bias": true,
41
+ "attention_dropout": 0.0,
42
+ "depth": 40,
43
+ "hidden_act": "gelu",
44
+ "hidden_size": 1536,
45
+ "image_size": 2048,
46
+ "in_channels": 3,
47
+ "intermediate_size": 6144,
48
+ "layer_norm_eps": 1e-06,
49
+ "model_type": "glm_image_vision",
50
+ "num_heads": 16,
51
+ "patch_size": 16
52
+ },
53
+ "vq_config": {
54
+ "embed_dim": 2048,
55
+ "in_channels": 3,
56
+ "initializer_range": 0.02,
57
+ "latent_channels": 1536,
58
+ "model_type": "glm_image_vqmodel",
59
+ "num_embeddings": 16384
60
+ }
61
+ }