erfanzar committed
Commit 5d8e603 · verified · 1 Parent(s): d7ae3e7

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. README.md +179 -0
  2. checkpoint_metadata.json +6 -0
  3. config.json +349 -0
  4. generation_config.json +15 -0
  5. model/params/model/language_model/layers/0/self_attn/k_proj/bias/0 +0 -0
  6. model/params/model/language_model/layers/0/self_attn/o_proj/kernel/.zarray +1 -0
  7. model/params/model/language_model/layers/1/mlp/down_proj/kernel/.zarray +1 -0
  8. model/params/model/language_model/layers/1/self_attn/q_proj/bias/0 +0 -0
  9. model/params/model/language_model/layers/1/self_attn/v_proj/bias/0 +0 -0
  10. model/params/model/language_model/layers/10/input_layernorm/kernel/.zarray +1 -0
  11. model/params/model/language_model/layers/10/post_mlp_layernorm/kernel/.zarray +1 -0
  12. model/params/model/language_model/layers/10/self_attn/q_proj/bias/.zarray +1 -0
  13. model/params/model/language_model/layers/10/self_attn/q_proj/bias/0 +0 -0
  14. model/params/model/language_model/layers/10/self_attn/q_proj/kernel/.zarray +1 -0
  15. model/params/model/language_model/layers/10/self_attn/v_proj/bias/.zarray +1 -0
  16. model/params/model/language_model/layers/10/self_attn/v_proj/bias/0 +0 -0
  17. model/params/model/language_model/layers/10/self_attn/v_proj/kernel/.zarray +1 -0
  18. model/params/model/language_model/layers/11/input_layernorm/kernel/.zarray +1 -0
  19. model/params/model/language_model/layers/11/input_layernorm/kernel/0 +0 -0
  20. model/params/model/language_model/layers/11/mlp/down_proj/kernel/.zarray +1 -0
  21. model/params/model/language_model/layers/11/mlp/gate_up_proj/kernel/.zarray +1 -0
  22. model/params/model/language_model/layers/11/post_attention_layernorm/kernel/.zarray +1 -0
  23. model/params/model/language_model/layers/11/post_attention_layernorm/kernel/0 +0 -0
  24. model/params/model/language_model/layers/11/post_mlp_layernorm/kernel/.zarray +1 -0
  25. model/params/model/language_model/layers/11/post_mlp_layernorm/kernel/0 +0 -0
  26. model/params/model/language_model/layers/11/post_self_attn_layernorm/kernel/.zarray +1 -0
  27. model/params/model/language_model/layers/11/post_self_attn_layernorm/kernel/0 +0 -0
  28. model/params/model/language_model/layers/11/self_attn/k_proj/kernel/.zarray +1 -0
  29. model/params/model/visual/blocks/4/norm1/kernel/0 +0 -0
  30. model/params/model/visual/blocks/4/norm2/kernel/0 +0 -0
  31. model/params/model/visual/blocks/5/attn/proj/kernel/.zarray +1 -0
  32. model/params/model/visual/blocks/5/attn/qkv/kernel/.zarray +1 -0
  33. model/params/model/visual/blocks/5/mlp/gate_proj/kernel/.zarray +1 -0
  34. model/params/model/visual/blocks/5/mlp/up_proj/kernel/.zarray +1 -0
  35. model/params/model/visual/blocks/5/norm1/kernel/0 +0 -0
  36. model/params/model/visual/blocks/5/norm2/kernel/0 +0 -0
  37. model/params/model/visual/blocks/6/attn/proj/kernel/.zarray +1 -0
  38. model/params/model/visual/blocks/6/mlp/down_proj/kernel/.zarray +1 -0
  39. model/params/model/visual/blocks/6/mlp/gate_proj/kernel/.zarray +1 -0
  40. model/params/model/visual/blocks/6/mlp/up_proj/kernel/.zarray +1 -0
  41. model/params/model/visual/blocks/6/norm1/kernel/.zarray +1 -0
  42. model/params/model/visual/blocks/6/norm2/kernel/0 +0 -0
  43. model/params/model/visual/blocks/7/attn/qkv/kernel/.zarray +1 -0
  44. model/params/model/visual/blocks/7/norm1/kernel/.zarray +1 -0
  45. model/params/model/visual/blocks/7/norm1/kernel/0 +0 -0
  46. model/params/model/visual/blocks/7/norm2/kernel/0 +0 -0
  47. model/params/model/visual/blocks/8/attn/qkv/kernel/.zarray +1 -0
  48. model/params/model/visual/blocks/8/mlp/down_proj/kernel/.zarray +1 -0
  49. model/params/model/visual/blocks/8/norm1/kernel/0 +0 -0
  50. model/params/model/visual/blocks/8/norm2/kernel/.zarray +1 -0
README.md ADDED
@@ -0,0 +1,179 @@
+ ---
+ library_name: easydel
+ pipeline_tag: image-to-text
+ tags:
+ - easydel
+ - jax
+ - "glm4v"
+ - "ImageTextToText"
+ - "vanilla"
+ ---
+
+ <p align="center">
+ <a href="https://github.com/erfanzar/EasyDeL">
+ <img src="https://raw.githubusercontent.com/erfanzar/easydel/main/images/easydel-logo-with-text.png" height="80" alt="EasyDeL" />
+ </a>
+ </p>
+
+ <h1 align="center">zai-org/GLM-4.6V-Flash</h1>
+
+ <p align="center">
+ EasyDeL checkpoint converted from zai-org/GLM-4.6V-Flash.
+ </p>
+
+ <p align="center">
+ <a href="https://huggingface.co/EasyDeL/GLM-4.6V-Flash">
+ <img src="https://img.shields.io/static/v1?label=HF&message=EasyDeL/GLM-4.6V-Flash&color=FFD21E&style=flat-square" alt="HuggingFace Repo" />
+ </a>
+ <a href="https://github.com/erfanzar/EasyDeL">
+ <img src="https://img.shields.io/static/v1?label=EasyDeL&message=v0.2.0&color=0B5FFF&style=flat-square" alt="EasyDeL Version" />
+ </a>
+ <img src="https://img.shields.io/static/v1?label=Model&message=glm4v&color=0A66C2&style=flat-square" alt="Model Type" />
+ <img src="https://img.shields.io/static/v1?label=Task&message=ImageTextToText&color=2EAD4D&style=flat-square" alt="Task" />
+ <img src="https://img.shields.io/static/v1?label=Attention&message=vanilla&color=8A2BE2&style=flat-square" alt="Attention Mechanism" />
+ </p>
+
+ ---
+
+ ## At a Glance
+
+ | Field | Value |
+ | --- | --- |
+ | Repo ID | `EasyDeL/GLM-4.6V-Flash` |
+ | Model type | `glm4v` |
+ | Task | `ImageTextToText` |
+ | Attention | `vanilla` (`AttentionMechanisms.VANILLA`) |
+ | EasyDeL | `0.2.0` |
+
+ ## Overview
+
+ This checkpoint is intended to be loaded with EasyDeL on JAX (CPU/GPU/TPU). It supports sharded loading with `auto_shard_model=True` and configurable precision via `dtype`, `param_dtype`, and `precision`.
+
+ ## Quickstart
+
+ ```python
+ import easydel as ed
+ from jax import numpy as jnp, lax
+
+ repo_id = "EasyDeL/GLM-4.6V-Flash"
+
+ dtype = jnp.bfloat16  # try jnp.float16 on many GPUs
+
+ model = ed.AutoEasyDeLModelForImageTextToText.from_pretrained(
+     repo_id,
+     config_kwargs=ed.EasyDeLBaseConfigDict(
+         attn_dtype=dtype,
+         attn_mechanism=ed.AttentionMechanisms.VANILLA,
+     ),
+     dtype=dtype,
+     param_dtype=dtype,
+     precision=lax.Precision("fastest"),
+     auto_shard_model=True,
+ )
+ ```
+
+ If the repository only provides PyTorch weights, pass `from_torch=True` to `from_pretrained(...)`, as in the sketch below.
+
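+ A minimal variant under that assumption, continuing from the Quickstart (a hedged sketch: everything except `from_torch` mirrors the call above, and the upstream repo id is an assumption):
+
+ ```python
+ # Hypothetical fallback: convert PyTorch weights to JAX parameters at load time.
+ model = ed.AutoEasyDeLModelForImageTextToText.from_pretrained(
+     "zai-org/GLM-4.6V-Flash",  # original PyTorch repository (assumption)
+     from_torch=True,
+     dtype=dtype,
+     param_dtype=dtype,
+     auto_shard_model=True,
+ )
+ ```
+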
+ ## Sharding & Parallelism (Multi-Device)
+
+ EasyDeL can scale to multiple devices by creating a logical device mesh. Most EasyDeL loaders use a 5D mesh:
+
+ - `dp`: data parallel (replicated parameters, different batch shards)
+ - `fsdp`: parameter sharding (memory saver; often the biggest axis)
+ - `ep`: expert parallel (MoE; keep `1` for non-MoE models)
+ - `tp`: tensor parallel (splits large matmuls)
+ - `sp`: sequence parallel (splits the sequence dimension)
+
+ Use `sharding_axis_names=("dp","fsdp","ep","tp","sp")` and choose `sharding_axis_dims` so that their product equals your device count.
+ You can use `-1` in `sharding_axis_dims` to let EasyDeL infer the remaining dimension; a quick validation sketch follows the examples below.
+
+ <details>
+ <summary>Example sharding configs</summary>
+
+ ```python
+ # 8 devices, pure FSDP
+ sharding_axis_dims = (1, 8, 1, 1, 1)
+
+ # 8 devices, 2-way DP x 4-way FSDP
+ sharding_axis_dims = (2, 4, 1, 1, 1)
+
+ # 8 devices, 4-way FSDP x 2-way TP
+ sharding_axis_dims = (1, 4, 1, 2, 1)
+ ```
+ </details>
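+
+ A quick sanity check for a chosen layout (a minimal sketch; beyond the standard library it only assumes `jax.device_count()`):
+
+ ```python
+ import math
+
+ import jax
+
+ sharding_axis_dims = [1, -1, 1, 1, 1]  # -1 = let EasyDeL infer this axis
+
+ # Resolve a single -1 against the local device count, then validate the product.
+ if -1 in sharding_axis_dims:
+     known = math.prod(d for d in sharding_axis_dims if d != -1)
+     sharding_axis_dims[sharding_axis_dims.index(-1)] = jax.device_count() // known
+ assert math.prod(sharding_axis_dims) == jax.device_count(), sharding_axis_dims
+ ```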
+
+ ## Using via `eLargeModel` (ELM)
+
+ `eLargeModel` is a higher-level interface that wires together loading, sharding, training, and eSurge inference from a single config.
+
+ ```python
+ from easydel import eLargeModel
+
+ repo_id = "EasyDeL/GLM-4.6V-Flash"
+
+ elm = eLargeModel.from_pretrained(repo_id)  # task is auto-detected
+ elm.set_dtype("bf16")
+ elm.set_sharding(axis_names=("dp", "fsdp", "ep", "tp", "sp"), axis_dims=(1, -1, 1, 1, 1))
+
+ model = elm.build_model()
+ # Optional: build an inference engine
+ # engine = elm.build_esurge()
+ ```
+
+ <details>
+ <summary>ELM YAML config example</summary>
+
+ ```yaml
+ model:
+   name_or_path: "EasyDeL/GLM-4.6V-Flash"
+
+ loader:
+   dtype: bf16
+   param_dtype: bf16
+
+ sharding:
+   axis_dims: [1, -1, 1, 1, 1]
+   auto_shard_model: true
+ ```
+ </details>
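+
+ If you keep that YAML in a file, a small piece of glue can feed it through the setters shown above (a hedged sketch: `yaml` is PyYAML, `elm.yaml` is a hypothetical filename, and the setter calls are the ones from the Python snippet above, not a documented YAML loader):
+
+ ```python
+ import yaml  # PyYAML (assumption: available in the environment)
+ from easydel import eLargeModel
+
+ with open("elm.yaml") as f:  # hypothetical path holding the YAML above
+     cfg = yaml.safe_load(f)
+
+ elm = eLargeModel.from_pretrained(cfg["model"]["name_or_path"])
+ elm.set_dtype(cfg["loader"]["dtype"])
+ elm.set_sharding(
+     axis_names=("dp", "fsdp", "ep", "tp", "sp"),
+     axis_dims=tuple(cfg["sharding"]["axis_dims"]),
+ )
+ model = elm.build_model()
+ ```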
+
+ ## Features
+
+ **EasyDeL:**
+ - JAX-native implementation and sharded execution
+ - Configurable attention backends via `AttentionMechanisms.*`
+ - Precision control via `dtype`, `param_dtype`, and `precision`
+
+ ## Installation
+
+ ```bash
+ pip install easydel
+ ```
+
+ ## Links
+
+ - EasyDeL GitHub: https://github.com/erfanzar/EasyDeL
+ - Docs: https://easydel.readthedocs.io/en/latest/
+
+ ## Supported Tasks
+
+ - ImageTextToText
+
+ ## Limitations
+
+ - Refer to the original model card for training data, evaluation, and intended use.
+
+ ## License
+
+ EasyDeL is released under the Apache-2.0 license. The license for this model's weights may differ; please consult the original repository.
+
+ ## Citation
+
+ ```bibtex
+ @misc{zare_chavoshi_2023,
+   title={EasyDeL: An open-source library for enhancing and streamlining the training process of machine learning models},
+   url={https://github.com/erfanzar/EasyDeL},
+   author={Zare Chavoshi, Erfan},
+   year={2023}
+ }
+ ```
checkpoint_metadata.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "timestamp": "2025-12-28T04:14:13.950976",
+   "custom_metadata": {
+     "step": 0
+   }
+ }
config.json ADDED
@@ -0,0 +1,349 @@
+ {
+   "_external_rope_config_kwargs": {},
+   "architectures": [
+     "Glm4vForConditionalGeneration"
+   ],
+   "attn_mechanism": "vanilla",
+   "backend": null,
+   "bits": null,
+   "blocksize_b": 1,
+   "blocksize_k": 128,
+   "blocksize_q": 128,
+   "decode_attn_mechanism": null,
+   "easy_method": "train",
+   "fcm_max_ratio": 0.0,
+   "fcm_min_ratio": 0.0,
+   "flash_attention_backward_pass_impl": "triton",
+   "fsdp_is_ep_bound": true,
+   "gradient_checkpointing": "",
+   "gradient_checkpointing_targets": null,
+   "hardware_abstraction": false,
+   "image_end_token_id": 151340,
+   "image_start_token_id": 151339,
+   "image_token_id": 151363,
+   "kv_cache_quantization_config": null,
+   "kv_cache_sharding_sequence_axis_name": "sp",
+   "model_type": "glm4v",
+   "moe_force_xla_gmm": false,
+   "moe_method": "fused_moe",
+   "moe_tiling_size_batch": 4,
+   "moe_tiling_size_dim": 128,
+   "moe_tiling_size_seqlen": 128,
+   "operation_configs": null,
+   "pallas_k_block_size": 128,
+   "pallas_m_block_size": 128,
+   "pallas_n_block_size": 128,
+   "partition_axis": {
+     "attention_dim_axis": null,
+     "attention_kv_dim_axis": null,
+     "batch_axis": [
+       "fsdp",
+       "dp"
+     ],
+     "bias_head_sequence_axis": null,
+     "bias_key_sequence_axis": null,
+     "data_parallel_axis": "dp",
+     "decode_attention_dim_axis": null,
+     "decode_attention_kv_dim_axis": null,
+     "decode_batch_axis": [
+       "fsdp",
+       "dp"
+     ],
+     "decode_head_axis": "tp",
+     "decode_key_sequence_axis": "sp",
+     "decode_kv_head_axis": "tp",
+     "decode_query_sequence_axis": null,
+     "expert_axis": "ep",
+     "expert_gate_axis": null,
+     "expert_parallel_axis": "ep",
+     "fully_sharded_data_parallel_axis": "fsdp",
+     "head_axis": "tp",
+     "hidden_state_axis": "tp",
+     "key_sequence_axis": "sp",
+     "kv_head_axis": "tp",
+     "mlp_intermediate_axis": "tp",
+     "query_sequence_axis": "sp",
+     "sequence_axis": "sp",
+     "sequence_parallel_axis": "sp",
+     "tensor_parallel_axis": "tp",
+     "vocab_axis": "tp"
+   },
+   "platform": null,
+   "precompute_masks": true,
+   "pretraining_tp": 1,
+   "quantization_config": null,
+   "scan_attention_layers": false,
+   "scan_mlp_chunk_size": 1024,
+   "scan_ring_attention": true,
+   "sequence_axis_name": "sp",
+   "sharding_axis_dims": [
+     1,
+     -1,
+     1,
+     1,
+     1
+   ],
+   "sharding_axis_names": [
+     "dp",
+     "fsdp",
+     "ep",
+     "tp",
+     "sp"
+   ],
+   "sharding_dcn_axis_dims": null,
+   "sp_is_ep_bound": true,
+   "text_config": {
+     "_external_rope_config_kwargs": {
+       "repetition_style": true
+     },
+     "architectures": [
+       "Glm4vForConditionalGeneration"
+     ],
+     "attention_bias": true,
+     "attention_dropout": 0.0,
+     "attn_mechanism": "vanilla",
+     "backend": null,
+     "bits": null,
+     "blocksize_b": 1,
+     "blocksize_k": 128,
+     "blocksize_q": 128,
+     "decode_attn_mechanism": null,
+     "dtype": "bfloat16",
+     "easy_method": "train",
+     "eos_token_id": [
+       151329,
+       151336,
+       151338
+     ],
+     "fcm_max_ratio": 0.0,
+     "fcm_min_ratio": 0.0,
+     "flash_attention_backward_pass_impl": "triton",
+     "fsdp_is_ep_bound": true,
+     "gradient_checkpointing": "",
+     "gradient_checkpointing_targets": null,
+     "hardware_abstraction": false,
+     "head_dim": 128,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "initializer_range": 0.02,
+     "intermediate_size": 13696,
+     "kv_cache_quantization_config": null,
+     "kv_cache_sharding_sequence_axis_name": "sp",
+     "max_position_embeddings": 131072,
+     "model_type": "glm4v_text",
+     "moe_force_xla_gmm": false,
+     "moe_method": "fused_moe",
+     "moe_tiling_size_batch": 4,
+     "moe_tiling_size_dim": 128,
+     "moe_tiling_size_seqlen": 128,
+     "num_attention_heads": 32,
+     "num_hidden_layers": 40,
+     "num_key_value_heads": 2,
+     "operation_configs": null,
+     "pad_token_id": 151329,
+     "pallas_k_block_size": 128,
+     "pallas_m_block_size": 128,
+     "pallas_n_block_size": 128,
+     "partial_rotary_factor": 0.5,
+     "partition_axis": {
+       "attention_dim_axis": null,
+       "attention_kv_dim_axis": null,
+       "batch_axis": [
+         "fsdp",
+         "dp"
+       ],
+       "bias_head_sequence_axis": null,
+       "bias_key_sequence_axis": null,
+       "data_parallel_axis": "dp",
+       "decode_attention_dim_axis": null,
+       "decode_attention_kv_dim_axis": null,
+       "decode_batch_axis": [
+         "fsdp",
+         "dp"
+       ],
+       "decode_head_axis": "tp",
+       "decode_key_sequence_axis": "sp",
+       "decode_kv_head_axis": "tp",
+       "decode_query_sequence_axis": null,
+       "expert_axis": "ep",
+       "expert_gate_axis": null,
+       "expert_parallel_axis": "ep",
+       "fully_sharded_data_parallel_axis": "fsdp",
+       "head_axis": "tp",
+       "hidden_state_axis": "tp",
+       "key_sequence_axis": "sp",
+       "kv_head_axis": "tp",
+       "mlp_intermediate_axis": "tp",
+       "query_sequence_axis": "sp",
+       "sequence_axis": "sp",
+       "sequence_parallel_axis": "sp",
+       "tensor_parallel_axis": "tp",
+       "vocab_axis": "tp"
+     },
+     "platform": null,
+     "precompute_masks": true,
+     "pretraining_tp": 1,
+     "quantization_config": null,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+       "mrope_section": [
+         8,
+         12,
+         12
+       ],
+       "rope_type": "default"
+     },
+     "rope_theta": 500000.0,
+     "scan_attention_layers": false,
+     "scan_mlp_chunk_size": 1024,
+     "scan_ring_attention": true,
+     "sequence_axis_name": "sp",
+     "sharding_axis_dims": [
+       1,
+       -1,
+       1,
+       1,
+       1
+     ],
+     "sharding_axis_names": [
+       "dp",
+       "fsdp",
+       "ep",
+       "tp",
+       "sp"
+     ],
+     "sharding_dcn_axis_dims": null,
+     "sp_is_ep_bound": true,
+     "use_cache": true,
+     "use_expert_tensor_mode": false,
+     "use_ring_of_experts": false,
+     "use_scan_mlp": false,
+     "use_sharded_kv_caching": false,
+     "use_sharding_constraint": false,
+     "vocab_size": 151552
+   },
+   "tie_word_embeddings": false,
+   "transformers_version": "4.57.3",
+   "use_expert_tensor_mode": false,
+   "use_ring_of_experts": false,
+   "use_scan_mlp": false,
+   "use_sharded_kv_caching": false,
+   "use_sharding_constraint": false,
+   "video_end_token_id": 151342,
+   "video_start_token_id": 151341,
+   "video_token_id": 151364,
+   "vision_config": {
+     "_external_rope_config_kwargs": {},
+     "architectures": [
+       "Glm4vForConditionalGeneration"
+     ],
+     "attention_bias": false,
+     "attention_dropout": 0.0,
+     "attn_mechanism": "vanilla",
+     "backend": null,
+     "bits": null,
+     "blocksize_b": 1,
+     "blocksize_k": 128,
+     "blocksize_q": 128,
+     "decode_attn_mechanism": null,
+     "depth": 24,
+     "easy_method": "train",
+     "fcm_max_ratio": 0.0,
+     "fcm_min_ratio": 0.0,
+     "flash_attention_backward_pass_impl": "triton",
+     "fsdp_is_ep_bound": true,
+     "gradient_checkpointing": "",
+     "gradient_checkpointing_targets": null,
+     "hardware_abstraction": false,
+     "hidden_act": "silu",
+     "hidden_dropout_prob": 0.0,
+     "hidden_size": 1536,
+     "image_size": 336,
+     "in_channels": 3,
+     "initializer_range": 0.02,
+     "intermediate_size": 13696,
+     "kv_cache_quantization_config": null,
+     "kv_cache_sharding_sequence_axis_name": "sp",
+     "model_type": "glm4v_vision",
+     "moe_force_xla_gmm": false,
+     "moe_method": "fused_moe",
+     "moe_tiling_size_batch": 4,
+     "moe_tiling_size_dim": 128,
+     "moe_tiling_size_seqlen": 128,
+     "num_attention_heads": 12,
+     "num_heads": 12,
+     "operation_configs": null,
+     "out_hidden_size": 4096,
+     "pallas_k_block_size": 128,
+     "pallas_m_block_size": 128,
+     "pallas_n_block_size": 128,
+     "partition_axis": {
+       "attention_dim_axis": null,
+       "attention_kv_dim_axis": null,
+       "batch_axis": [
+         "fsdp",
+         "dp"
+       ],
+       "bias_head_sequence_axis": null,
+       "bias_key_sequence_axis": null,
+       "data_parallel_axis": "dp",
+       "decode_attention_dim_axis": null,
+       "decode_attention_kv_dim_axis": null,
+       "decode_batch_axis": [
+         "fsdp",
+         "dp"
+       ],
+       "decode_head_axis": "tp",
+       "decode_key_sequence_axis": "sp",
+       "decode_kv_head_axis": "tp",
+       "decode_query_sequence_axis": null,
+       "expert_axis": "ep",
+       "expert_gate_axis": null,
+       "expert_parallel_axis": "ep",
+       "fully_sharded_data_parallel_axis": "fsdp",
+       "head_axis": "tp",
+       "hidden_state_axis": "tp",
+       "key_sequence_axis": "sp",
+       "kv_head_axis": "tp",
+       "mlp_intermediate_axis": "tp",
+       "query_sequence_axis": "sp",
+       "sequence_axis": "sp",
+       "sequence_parallel_axis": "sp",
+       "tensor_parallel_axis": "tp",
+       "vocab_axis": "tp"
+     },
+     "patch_size": 14,
+     "platform": null,
+     "precompute_masks": true,
+     "pretraining_tp": 1,
+     "quantization_config": null,
+     "rms_norm_eps": 1e-05,
+     "scan_attention_layers": false,
+     "scan_mlp_chunk_size": 1024,
+     "scan_ring_attention": true,
+     "sequence_axis_name": "sp",
+     "sharding_axis_dims": [
+       1,
+       -1,
+       1,
+       1,
+       1
+     ],
+     "sharding_axis_names": [
+       "dp",
+       "fsdp",
+       "ep",
+       "tp",
+       "sp"
+     ],
+     "sharding_dcn_axis_dims": null,
+     "sp_is_ep_bound": true,
+     "spatial_merge_size": 2,
+     "temporal_patch_size": 2,
+     "use_expert_tensor_mode": false,
+     "use_ring_of_experts": false,
+     "use_scan_mlp": false,
+     "use_sharded_kv_caching": false,
+     "use_sharding_constraint": false
+   }
+ }
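
The `text_config` above implies a grouped-query attention (GQA) layout, which also explains the projection shapes in the parameter listing further below (a back-of-envelope sketch; every number is read from this config.json):

```python
# GQA dimensions from text_config: 32 query heads, 2 KV heads, head_dim 128.
num_attention_heads, num_key_value_heads, head_dim, hidden_size = 32, 2, 128, 4096

q_dim = num_attention_heads * head_dim   # 4096 -> q_proj kernel shape (4096, 4096)
kv_dim = num_key_value_heads * head_dim  # 256  -> k/v_proj kernel shapes (4096, 256)
group = num_attention_heads // num_key_value_heads  # 16 query heads per KV head

assert (hidden_size, q_dim) == (4096, 4096)  # matches q_proj/kernel/.zarray below
assert (hidden_size, kv_dim) == (4096, 256)  # matches k/v_proj/kernel/.zarray below
```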
generation_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "_from_model_config": true,
+   "do_sample": true,
+   "eos_token_id": [
+     151329,
+     151336,
+     151338,
+     151348
+   ],
+   "pad_token_id": 151329,
+   "temperature": 0.8,
+   "top_k": 2,
+   "top_p": 0.6,
+   "transformers_version": "4.57.3"
+ }
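
generation_config.json combines temperature, top-k, and top-p sampling. A minimal JAX sketch of that decoding rule (illustrative only; this is not EasyDeL's sampler, just the standard composition of the three filters with the values above):

```python
import jax
import jax.numpy as jnp

def sample_token(logits, key, temperature=0.8, top_k=2, top_p=0.6):
    """Temperature scaling, then top-k, then top-p, as configured above."""
    scaled = logits / temperature
    topk_vals, topk_idx = jax.lax.top_k(scaled, top_k)
    probs = jax.nn.softmax(topk_vals)
    # Keep tokens whose preceding cumulative mass is < top_p (first is always kept).
    keep = (jnp.cumsum(probs) - probs) < top_p
    masked = jnp.where(keep, topk_vals, -jnp.inf)
    return topk_idx[jax.random.categorical(key, masked)]

token = sample_token(jnp.array([2.0, 1.0, 0.5, -1.0]), jax.random.PRNGKey(0))
```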
model/params/model/language_model/layers/0/self_attn/k_proj/bias/0 ADDED
Binary file (484 Bytes).

model/params/model/language_model/layers/0/self_attn/o_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096,1024],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096,4096],"zarr_format":2}
model/params/model/language_model/layers/1/mlp/down_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[13696,1024],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[13696,4096],"zarr_format":2}

model/params/model/language_model/layers/1/self_attn/q_proj/bias/0 ADDED
Binary file (6.64 kB).

model/params/model/language_model/layers/1/self_attn/v_proj/bias/0 ADDED
Binary file (468 Bytes).

model/params/model/language_model/layers/10/input_layernorm/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096],"zarr_format":2}

model/params/model/language_model/layers/10/post_mlp_layernorm/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096],"zarr_format":2}

model/params/model/language_model/layers/10/self_attn/q_proj/bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096],"zarr_format":2}

model/params/model/language_model/layers/10/self_attn/q_proj/bias/0 ADDED
Binary file (6.54 kB).

model/params/model/language_model/layers/10/self_attn/q_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1024,4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096,4096],"zarr_format":2}

model/params/model/language_model/layers/10/self_attn/v_proj/bias/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[256],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[256],"zarr_format":2}

model/params/model/language_model/layers/10/self_attn/v_proj/bias/0 ADDED
Binary file (462 Bytes).

model/params/model/language_model/layers/10/self_attn/v_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1024,256],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096,256],"zarr_format":2}

model/params/model/language_model/layers/11/input_layernorm/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096],"zarr_format":2}

model/params/model/language_model/layers/11/input_layernorm/kernel/0 ADDED
Binary file (5.31 kB).

model/params/model/language_model/layers/11/mlp/down_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[13696,1024],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[13696,4096],"zarr_format":2}

model/params/model/language_model/layers/11/mlp/gate_up_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1024,27392],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096,27392],"zarr_format":2}

model/params/model/language_model/layers/11/post_attention_layernorm/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096],"zarr_format":2}

model/params/model/language_model/layers/11/post_attention_layernorm/kernel/0 ADDED
Binary file (5.27 kB).

model/params/model/language_model/layers/11/post_mlp_layernorm/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096],"zarr_format":2}

model/params/model/language_model/layers/11/post_mlp_layernorm/kernel/0 ADDED
Binary file (5.09 kB).

model/params/model/language_model/layers/11/post_self_attn_layernorm/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096],"zarr_format":2}

model/params/model/language_model/layers/11/post_self_attn_layernorm/kernel/0 ADDED
Binary file (5.13 kB).

model/params/model/language_model/layers/11/self_attn/k_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1024,256],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096,256],"zarr_format":2}

model/params/model/visual/blocks/4/norm1/kernel/0 ADDED
Binary file (2.24 kB).

model/params/model/visual/blocks/4/norm2/kernel/0 ADDED
Binary file (1.27 kB).

model/params/model/visual/blocks/5/attn/proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1536,384],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,1536],"zarr_format":2}

model/params/model/visual/blocks/5/attn/qkv/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[384,4608],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,4608],"zarr_format":2}

model/params/model/visual/blocks/5/mlp/gate_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[384,4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,4096],"zarr_format":2}

model/params/model/visual/blocks/5/mlp/up_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[384,4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,4096],"zarr_format":2}

model/params/model/visual/blocks/5/norm1/kernel/0 ADDED
Binary file (2.19 kB).

model/params/model/visual/blocks/5/norm2/kernel/0 ADDED
Binary file (1.18 kB).

model/params/model/visual/blocks/6/attn/proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1536,384],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,1536],"zarr_format":2}

model/params/model/visual/blocks/6/mlp/down_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096,384],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096,1536],"zarr_format":2}

model/params/model/visual/blocks/6/mlp/gate_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[384,4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,4096],"zarr_format":2}

model/params/model/visual/blocks/6/mlp/up_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[384,4096],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,4096],"zarr_format":2}

model/params/model/visual/blocks/6/norm1/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1536],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536],"zarr_format":2}

model/params/model/visual/blocks/6/norm2/kernel/0 ADDED
Binary file (1.15 kB).

model/params/model/visual/blocks/7/attn/qkv/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[384,4608],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,4608],"zarr_format":2}

model/params/model/visual/blocks/7/norm1/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1536],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536],"zarr_format":2}

model/params/model/visual/blocks/7/norm1/kernel/0 ADDED
Binary file (2.16 kB).

model/params/model/visual/blocks/7/norm2/kernel/0 ADDED
Binary file (1.04 kB).

model/params/model/visual/blocks/8/attn/qkv/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[384,4608],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536,4608],"zarr_format":2}

model/params/model/visual/blocks/8/mlp/down_proj/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[4096,384],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[4096,1536],"zarr_format":2}

model/params/model/visual/blocks/8/norm1/kernel/0 ADDED
Binary file (2.04 kB).

model/params/model/visual/blocks/8/norm2/kernel/.zarray ADDED
@@ -0,0 +1 @@
+ {"chunks":[1536],"compressor":{"id":"zstd","level":1},"dimension_separator":".","dtype":"bfloat16","fill_value":null,"filters":null,"order":"C","shape":[1536],"zarr_format":2}