FastFlowLM committed
Commit af67e78 · verified · 1 Parent(s): abfdf5c

feat: upload vision part

Files changed (5):
  1. .gitattributes +3 -0
  2. config.json +37 -34
  3. vision_attn.xclbin +3 -0
  4. vision_mm.xclbin +3 -0
  5. vision_weight.q4nx +3 -0
.gitattributes CHANGED
@@ -40,3 +40,6 @@ lm_head.xclbin filter=lfs diff=lfs merge=lfs -text
 mm.xclbin filter=lfs diff=lfs merge=lfs -text
 model.q4nx filter=lfs diff=lfs merge=lfs -text
 tokenizer.json filter=lfs diff=lfs merge=lfs -text
+vision_attn.xclbin filter=lfs diff=lfs merge=lfs -text
+vision_mm.xclbin filter=lfs diff=lfs merge=lfs -text
+vision_weight.q4nx filter=lfs diff=lfs merge=lfs -text
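The three new rules route the vision binaries through Git LFS, so the repository itself stores small pointer stubs while the payloads live in LFS storage. As a minimal sketch (plain Python; the script and the .gitattributes path are illustrative, not part of this repo), listing which patterns a .gitattributes file hands to LFS looks like this:

```python
# lfs_patterns.py -- list patterns routed through Git LFS.
# Assumes a .gitattributes in the current directory; adjust the path as needed.
from pathlib import Path

def lfs_patterns(gitattributes: Path) -> list[str]:
    patterns = []
    for line in gitattributes.read_text().splitlines():
        parts = line.split()
        # An LFS rule sets the lfs filter, e.g.
        # "vision_attn.xclbin filter=lfs diff=lfs merge=lfs -text"
        if parts and "filter=lfs" in parts[1:]:
            patterns.append(parts[0])
    return patterns

if __name__ == "__main__":
    for pattern in lfs_patterns(Path(".gitattributes")):
        print(pattern)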
config.json CHANGED
@@ -1,34 +1,37 @@
-{
-  "model_type": "qwen3",
-  "vocab_size": 151936,
-  "max_position_embeddings": 40960,
-  "hidden_size": 2560,
-  "intermediate_size": 9728,
-  "num_hidden_layers": 36,
-  "num_attention_heads": 32,
-  "use_sliding_window": false,
-  "sliding_window": null,
-  "max_window_layers": 36,
-  "num_key_value_heads": 8,
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "initializer_range": 0.02,
-  "rms_norm_eps": 1e-06,
-  "use_cache": true,
-  "rope_theta": 1000000,
-  "rope_scaling": null,
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.45.0.dev0",
-  "use_cache": true,
-  "addr_qk": 9216,
-  "addr_kv": 34816,
-  "addr_l_begin_mha": 54272,
-  "addr_l_end_mha": 25600,
-  "addr_kk": 45056,
-  "flm_version": "0.9.6"
-}
+{
+  "model_type": "qwen3",
+  "vocab_size": 151936,
+  "max_position_embeddings": 40960,
+  "hidden_size": 2560,
+  "intermediate_size": 9728,
+  "num_hidden_layers": 36,
+  "num_attention_heads": 32,
+  "use_sliding_window": false,
+  "sliding_window": null,
+  "max_window_layers": 36,
+  "num_key_value_heads": 8,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "initializer_range": 0.02,
+  "rms_norm_eps": 1e-06,
+  "use_cache": true,
+  "rope_theta": 1000000,
+  "rope_scaling": null,
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.45.0.dev0",
+  "use_cache": true,
+  "addr_qk": 9216,
+  "addr_kv": 34816,
+  "addr_l_begin_mha": 54272,
+  "addr_l_end_mha": 25600,
+  "addr_kk": 45056,
+  "flm_version": "0.9.16",
+  "vision_model_weight": "vision_weight.q4nx",
+  "vision_mm_engine_xclbin_name": "vision_mm.xclbin",
+  "vision_mha_engine_xclbin_name":"vision_attn.xclbin"
+}
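Besides bumping flm_version from 0.9.6 to 0.9.16, the new keys name the vision artifacts uploaded in this commit: the quantized vision weights plus two NPU engine bitstreams (judging by the names, "mm" is a matrix-multiply engine and "mha" an attention engine; that reading is an inference, not documented here). A minimal sketch of how a loader might resolve these keys follows, with the function name and fallback behavior as assumptions rather than FastFlowLM's actual API:

```python
# load_vision_config.py -- resolve the vision artifacts named in config.json.
# Only the key names come from the commit above; everything else is illustrative.
import json
from pathlib import Path

def vision_artifacts(model_dir: str) -> dict[str, Path] | None:
    config = json.loads((Path(model_dir) / "config.json").read_text())
    keys = (
        "vision_model_weight",           # quantized vision weights (.q4nx)
        "vision_mm_engine_xclbin_name",  # NPU bitstream, presumably matmul
        "vision_mha_engine_xclbin_name", # NPU bitstream, presumably attention
    )
    if not all(k in config for k in keys):
        return None  # assumed fallback: treat as a text-only checkpoint
    return {k: Path(model_dir) / config[k] for k in keys}
```

Keeping the engine binaries as separate files referenced by name lets the same config format describe both text-only and multimodal checkpoints, which is consistent with these keys simply being absent before this commit.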
vision_attn.xclbin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6abbd4157ea07d048bfb1b717a9c6f72044c07d7b60169bd921a4d9a9acfdda0
+size 242043
vision_mm.xclbin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1a419730401233f197ee8fbd1a0ee50339d25975a244325a9ca79fa44228de6a
+size 580539
vision_weight.q4nx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:81ec06fa8b590e8a307e11eea29231e715fc83033ac88f2daba6448b878b36c1
+size 830730632
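Each ADDED entry above is a Git LFS pointer rather than the binary itself: version identifies the pointer spec, oid is the SHA-256 of the real content, and size is its byte count (about 830 MB for the vision weights). A minimal sketch (plain Python; the function and file names are illustrative) for verifying a downloaded blob against such a pointer:

```python
# verify_lfs.py -- check a downloaded blob against its Git LFS pointer.
import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    # Parse "key value" lines: version, oid sha256:<hex>, size <bytes>.
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if line.strip()
    )
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    blob = Path(blob_path)
    if blob.stat().st_size != expected_size:
        return False  # cheap check first: sizes must match exactly
    digest = hashlib.sha256()
    with blob.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# e.g. verify_lfs_pointer("vision_weight.q4nx.pointer", "vision_weight.q4nx")
```

Note that in a checkout without LFS installed, the tracked path itself contains the pointer text; `git lfs pull` replaces it with the real binary.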