Update config.json
config.json CHANGED (+3 -3)
@@ -4,7 +4,7 @@
   "anyres_vit_max_image_size": 2048,
   "anyres_vit_two_views": false,
   "architectures": [
-    "
+    "HunYuanVLForConditionalGeneration"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -33,7 +33,7 @@
   "max_position_embeddings": 32768,
   "tie_word_embeddings": true,
   "mlp_bias": false,
-  "model_type": "
+  "model_type": "hunyuan_vl",
   "moe_drop_tokens": false,
   "moe_intermediate_size": null,
   "moe_layer_num_skipped": 0,
@@ -71,7 +71,7 @@
     "factor": 1.0,
     "mscale": 1.0,
     "mscale_all_dim": 1.0,
-    "type": "
+    "type": "xdrope"
   },
   "rope_theta": 10000.0,
   "routed_scaling_factor": 1.0,