benjamin-paine committed on
Commit
abbd1cf
·
verified ·
1 Parent(s): 17ec1e3

Update embeddings_processor/config.json

Browse files
Files changed (1) hide show
  1. embeddings_processor/config.json +2 -2
embeddings_processor/config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "_class_name": "LTX2EmbeddingsProcessorModel",
3
  "_diffusers_version": "0.37.0",
4
- "audio_apply_gated_attention": false,
5
  "audio_attention_head_dim": 64,
6
  "audio_double_precision_rope": true,
7
  "audio_num_attention_heads": 32,
@@ -14,7 +14,7 @@
14
  "feature_extractor_embedding_dim": 3840,
15
  "feature_extractor_flat_dim": 188160,
16
  "feature_extractor_video_dim": 4096,
17
- "video_apply_gated_attention": false,
18
  "video_attention_head_dim": 128,
19
  "video_double_precision_rope": true,
20
  "video_num_attention_heads": 32,
 
1
  {
2
  "_class_name": "LTX2EmbeddingsProcessorModel",
3
  "_diffusers_version": "0.37.0",
4
+ "audio_apply_gated_attention": true,
5
  "audio_attention_head_dim": 64,
6
  "audio_double_precision_rope": true,
7
  "audio_num_attention_heads": 32,
 
14
  "feature_extractor_embedding_dim": 3840,
15
  "feature_extractor_flat_dim": 188160,
16
  "feature_extractor_video_dim": 4096,
17
+ "video_apply_gated_attention": true,
18
  "video_attention_head_dim": 128,
19
  "video_double_precision_rope": true,
20
  "video_num_attention_heads": 32,