sreepathi-ravikumar committed on
Commit
706d234
·
verified ·
1 Parent(s): b0d8952

Update rust_highlight/src/lib.rs

Browse files
Files changed (1) hide show
  1. rust_highlight/src/lib.rs +129 -60
rust_highlight/src/lib.rs CHANGED
@@ -10,16 +10,18 @@ use std::time::Instant;
10
  use std::path::Path;
11
  use std::f64::consts::PI;
12
  use rayon::prelude::*;
 
13
 
14
  #[pyfunction]
15
  fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f64, clips_dir: String) -> PyResult<Option<String>> {
 
16
  if !Path::new(&audio_path).exists() {
17
  return Err(pyo3::exceptions::PyFileNotFoundError::new_err(format!("Audio not found: {}", audio_path)));
18
  }
19
 
20
  let skip_spaces = false;
21
  let fps: f64 = 30.0;
22
- let animation_frames_per_char: usize = 1; // Reduced from 2 for speed
23
  let width: i32 = 1280;
24
  let height: i32 = 720;
25
  let margin_x: i32 = 40;
@@ -53,14 +55,16 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
53
  let text_area_width = width - 2 * margin_x;
54
  let (wrapped_lines, line_styles) = wrap_text_cv(&text, font, default_font_scale, default_thickness, text_area_width, header_font_scale, header_thickness);
55
 
56
- let full_text = wrapped_lines.join("\n");
 
57
  if full_text.is_empty() {
58
  println!("No text to animate.");
59
  return Ok(None);
60
  }
61
 
62
  let visible_indices: Vec<usize> = if skip_spaces {
63
- full_text.char_indices().filter(|&(_, ch)| ch != ' ' && ch != '\n' && ch != '\t').map(|(i, _)| i).collect()
 
64
  } else {
65
  (0..full_text.len()).collect()
66
  };
@@ -72,10 +76,16 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
72
  return Ok(None);
73
  }
74
 
75
- // Calculate durations
76
  let animation_duration = duration / 4.0;
77
  let static_duration = duration - animation_duration;
78
 
 
 
 
 
 
 
79
  println!("Animation duration: {:.3}s, Static duration: {:.3}s", animation_duration, static_duration);
80
 
81
  // Pre-calc line heights and y_positions
@@ -96,7 +106,7 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
96
 
97
  let t0 = Instant::now();
98
 
99
- // STEP 1: Pre-render the final static frame (used for Phase 2)
100
  println!("Rendering static frame...");
101
  let static_frame = render_frame(
102
  &full_text,
@@ -129,17 +139,23 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
129
  opencv::imgcodecs::imwrite(&static_frame_path, &static_frame, &params)
130
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to save static frame: {}", e)))?;
131
 
132
- // STEP 2: Render animation frames in parallel (HUGE speedup)
133
- println!("Rendering animation frames in parallel...");
134
-
135
- // Collect frame data for parallel processing
 
 
 
 
 
136
  let mut frame_specs: Vec<(String, i32, i32, usize)> = Vec::new();
137
  let mut prev_visible_sub = String::new();
138
 
139
  for &idx_in_full in visible_indices.iter() {
140
  let visible_sub = full_text[0..=idx_in_full].to_string();
141
  if visible_sub != prev_visible_sub {
142
- let lines: Vec<&str> = visible_sub.split('\n').collect();
 
143
  let last_line = lines.last().unwrap();
144
  let line_idx = lines.len() - 1;
145
  let is_header = line_styles[line_idx];
@@ -161,7 +177,8 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
161
 
162
  println!("Total animation frames to render: {}", frame_specs.len());
163
 
164
- // STEP 3: Start FFmpeg process first
 
165
  let mut child = Command::new("ffmpeg")
166
  .arg("-y")
167
  .arg("-f").arg("rawvideo")
@@ -176,74 +193,111 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
176
  .arg("-pix_fmt").arg("yuv420p")
177
  .arg(&animation_video_path)
178
  .stdin(Stdio::piped())
 
179
  .spawn()
180
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to spawn FFmpeg: {}", e)))?;
181
 
182
  let mut stdin = child.stdin.take().unwrap();
183
 
184
- // Parallel rendering using rayon and streaming to FFmpeg
185
- let animation_frames: Vec<Vec<u8>> = frame_specs
186
- .par_iter()
187
- .map(|(visible_sub, pen_x, pen_y, anim_step)| {
188
- let anim_offset = (*anim_step as f64) / (animation_frames_per_char as f64);
189
- let frame = render_frame(
190
- visible_sub,
191
- *pen_x,
192
- *pen_y,
193
- anim_offset,
194
- width,
195
- height,
196
- &line_styles,
197
- &y_positions,
198
- margin_x,
199
- font,
200
- default_font_scale,
201
- header_font_scale,
202
- default_thickness,
203
- header_thickness,
204
- default_text_color,
205
- header_text_color,
206
- bg_color,
207
- pen_color,
208
- pen_tip_radius,
209
- pen_length,
210
- pen_thickness,
211
- pen_base_angle,
212
- pen_movement_amplitude,
213
- ).unwrap();
214
- frame.data_bytes().unwrap().to_vec()
215
- })
216
- .collect();
 
 
 
 
 
 
 
 
 
 
217
 
218
  println!("Animation frames rendered in {:.3}s", t0.elapsed().as_secs_f64());
219
 
220
- // Write all animation frames in one large batch
 
 
221
  let mut buffer: Vec<u8> = Vec::with_capacity(animation_frames.len() * width as usize * height as usize * 3);
 
222
  for frame_data in &animation_frames {
223
  buffer.extend_from_slice(frame_data);
224
  }
225
 
226
- stdin.write_all(&buffer)
227
- .map_err(|e| pyo3::exceptions::PyIOError::new_err(format!("Failed to write animation frames: {}", e)))?;
 
 
 
 
 
228
 
229
  drop(stdin);
230
- child.wait().map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("FFmpeg animation failed: {}", e)))?;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
231
 
232
  println!("Animation video created in {:.3}s", t0.elapsed().as_secs_f64());
233
 
234
- // STEP 4: Combine animation + static frame + audio using FFmpeg filters
 
 
235
  let animation_actual_duration = frame_specs.len() as f64 / fps;
236
  let speed_multiplier = animation_duration / animation_actual_duration;
237
 
238
- println!("Combining videos with FFmpeg filters...");
239
-
240
  let filter_complex = format!(
241
- "[0:v]setpts={}*PTS[v0];[1:v]loop=loop=-1:size=1:start=0,trim=duration={}[v1];[v0][v1]concat=n=2:v=1:a=0[outv]",
242
  speed_multiplier,
243
  static_duration
244
  );
245
 
246
- let mut combine_child = Command::new("ffmpeg")
247
  .arg("-y")
248
  .arg("-i").arg(&animation_video_path)
249
  .arg("-loop").arg("1")
@@ -252,6 +306,7 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
252
  .arg("-filter_complex").arg(&filter_complex)
253
  .arg("-map").arg("[outv]")
254
  .arg("-map").arg("2:a:0")
 
255
  .arg("-c:v").arg("libx264")
256
  .arg("-preset").arg("ultrafast")
257
  .arg("-crf").arg("28")
@@ -259,10 +314,16 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
259
  .arg("-c:a").arg("aac")
260
  .arg("-shortest")
261
  .arg(&final_video_path)
262
- .spawn()
263
- .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to spawn FFmpeg for combine: {}", e)))?;
264
-
265
- combine_child.wait().map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("FFmpeg combine failed: {}", e)))?;
 
 
 
 
 
 
266
 
267
  let elapsed = t0.elapsed().as_secs_f64();
268
  println!("Total processing time: {:.3}s", elapsed);
@@ -271,6 +332,14 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
271
  let _ = std::fs::remove_file(&animation_video_path);
272
  let _ = std::fs::remove_file(&static_frame_path);
273
 
 
 
 
 
 
 
 
 
274
  Ok(Some(final_video_path))
275
  }
276
 
@@ -363,7 +432,8 @@ fn render_frame(
363
  let mut img = Mat::new_rows_cols_with_default(height, width, CV_8UC3, bg_color)
364
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to create Mat: {}", e)))?;
365
 
366
- let lines: Vec<&str> = visible_text.split('\n').collect();
 
367
  for (idx, &line) in lines.iter().enumerate() {
368
  let is_header = line_styles[idx];
369
  let font_scale = if is_header { header_font_scale } else { default_font_scale };
@@ -400,5 +470,4 @@ fn render_frame(
400
  fn rust_highlight(_py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
401
  m.add_function(wrap_pyfunction!(generate_video_clip, m)?)?;
402
  Ok(())
403
- }
404
-
 
10
  use std::path::Path;
11
  use std::f64::consts::PI;
12
  use rayon::prelude::*;
13
+ use rayon::ThreadPoolBuilder;
14
 
15
  #[pyfunction]
16
  fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f64, clips_dir: String) -> PyResult<Option<String>> {
17
+ // Validate audio file exists
18
  if !Path::new(&audio_path).exists() {
19
  return Err(pyo3::exceptions::PyFileNotFoundError::new_err(format!("Audio not found: {}", audio_path)));
20
  }
21
 
22
  let skip_spaces = false;
23
  let fps: f64 = 30.0;
24
+ let animation_frames_per_char: usize = 1;
25
  let width: i32 = 1280;
26
  let height: i32 = 720;
27
  let margin_x: i32 = 40;
 
55
  let text_area_width = width - 2 * margin_x;
56
  let (wrapped_lines, line_styles) = wrap_text_cv(&text, font, default_font_scale, default_thickness, text_area_width, header_font_scale, header_thickness);
57
 
58
+ let full_text = wrapped_lines.join("\n");
60
  if full_text.is_empty() {
61
  println!("No text to animate.");
62
  return Ok(None);
63
  }
64
 
65
  let visible_indices: Vec<usize> = if skip_spaces {
66
+ full_text.char_indices().filter(|&(_, ch)| ch != ' ' && ch != '\n' && ch != '\t').map(|(i, _)| i).collect()
68
  } else {
69
  (0..full_text.len()).collect()
70
  };
 
76
  return Ok(None);
77
  }
78
 
79
+ // Calculate durations with validation
80
  let animation_duration = duration / 4.0;
81
  let static_duration = duration - animation_duration;
82
 
83
+ if static_duration <= 0.0 {
84
+ return Err(pyo3::exceptions::PyValueError::new_err(
85
+ format!("Invalid static_duration: {:.3}s (audio duration {:.3}s too short)", static_duration, duration)
86
+ ));
87
+ }
88
+
89
  println!("Animation duration: {:.3}s, Static duration: {:.3}s", animation_duration, static_duration);
90
 
91
  // Pre-calc line heights and y_positions
 
106
 
107
  let t0 = Instant::now();
108
 
109
+ // STEP 1: Pre-render the final static frame
110
  println!("Rendering static frame...");
111
  let static_frame = render_frame(
112
  &full_text,
 
139
  opencv::imgcodecs::imwrite(&static_frame_path, &static_frame, &params)
140
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to save static frame: {}", e)))?;
141
 
142
+ // Verify static frame was created
143
+ if !Path::new(&static_frame_path).exists() {
144
+ return Err(pyo3::exceptions::PyFileNotFoundError::new_err(
145
+ format!("Static frame not created at: {}", static_frame_path)
146
+ ));
147
+ }
148
+
149
+ // STEP 2: Build frame specifications
150
+ println!("Building frame specifications...");
151
  let mut frame_specs: Vec<(String, i32, i32, usize)> = Vec::new();
152
  let mut prev_visible_sub = String::new();
153
 
154
  for &idx_in_full in visible_indices.iter() {
155
  let visible_sub = full_text[0..=idx_in_full].to_string();
156
  if visible_sub != prev_visible_sub {
157
+ let lines: Vec<&str> = visible_sub.split('\n').collect();
159
  let last_line = lines.last().unwrap();
160
  let line_idx = lines.len() - 1;
161
  let is_header = line_styles[line_idx];
 
177
 
178
  println!("Total animation frames to render: {}", frame_specs.len());
179
 
180
+ // STEP 3: Start FFmpeg process for animation
181
+ println!("Starting FFmpeg process for animation...");
182
  let mut child = Command::new("ffmpeg")
183
  .arg("-y")
184
  .arg("-f").arg("rawvideo")
 
193
  .arg("-pix_fmt").arg("yuv420p")
194
  .arg(&animation_video_path)
195
  .stdin(Stdio::piped())
196
+ .stderr(Stdio::piped())
197
  .spawn()
198
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to spawn FFmpeg: {}", e)))?;
199
 
200
  let mut stdin = child.stdin.take().unwrap();
201
 
202
+ // STEP 4: Parallel rendering with limited thread pool
203
+ println!("Rendering animation frames in parallel...");
204
+
205
+ // Create limited thread pool to avoid OpenCV contention
206
+ let pool = ThreadPoolBuilder::new()
207
+ .num_threads(4)
208
+ .build()
209
+ .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to build thread pool: {}", e)))?;
210
+
211
+ let animation_frames: Vec<Vec<u8>> = pool.install(|| {
212
+ frame_specs
213
+ .par_iter()
214
+ .map(|(visible_sub, pen_x, pen_y, anim_step)| {
215
+ let anim_offset = (*anim_step as f64) / (animation_frames_per_char as f64);
216
+ let frame = render_frame(
217
+ visible_sub,
218
+ *pen_x,
219
+ *pen_y,
220
+ anim_offset,
221
+ width,
222
+ height,
223
+ &line_styles,
224
+ &y_positions,
225
+ margin_x,
226
+ font,
227
+ default_font_scale,
228
+ header_font_scale,
229
+ default_thickness,
230
+ header_thickness,
231
+ default_text_color,
232
+ header_text_color,
233
+ bg_color,
234
+ pen_color,
235
+ pen_tip_radius,
236
+ pen_length,
237
+ pen_thickness,
238
+ pen_base_angle,
239
+ pen_movement_amplitude,
240
+ ).unwrap();
241
+ frame.data_bytes().unwrap().to_vec()
242
+ })
243
+ .collect()
244
+ });
245
 
246
  println!("Animation frames rendered in {:.3}s", t0.elapsed().as_secs_f64());
247
 
248
+ // STEP 5: Write frames to FFmpeg in chunks to avoid broken pipe
249
+ println!("Writing frames to FFmpeg...");
250
+ const CHUNK_SIZE: usize = 1024 * 1024; // 1MB chunks
251
  let mut buffer: Vec<u8> = Vec::with_capacity(animation_frames.len() * width as usize * height as usize * 3);
252
+
253
  for frame_data in &animation_frames {
254
  buffer.extend_from_slice(frame_data);
255
  }
256
 
257
+ // Write in chunks to prevent pipe overflow
258
+ for chunk in buffer.chunks(CHUNK_SIZE) {
259
+ stdin.write_all(chunk)
260
+ .map_err(|e| pyo3::exceptions::PyIOError::new_err(
261
+ format!("Failed to write frame chunk to FFmpeg (broken pipe?): {}", e)
262
+ ))?;
263
+ }
264
 
265
  drop(stdin);
266
+
267
+ // Wait and check FFmpeg exit status
268
+ let output = child.wait_with_output()
269
+ .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("FFmpeg animation failed: {}", e)))?;
270
+
271
+ if !output.status.success() {
272
+ let stderr = String::from_utf8_lossy(&output.stderr);
273
+ return Err(pyo3::exceptions::PyRuntimeError::new_err(
274
+ format!("FFmpeg animation exited with status {:?}:\n{}", output.status.code(), stderr)
276
+ ));
277
+ }
278
+
279
+ // Verify animation video was created
280
+ if !Path::new(&animation_video_path).exists() {
281
+ return Err(pyo3::exceptions::PyFileNotFoundError::new_err(
282
+ format!("Animation video not created at: {}", animation_video_path)
283
+ ));
284
+ }
285
 
286
  println!("Animation video created in {:.3}s", t0.elapsed().as_secs_f64());
287
 
288
+ // STEP 6: Combine animation + static frame + audio
289
+ println!("Combining videos with FFmpeg filters...");
290
+
291
  let animation_actual_duration = frame_specs.len() as f64 / fps;
292
  let speed_multiplier = animation_duration / animation_actual_duration;
293
 
 
 
294
  let filter_complex = format!(
295
+ "[0:v]setpts={}*PTS[v0];[1:v]loop=loop=-1:size=1:start=0,trim=duration={:.3}[v1];[v0][v1]concat=n=2:v=1:a=0[outv]",
296
  speed_multiplier,
297
  static_duration
298
  );
299
 
300
+ let combine_output = Command::new("ffmpeg")
301
  .arg("-y")
302
  .arg("-i").arg(&animation_video_path)
303
  .arg("-loop").arg("1")
 
306
  .arg("-filter_complex").arg(&filter_complex)
307
  .arg("-map").arg("[outv]")
308
  .arg("-map").arg("2:a:0")
309
+ .arg("-vsync").arg("cfr")
310
  .arg("-c:v").arg("libx264")
311
  .arg("-preset").arg("ultrafast")
312
  .arg("-crf").arg("28")
 
314
  .arg("-c:a").arg("aac")
315
  .arg("-shortest")
316
  .arg(&final_video_path)
317
+ .output()
318
+ .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to run FFmpeg combine: {}", e)))?;
319
+
320
+ if !combine_output.status.success() {
321
+ let stderr = String::from_utf8_lossy(&combine_output.stderr);
322
+ return Err(pyo3::exceptions::PyRuntimeError::new_err(
323
+ format!("FFmpeg combine exited with status {:?}:\n{}", combine_output.status.code(), stderr)
325
+ ));
326
+ }
327
 
328
  let elapsed = t0.elapsed().as_secs_f64();
329
  println!("Total processing time: {:.3}s", elapsed);
 
332
  let _ = std::fs::remove_file(&animation_video_path);
333
  let _ = std::fs::remove_file(&static_frame_path);
334
 
335
+ // Final verification before returning
336
+ if !Path::new(&final_video_path).exists() {
337
+ return Err(pyo3::exceptions::PyFileNotFoundError::new_err(
338
+ format!("Final video was not created at: {}", final_video_path)
339
+ ));
340
+ }
341
+
342
+ println!("✓ Video successfully generated: {}", final_video_path);
343
  Ok(Some(final_video_path))
344
  }
345
 
 
432
  let mut img = Mat::new_rows_cols_with_default(height, width, CV_8UC3, bg_color)
433
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to create Mat: {}", e)))?;
434
 
435
+ let lines: Vec<&str> = visible_text.split('\n').collect();
437
  for (idx, &line) in lines.iter().enumerate() {
438
  let is_header = line_styles[idx];
439
  let font_scale = if is_header { header_font_scale } else { default_font_scale };
 
470
  fn rust_highlight(_py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
471
  m.add_function(wrap_pyfunction!(generate_video_clip, m)?)?;
472
  Ok(())
473
+ }