sreepathi-ravikumar committed on
Commit
31b0c24
·
verified ·
1 Parent(s): 141a650

Update rust_highlight/src/lib.rs

Browse files
Files changed (1) hide show
  1. rust_highlight/src/lib.rs +55 -122
rust_highlight/src/lib.rs CHANGED
@@ -10,18 +10,16 @@ use std::time::Instant;
10
  use std::path::Path;
11
  use std::f64::consts::PI;
12
  use rayon::prelude::*;
13
- use rayon::ThreadPoolBuilder;
14
 
15
  #[pyfunction]
16
  fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f64, clips_dir: String) -> PyResult<Option<String>> {
17
- // Validate audio file exists
18
  if !Path::new(&audio_path).exists() {
19
  return Err(pyo3::exceptions::PyFileNotFoundError::new_err(format!("Audio not found: {}", audio_path)));
20
  }
21
 
22
  let skip_spaces = false;
23
  let fps: f64 = 30.0;
24
- let animation_frames_per_char: usize = 1;
25
  let width: i32 = 1280;
26
  let height: i32 = 720;
27
  let margin_x: i32 = 40;
@@ -55,8 +53,7 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
55
  let text_area_width = width - 2 * margin_x;
56
  let (wrapped_lines, line_styles) = wrap_text_cv(&text, font, default_font_scale, default_thickness, text_area_width, header_font_scale, header_thickness);
57
 
58
- let full_text = wrapped_lines.join("
59
- ");
60
  if full_text.is_empty() {
61
  println!("No text to animate.");
62
  return Ok(None);
@@ -75,16 +72,10 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
75
  return Ok(None);
76
  }
77
 
78
- // Calculate durations with validation
79
  let animation_duration = duration / 4.0;
80
  let static_duration = duration - animation_duration;
81
 
82
- if static_duration <= 0.0 {
83
- return Err(pyo3::exceptions::PyValueError::new_err(
84
- format!("Invalid static_duration: {:.3}s (audio duration {:.3}s too short)", static_duration, duration)
85
- ));
86
- }
87
-
88
  println!("Animation duration: {:.3}s, Static duration: {:.3}s", animation_duration, static_duration);
89
 
90
  // Pre-calc line heights and y_positions
@@ -105,7 +96,7 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
105
 
106
  let t0 = Instant::now();
107
 
108
- // STEP 1: Pre-render the final static frame
109
  println!("Rendering static frame...");
110
  let static_frame = render_frame(
111
  &full_text,
@@ -138,15 +129,10 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
138
  opencv::imgcodecs::imwrite(&static_frame_path, &static_frame, &params)
139
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to save static frame: {}", e)))?;
140
 
141
- // Verify static frame was created
142
- if !Path::new(&static_frame_path).exists() {
143
- return Err(pyo3::exceptions::PyFileNotFoundError::new_err(
144
- format!("Static frame not created at: {}", static_frame_path)
145
- ));
146
- }
147
-
148
- // STEP 2: Build frame specifications
149
- println!("Building frame specifications...");
150
  let mut frame_specs: Vec<(String, i32, i32, usize)> = Vec::new();
151
  let mut prev_visible_sub = String::new();
152
 
@@ -175,8 +161,7 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
175
 
176
  println!("Total animation frames to render: {}", frame_specs.len());
177
 
178
- // STEP 3: Start FFmpeg process for animation
179
- println!("Starting FFmpeg process for animation...");
180
  let mut child = Command::new("ffmpeg")
181
  .arg("-y")
182
  .arg("-f").arg("rawvideo")
@@ -191,111 +176,74 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
191
  .arg("-pix_fmt").arg("yuv420p")
192
  .arg(&animation_video_path)
193
  .stdin(Stdio::piped())
194
- .stderr(Stdio::piped())
195
  .spawn()
196
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to spawn FFmpeg: {}", e)))?;
197
 
198
  let mut stdin = child.stdin.take().unwrap();
199
 
200
- // STEP 4: Parallel rendering with limited thread pool
201
- println!("Rendering animation frames in parallel...");
202
-
203
- // Create limited thread pool to avoid OpenCV contention
204
- let pool = ThreadPoolBuilder::new()
205
- .num_threads(4)
206
- .build()
207
- .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to build thread pool: {}", e)))?;
208
-
209
- let animation_frames: Vec<Vec<u8>> = pool.install(|| {
210
- frame_specs
211
- .par_iter()
212
- .map(|(visible_sub, pen_x, pen_y, anim_step)| {
213
- let anim_offset = (*anim_step as f64) / (animation_frames_per_char as f64);
214
- let frame = render_frame(
215
- visible_sub,
216
- *pen_x,
217
- *pen_y,
218
- anim_offset,
219
- width,
220
- height,
221
- &line_styles,
222
- &y_positions,
223
- margin_x,
224
- font,
225
- default_font_scale,
226
- header_font_scale,
227
- default_thickness,
228
- header_thickness,
229
- default_text_color,
230
- header_text_color,
231
- bg_color,
232
- pen_color,
233
- pen_tip_radius,
234
- pen_length,
235
- pen_thickness,
236
- pen_base_angle,
237
- pen_movement_amplitude,
238
- ).unwrap();
239
- frame.data_bytes().unwrap().to_vec()
240
- })
241
- .collect()
242
- });
243
 
244
  println!("Animation frames rendered in {:.3}s", t0.elapsed().as_secs_f64());
245
 
246
- // STEP 5: Write frames to FFmpeg in chunks to avoid broken pipe
247
- println!("Writing frames to FFmpeg...");
248
- const CHUNK_SIZE: usize = 1024 * 1024; // 1MB chunks
249
  let mut buffer: Vec<u8> = Vec::with_capacity(animation_frames.len() * width as usize * height as usize * 3);
250
-
251
  for frame_data in &animation_frames {
252
  buffer.extend_from_slice(frame_data);
253
  }
254
 
255
- // Write in chunks to prevent pipe overflow
256
- for chunk in buffer.chunks(CHUNK_SIZE) {
257
- stdin.write_all(chunk)
258
- .map_err(|e| pyo3::exceptions::PyIOError::new_err(
259
- format!("Failed to write frame chunk to FFmpeg (broken pipe?): {}", e)
260
- ))?;
261
- }
262
 
263
  drop(stdin);
264
-
265
- // Wait and check FFmpeg exit status
266
- let output = child.wait_with_output()
267
- .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("FFmpeg animation failed: {}", e)))?;
268
-
269
- if !output.status.success() {
270
- let stderr = String::from_utf8_lossy(&output.stderr);
271
- return Err(pyo3::exceptions::PyRuntimeError::new_err(
272
- format!("FFmpeg animation exited with status {:?}:
273
- {}", output.status.code(), stderr)
274
- ));
275
- }
276
-
277
- // Verify animation video was created
278
- if !Path::new(&animation_video_path).exists() {
279
- return Err(pyo3::exceptions::PyFileNotFoundError::new_err(
280
- format!("Animation video not created at: {}", animation_video_path)
281
- ));
282
- }
283
 
284
  println!("Animation video created in {:.3}s", t0.elapsed().as_secs_f64());
285
 
286
- // STEP 6: Combine animation + static frame + audio
287
- println!("Combining videos with FFmpeg filters...");
288
-
289
  let animation_actual_duration = frame_specs.len() as f64 / fps;
290
  let speed_multiplier = animation_duration / animation_actual_duration;
291
 
 
 
292
  let filter_complex = format!(
293
- "[0:v]setpts={}*PTS[v0];[1:v]loop=loop=-1:size=1:start=0,trim=duration={:.3}[v1];[v0][v1]concat=n=2:v=1:a=0[outv]",
294
  speed_multiplier,
295
  static_duration
296
  );
297
 
298
- let combine_output = Command::new("ffmpeg")
299
  .arg("-y")
300
  .arg("-i").arg(&animation_video_path)
301
  .arg("-loop").arg("1")
@@ -304,7 +252,6 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
304
  .arg("-filter_complex").arg(&filter_complex)
305
  .arg("-map").arg("[outv]")
306
  .arg("-map").arg("2:a:0")
307
- .arg("-vsync").arg("cfr")
308
  .arg("-c:v").arg("libx264")
309
  .arg("-preset").arg("ultrafast")
310
  .arg("-crf").arg("28")
@@ -312,16 +259,10 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
312
  .arg("-c:a").arg("aac")
313
  .arg("-shortest")
314
  .arg(&final_video_path)
315
- .output()
316
- .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to run FFmpeg combine: {}", e)))?;
317
-
318
- if !combine_output.status.success() {
319
- let stderr = String::from_utf8_lossy(&combine_output.stderr);
320
- return Err(pyo3::exceptions::PyRuntimeError::new_err(
321
- format!("FFmpeg combine exited with status {:?}:
322
- {}", combine_output.status.code(), stderr)
323
- ));
324
- }
325
 
326
  let elapsed = t0.elapsed().as_secs_f64();
327
  println!("Total processing time: {:.3}s", elapsed);
@@ -330,14 +271,6 @@ fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f6
330
  let _ = std::fs::remove_file(&animation_video_path);
331
  let _ = std::fs::remove_file(&static_frame_path);
332
 
333
- // Final verification before returning
334
- if !Path::new(&final_video_path).exists() {
335
- return Err(pyo3::exceptions::PyFileNotFoundError::new_err(
336
- format!("Final video was not created at: {}", final_video_path)
337
- ));
338
- }
339
-
340
- println!("✓ Video successfully generated: {}", final_video_path);
341
  Ok(Some(final_video_path))
342
  }
343
 
 
10
  use std::path::Path;
11
  use std::f64::consts::PI;
12
  use rayon::prelude::*;
 
13
 
14
  #[pyfunction]
15
  fn generate_video_clip(id: usize, text: String, audio_path: String, duration: f64, clips_dir: String) -> PyResult<Option<String>> {
 
16
  if !Path::new(&audio_path).exists() {
17
  return Err(pyo3::exceptions::PyFileNotFoundError::new_err(format!("Audio not found: {}", audio_path)));
18
  }
19
 
20
  let skip_spaces = false;
21
  let fps: f64 = 30.0;
22
+ let animation_frames_per_char: usize = 1; // Reduced from 2 for speed
23
  let width: i32 = 1280;
24
  let height: i32 = 720;
25
  let margin_x: i32 = 40;
 
53
  let text_area_width = width - 2 * margin_x;
54
  let (wrapped_lines, line_styles) = wrap_text_cv(&text, font, default_font_scale, default_thickness, text_area_width, header_font_scale, header_thickness);
55
 
56
+ let full_text = wrapped_lines.join("\n");
 
57
  if full_text.is_empty() {
58
  println!("No text to animate.");
59
  return Ok(None);
 
72
  return Ok(None);
73
  }
74
 
75
+ // Calculate durations
76
  let animation_duration = duration / 4.0;
77
  let static_duration = duration - animation_duration;
78
 
 
 
 
 
 
 
79
  println!("Animation duration: {:.3}s, Static duration: {:.3}s", animation_duration, static_duration);
80
 
81
  // Pre-calc line heights and y_positions
 
96
 
97
  let t0 = Instant::now();
98
 
99
+ // STEP 1: Pre-render the final static frame (used for Phase 2)
100
  println!("Rendering static frame...");
101
  let static_frame = render_frame(
102
  &full_text,
 
129
  opencv::imgcodecs::imwrite(&static_frame_path, &static_frame, &params)
130
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to save static frame: {}", e)))?;
131
 
132
+ // STEP 2: Render animation frames in parallel (HUGE speedup)
133
+ println!("Rendering animation frames in parallel...");
134
+
135
+ // Collect frame data for parallel processing
 
 
 
 
 
136
  let mut frame_specs: Vec<(String, i32, i32, usize)> = Vec::new();
137
  let mut prev_visible_sub = String::new();
138
 
 
161
 
162
  println!("Total animation frames to render: {}", frame_specs.len());
163
 
164
+ // STEP 3: Start FFmpeg process first
 
165
  let mut child = Command::new("ffmpeg")
166
  .arg("-y")
167
  .arg("-f").arg("rawvideo")
 
176
  .arg("-pix_fmt").arg("yuv420p")
177
  .arg(&animation_video_path)
178
  .stdin(Stdio::piped())
 
179
  .spawn()
180
  .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to spawn FFmpeg: {}", e)))?;
181
 
182
  let mut stdin = child.stdin.take().unwrap();
183
 
184
+ // Parallel rendering using rayon and streaming to FFmpeg
185
+ let animation_frames: Vec<Vec<u8>> = frame_specs
186
+ .par_iter()
187
+ .map(|(visible_sub, pen_x, pen_y, anim_step)| {
188
+ let anim_offset = (*anim_step as f64) / (animation_frames_per_char as f64);
189
+ let frame = render_frame(
190
+ visible_sub,
191
+ *pen_x,
192
+ *pen_y,
193
+ anim_offset,
194
+ width,
195
+ height,
196
+ &line_styles,
197
+ &y_positions,
198
+ margin_x,
199
+ font,
200
+ default_font_scale,
201
+ header_font_scale,
202
+ default_thickness,
203
+ header_thickness,
204
+ default_text_color,
205
+ header_text_color,
206
+ bg_color,
207
+ pen_color,
208
+ pen_tip_radius,
209
+ pen_length,
210
+ pen_thickness,
211
+ pen_base_angle,
212
+ pen_movement_amplitude,
213
+ ).unwrap();
214
+ frame.data_bytes().unwrap().to_vec()
215
+ })
216
+ .collect();
 
 
 
 
 
 
 
 
 
 
217
 
218
  println!("Animation frames rendered in {:.3}s", t0.elapsed().as_secs_f64());
219
 
220
+ // Write all animation frames in one large batch
 
 
221
  let mut buffer: Vec<u8> = Vec::with_capacity(animation_frames.len() * width as usize * height as usize * 3);
 
222
  for frame_data in &animation_frames {
223
  buffer.extend_from_slice(frame_data);
224
  }
225
 
226
+ stdin.write_all(&buffer)
227
+ .map_err(|e| pyo3::exceptions::PyIOError::new_err(format!("Failed to write animation frames: {}", e)))?;
 
 
 
 
 
228
 
229
  drop(stdin);
230
+ child.wait().map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("FFmpeg animation failed: {}", e)))?;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
231
 
232
  println!("Animation video created in {:.3}s", t0.elapsed().as_secs_f64());
233
 
234
+ // STEP 4: Combine animation + static frame + audio using FFmpeg filters
 
 
235
  let animation_actual_duration = frame_specs.len() as f64 / fps;
236
  let speed_multiplier = animation_duration / animation_actual_duration;
237
 
238
+ println!("Combining videos with FFmpeg filters...");
239
+
240
  let filter_complex = format!(
241
+ "[0:v]setpts={}*PTS[v0];[1:v]loop=loop=-1:size=1:start=0,trim=duration={}[v1];[v0][v1]concat=n=2:v=1:a=0[outv]",
242
  speed_multiplier,
243
  static_duration
244
  );
245
 
246
+ let mut combine_child = Command::new("ffmpeg")
247
  .arg("-y")
248
  .arg("-i").arg(&animation_video_path)
249
  .arg("-loop").arg("1")
 
252
  .arg("-filter_complex").arg(&filter_complex)
253
  .arg("-map").arg("[outv]")
254
  .arg("-map").arg("2:a:0")
 
255
  .arg("-c:v").arg("libx264")
256
  .arg("-preset").arg("ultrafast")
257
  .arg("-crf").arg("28")
 
259
  .arg("-c:a").arg("aac")
260
  .arg("-shortest")
261
  .arg(&final_video_path)
262
+ .spawn()
263
+ .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("Failed to spawn FFmpeg for combine: {}", e)))?;
264
+
265
+ combine_child.wait().map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("FFmpeg combine failed: {}", e)))?;
 
 
 
 
 
 
266
 
267
  let elapsed = t0.elapsed().as_secs_f64();
268
  println!("Total processing time: {:.3}s", elapsed);
 
271
  let _ = std::fs::remove_file(&animation_video_path);
272
  let _ = std::fs::remove_file(&static_frame_path);
273
 
 
 
 
 
 
 
 
 
274
  Ok(Some(final_video_path))
275
  }
276