|
|
import gradio as gr |
|
|
import numpy as np |
|
|
import cv2 |
|
|
from typing import Tuple |
|
|
import os |
|
|
import tempfile |
|
|
|
|
|
def apply_normal_map_depth(video_path: str, normal_map_path: str, depth_strength: float) -> str:
    """
    Apply a normal-map-based brightness/depth effect to every frame of a video.

    The normal map is collapsed to grayscale, scaled by ``depth_strength``, and
    added to each frame as a per-pixel brightness offset.

    Args:
        video_path: Path to the input video file.
        normal_map_path: Path to the normal map image.
        depth_strength: Strength of the depth effect (UI slider range 0.1-2.0).

    Returns:
        Path to the output MP4 video with the depth effect applied.

    Raises:
        gr.Error: If the normal map image or the video cannot be opened.
    """
    normal_map = cv2.imread(normal_map_path)
    if normal_map is None:
        raise gr.Error("Failed to load normal map image")

    # Collapse the normal map to a single luminance channel in [0, 1].
    normal_map_gray = cv2.cvtColor(normal_map, cv2.COLOR_BGR2GRAY)
    normal_map_gray = normal_map_gray.astype(np.float32) / 255.0

    # Use a context manager so the temp file's handle is closed immediately;
    # delete=False keeps the path on disk for VideoWriter to reuse.
    with tempfile.NamedTemporaryFile(suffix='.mp4', delete=False) as tmp:
        output_path = tmp.name

    cap = cv2.VideoCapture(video_path)
    if not cap.isOpened():
        raise gr.Error("Failed to open video file")

    width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    # Some containers report 0 fps; fall back to a sane default so the
    # VideoWriter is not created with an invalid frame rate.
    fps = cap.get(cv2.CAP_PROP_FPS) or 30.0
    total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))

    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    out = cv2.VideoWriter(output_path, fourcc, fps, (width, height))

    # Resize once to match the video, and precompute the additive offset:
    # it is loop-invariant, so hoisting it avoids a per-frame multiply.
    normal_map_resized = cv2.resize(normal_map_gray, (width, height))
    depth_offset = (normal_map_resized * depth_strength * 0.5)[:, :, np.newaxis]

    frame_count = 0
    try:
        while True:
            ret, frame = cap.read()
            if not ret:
                break

            # Only blend 3-channel BGR frames; pass anything else through.
            if len(frame.shape) == 3 and frame.shape[2] == 3:
                frame_float = frame.astype(np.float32) / 255.0
                frame_float = np.clip(frame_float + depth_offset, 0, 1)
                frame = (frame_float * 255).astype(np.uint8)

            out.write(frame)
            frame_count += 1

            # Guard total_frames > 0: some streams don't report a frame count,
            # and dividing by 0 here would abort processing mid-video.
            if frame_count % 10 == 0 and total_frames > 0:
                progress = frame_count / total_frames
                print(f"Processing: {progress*100:.1f}%")
    finally:
        cap.release()
        out.release()

    return output_path
|
|
|
|
|
def process_video(video: gr.Video, normal_map: gr.Image, strength: float) -> gr.Video:
    """
    Gradio callback: apply the normal-map depth effect to an uploaded video.

    Args:
        video: Filepath of the uploaded video (gr.Video delivers a str path).
        normal_map: Normal map as an RGB numpy array (type="numpy") or a
            filepath string (e.g. from Examples).
        strength: Depth effect strength from the slider.

    Returns:
        gr.Video pointing at the processed MP4.

    Raises:
        gr.Error: If the input is unusable or processing fails.
    """
    # Track only the temp file *we* create, so cleanup can never touch
    # user-supplied paths (Gradio stores uploads under the temp dir too,
    # so a blanket "starts with tempdir" check would delete the upload).
    tmp_normal_map_path = None
    try:
        # gr.Video hands the callback a filepath string.
        if isinstance(video, str):
            if video.startswith('data:'):
                # Inline data URIs are not supported; fall back to the sample clip.
                return gr.Video(value="https://gradio-builds.s3.amazonaws.com/assets/video_sample.mp4")
            video_path = video
        else:
            raise gr.Error("Unsupported video input; please upload a video file")

        # Persist an in-memory normal map to disk only when needed, instead of
        # pre-creating temp files that leak when the path is reassigned.
        if isinstance(normal_map, np.ndarray):
            with tempfile.NamedTemporaryFile(suffix='.png', delete=False) as tmp:
                tmp_normal_map_path = tmp.name
            # gr.Image gives RGB; OpenCV writes BGR, so convert before saving.
            cv2.imwrite(tmp_normal_map_path, cv2.cvtColor(normal_map, cv2.COLOR_RGB2BGR))
            normal_map_path = tmp_normal_map_path
        else:
            normal_map_path = normal_map

        output_path = apply_normal_map_depth(video_path, normal_map_path, strength)

        return gr.Video(value=output_path, format="mp4")

    except gr.Error:
        # Already a user-facing error; re-raise without double-wrapping.
        raise
    except Exception as e:
        raise gr.Error(f"Error processing video: {str(e)}")
    finally:
        # Remove only the temp file this function created.
        if tmp_normal_map_path and os.path.exists(tmp_normal_map_path):
            os.unlink(tmp_normal_map_path)
|
|
|
|
|
|
|
|
# Theme and CSS are gr.Blocks() constructor arguments, NOT demo.launch()
# arguments — launch() rejects unknown keywords with a TypeError.
_theme = gr.themes.Soft(
    primary_hue="blue",
    secondary_hue="indigo",
    neutral_hue="slate",
    font=gr.themes.GoogleFont("Inter"),
    text_size="lg",
    spacing_size="lg",
    radius_size="md",
).set(
    button_primary_background_fill="*primary_600",
    button_primary_background_fill_hover="*primary_700",
    block_title_text_weight="600",
)

_css = """
.gradio-container {
    max-width: 1200px !important;
}
.gr-box {
    border-radius: 12px !important;
}
"""

with gr.Blocks(theme=_theme, css=_css) as demo:
    gr.Markdown("# Normal Map Depth Effect for Videos")
    gr.Markdown("""
    ### Built with anycoder
    [](https://huggingface.co/spaces/akhaliq/anycoder)

    Apply depth effects to videos using normal maps. Upload a video and a normal map image to create 3D-like depth effects.
    """)

    with gr.Row():
        with gr.Column():
            gr.Markdown("## Input Video")
            video_input = gr.Video(
                label="Upload Video",
                sources=["upload", "webcam"],
                format="mp4",
                height=300
            )

            gr.Markdown("## Normal Map")
            # NOTE: gr.Image has no `tooltip` parameter — passing one raises
            # TypeError at build time, so the hint lives in the label instead.
            normal_map_input = gr.Image(
                label="Upload Normal Map (grayscale or color)",
                type="numpy",
                height=300
            )

            depth_strength = gr.Slider(
                minimum=0.1,
                maximum=2.0,
                value=1.0,
                step=0.1,
                label="Depth Strength",
                info="Control the intensity of the depth effect"
            )

            process_btn = gr.Button("Apply Depth Effect", variant="primary", size="lg")

        with gr.Column():
            gr.Markdown("## Output Video with Depth Effect")
            video_output = gr.Video(
                label="Processed Video",
                format="mp4",
                height=400,
                autoplay=True
            )

            gr.Markdown("""
            ### How it works:
            1. Upload a video file or use your webcam
            2. Upload a normal map image (grayscale works best)
            3. Adjust the depth strength slider
            4. Click 'Apply Depth Effect' to process
            5. View the result with enhanced depth

            ### Tips:
            - Use high-contrast normal maps for best results
            - Start with lower depth strength and increase gradually
            - Normal maps should match the video resolution for optimal effect
            """)

    gr.Markdown("## Examples")
    examples = gr.Examples(
        examples=[
            [
                "https://gradio-builds.s3.amazonaws.com/assets/video_sample.mp4",
                "https://gradio-builds.s3.amazonaws.com/assets/normal_map_sample.png",
                1.0
            ],
            [
                "https://gradio-builds.s3.amazonaws.com/assets/video_sample.mp4",
                "https://gradio-builds.s3.amazonaws.com/assets/normal_map_sample2.png",
                0.7
            ]
        ],
        inputs=[video_input, normal_map_input, depth_strength],
        outputs=[video_output],
        fn=process_video,
        # Eager caching would run the full video pipeline against remote URLs
        # at startup and abort the app if any asset is unreachable.
        cache_examples=False,
        examples_per_page=2,
        label="Try these examples:"
    )

    # `.click()` has no `api_visibility` parameter; `api_name` alone exposes
    # the endpoint under /api/apply_depth_effect.
    process_btn.click(
        fn=process_video,
        inputs=[video_input, normal_map_input, depth_strength],
        outputs=[video_output],
        api_name="apply_depth_effect"
    )

    gr.Markdown("""
    ---
    ### Built with [anycoder](https://huggingface.co/spaces/akhaliq/anycoder) 🚀
    """)


if __name__ == "__main__":
    # `footer_links` is not a launch() parameter and was dropped; the footer
    # credit is rendered via the Markdown block above instead.
    demo.launch(
        show_error=True,
        share=True
    )