# FunGen Hugging Face Space — app.py
#!/usr/bin/env python3
"""Gradio front-end that shells out to FunGen to generate .funscript files."""
import os
import sys
import subprocess
import gradio as gr
import torch
# Make the vendored FunGen checkout importable by its own modules.
sys.path.insert(0, '/app/FunGen')
def _find_funscript(input_path):
    """Locate the .funscript generated for *input_path*, or None.

    Searches the same locations as before (input folder, /app/FunGen,
    /tmp/outputs, cwd), skipping ``.roll.funscript`` sidecars. Prefers a
    file whose name starts with the video's stem so a stale funscript from
    a different video is not returned by mistake; among matches the most
    recently modified one wins.
    """
    stem = os.path.splitext(os.path.basename(input_path))[0]
    search_paths = [
        os.path.dirname(input_path),  # same folder as the input video
        "/app/FunGen",
        "/tmp/outputs",
        os.getcwd(),
    ]
    candidates = []
    for search_path in search_paths:
        if not os.path.isdir(search_path):
            continue
        print(f"[DEBUG] Searching: {search_path}")
        for root, _dirs, files in os.walk(search_path):
            for f in files:
                if f.endswith(".funscript") and not f.endswith(".roll.funscript"):
                    candidates.append(os.path.join(root, f))
    # Prefer output named after the input video; fall back to any funscript
    # (FunGen's exact naming is not guaranteed — TODO confirm against FunGen).
    preferred = [c for c in candidates if os.path.basename(c).startswith(stem)]
    pool = preferred or candidates
    return max(pool, key=os.path.getmtime) if pool else None


def process_video(video, mode, overwrite, autotune):
    """Run FunGen on an uploaded video.

    Parameters:
        video: filesystem path of the upload (Gradio ``type="filepath"``), or None.
        mode: FunGen tracker mode, forwarded as ``--mode``.
        overwrite: when True, pass ``--overwrite`` to regenerate existing output.
        autotune: when False, pass ``--no-autotune``.

    Returns:
        (status_text, funscript_path_or_None) — never raises; all failures are
        reported through the status string so Gradio can display them.
    """
    print(f"[DEBUG] Called with video={video}, mode={mode}, overwrite={overwrite}, autotune={autotune}")
    if video is None:
        return "β No video uploaded", None
    try:
        input_path = video
        print(f"[DEBUG] Input path: {input_path}")
        if not os.path.exists(input_path):
            return f"β File not found: {input_path}", None
        file_size_mb = os.path.getsize(input_path) / (1024**2)
        status = f"π Processing: {os.path.basename(input_path)}\n"
        status += f"π Size: {file_size_mb:.1f} MB\n"
        status += f"βοΈ Mode: {mode}\n"
        status += f"β³ Starting...\n\n"
        print(f"[DEBUG] Status: {status}")
        # Use the interpreter running this app rather than whatever "python"
        # resolves to on PATH, so FunGen sees the same environment/packages.
        cmd = [sys.executable, "/app/FunGen/main.py", input_path, "--mode", mode]
        if overwrite:
            cmd.append("--overwrite")
        if not autotune:
            cmd.append("--no-autotune")
        print(f"[DEBUG] Command: {' '.join(cmd)}")
        env = os.environ.copy()
        result = subprocess.run(
            cmd, cwd="/app/FunGen", capture_output=True, text=True,
            timeout=3600, env=env,
        )
        print(f"[DEBUG] Return code: {result.returncode}")
        print(f"[DEBUG] STDOUT:\n{result.stdout}")
        print(f"[DEBUG] STDERR:\n{result.stderr}")
        # BUG FIX: the return code used to be ignored, so a failed run could
        # still "find" a stale funscript and report success.
        if result.returncode != 0:
            status += (f"β FunGen failed (exit {result.returncode})\n\n"
                       f"Full Output:\n{result.stdout}\n\nErrors:\n{result.stderr}")
            return status, None
        output_file = _find_funscript(input_path)
        if output_file and os.path.exists(output_file):
            status += f"β Complete!\nπ {os.path.basename(output_file)}"
            print(f"[DEBUG] Returning: {output_file}")
            return status, output_file
        # Not found — surface the full process output for debugging.
        status += f"β οΈ No output generated\n\nFull Output:\n{result.stdout}\n\nErrors:\n{result.stderr}"
        return status, None
    except Exception as e:
        # Boundary handler: Gradio callbacks should report, not crash.
        error = f"β Exception: {str(e)}"
        print(f"[DEBUG] Exception: {error}")
        import traceback
        print(traceback.format_exc())
        return error, None
# --- Gradio UI layout and event wiring -------------------------------------
with gr.Blocks(title="FunGen") as demo:
    gr.Markdown("# π¬ FunGen - Funscript Generator")
    with gr.Row():
        # GPU availability is checked once at startup and shown read-only.
        gpu_info = "β GPU Available" if torch.cuda.is_available() else "β No GPU"
        gr.Textbox(value=gpu_info, label="Status", interactive=False)
    with gr.Row():
        with gr.Column():
            # Inputs: the video file plus the CLI options forwarded to FunGen.
            video_input = gr.File(label="Upload Video", file_types=["video"], type="filepath")
            mode_input = gr.Dropdown(
                ["Hybrid Intelligence Tracker", "Oscillation Detector (Legacy)", "YOLO ROI Tracker"],
                value="Hybrid Intelligence Tracker",
                label="Mode"
            )
            overwrite_input = gr.Checkbox(label="Overwrite", value=False)
            autotune_input = gr.Checkbox(label="Apply Autotune", value=True)
            process_btn = gr.Button("Process Video", variant="primary")
        with gr.Column():
            # Outputs: status text and the downloadable generated funscript.
            status_output = gr.Textbox(label="Status", lines=10, interactive=False)
            file_output = gr.File(label="Download", interactive=False)
    # Wire the button to process_video; component order must match its signature.
    process_btn.click(
        fn=process_video,
        inputs=[video_input, mode_input, overwrite_input, autotune_input],
        outputs=[status_output, file_output]
    )
if __name__ == "__main__":
    # queue() serializes long-running jobs; 0.0.0.0 binds all interfaces so
    # the server is reachable from outside the container on port 7860.
    demo.queue().launch(server_name="0.0.0.0", server_port=7860, show_error=True)