File size: 4,632 Bytes
1535bbd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
#!/usr/bin/env python3
import os
import sys
import subprocess
import gradio as gr
import torch

sys.path.insert(0, '/app/FunGen')

def process_video(video, mode, overwrite, autotune):
    """Run FunGen on an uploaded video and return (status_text, funscript_path).

    Parameters:
        video: Filesystem path of the uploaded video (Gradio ``type="filepath"``),
            or None when nothing was uploaded.
        mode: Tracker mode string forwarded to FunGen's ``--mode`` flag.
        overwrite: When True, pass ``--overwrite`` so existing outputs are replaced.
        autotune: When False, pass ``--no-autotune`` to skip autotuning.

    Returns:
        A ``(status, output_file)`` tuple: human-readable status text plus the
        path of the generated ``.funscript`` (None on failure or no output).
    """
    print(f"[DEBUG] Called with video={video}, mode={mode}, overwrite={overwrite}, autotune={autotune}")

    if video is None:
        return "❌ No video uploaded", None

    try:
        input_path = video
        print(f"[DEBUG] Input path: {input_path}")

        if not os.path.exists(input_path):
            return f"❌ File not found: {input_path}", None

        file_size_mb = os.path.getsize(input_path) / (1024**2)
        status = f"πŸš€ Processing: {os.path.basename(input_path)}\n"
        status += f"πŸ“ Size: {file_size_mb:.1f} MB\n"
        status += f"βš™οΈ  Mode: {mode}\n"
        status += "⏳ Starting...\n\n"

        print(f"[DEBUG] Status: {status}")

        # Build command without fake flags.  sys.executable (rather than a bare
        # "python") guarantees FunGen runs under the same interpreter/venv as
        # this app.
        cmd = [sys.executable, "/app/FunGen/main.py", input_path, "--mode", mode]
        if overwrite:
            cmd.append("--overwrite")
        if not autotune:
            cmd.append("--no-autotune")

        print(f"[DEBUG] Command: {' '.join(cmd)}")

        # Set environment
        env = os.environ.copy()

        # Handle the timeout separately so the user sees a clear "timed out"
        # message instead of a generic exception dump from the broad handler.
        try:
            result = subprocess.run(cmd, cwd="/app/FunGen", capture_output=True,
                                    text=True, timeout=3600, env=env)
        except subprocess.TimeoutExpired:
            return status + "❌ Processing timed out after 3600 seconds", None

        print(f"[DEBUG] Return code: {result.returncode}")
        print(f"[DEBUG] STDOUT:\n{result.stdout}")
        print(f"[DEBUG] STDERR:\n{result.stderr}")

        # Look for output - FunGen saves relative to input or in current directory
        output_file = _find_funscript(input_path)

        if output_file and os.path.exists(output_file):
            status += f"βœ… Complete!\nπŸ“œ {os.path.basename(output_file)}"
            print(f"[DEBUG] Returning: {output_file}")
            return status, output_file

        # Not found - show the full output for debugging
        status += f"⚠️ No output generated\n\nFull Output:\n{result.stdout}\n\nErrors:\n{result.stderr}"
        return status, None

    except Exception as e:
        error = f"❌ Exception: {str(e)}"
        print(f"[DEBUG] Exception: {error}")
        import traceback
        print(traceback.format_exc())
        return error, None


def _find_funscript(input_path):
    """Locate the .funscript most plausibly produced for *input_path*.

    Searches the input's folder plus FunGen's known output locations,
    skipping ``.roll.funscript`` sidecars.  Unlike a naive first-match walk,
    candidates named after the input video are preferred and ties go to the
    newest file, so a stale funscript from an earlier run is not returned
    by accident.  Returns None when no candidate exists.
    """
    stem = os.path.splitext(os.path.basename(input_path))[0]
    search_paths = [
        os.path.dirname(input_path),  # Same folder as input
        "/app/FunGen",
        "/tmp/outputs",
        os.getcwd(),
    ]
    candidates = []
    for search_path in search_paths:
        if not os.path.isdir(search_path):
            continue
        print(f"[DEBUG] Searching: {search_path}")
        for root, _dirs, files in os.walk(search_path):
            for name in files:
                print(f"[DEBUG] Found file: {os.path.join(root, name)}")
                if name.endswith(".funscript") and not name.endswith(".roll.funscript"):
                    candidates.append(os.path.join(root, name))
    if not candidates:
        return None

    def _rank(path):
        # Sort key: (named-after-input, mtime) — basename match wins, then recency.
        try:
            mtime = os.path.getmtime(path)
        except OSError:
            mtime = 0.0
        return (os.path.basename(path).startswith(stem), mtime)

    best = max(candidates, key=_rank)
    print(f"[DEBUG] βœ“ Matched funscript: {best}")
    return best

# --- Gradio UI -------------------------------------------------------------
with gr.Blocks(title="FunGen") as demo:
    gr.Markdown("# 🎬 FunGen - Funscript Generator")

    # Top banner: report whether CUDA is visible to torch.
    with gr.Row():
        device_banner = "βœ… GPU Available" if torch.cuda.is_available() else "❌ No GPU"
        gr.Textbox(value=device_banner, label="Status", interactive=False)

    with gr.Row():
        # Left column: inputs plus the trigger button.
        with gr.Column():
            uploaded_video = gr.File(label="Upload Video", file_types=["video"], type="filepath")
            tracker_mode = gr.Dropdown(
                choices=[
                    "Hybrid Intelligence Tracker",
                    "Oscillation Detector (Legacy)",
                    "YOLO ROI Tracker",
                ],
                value="Hybrid Intelligence Tracker",
                label="Mode",
            )
            overwrite_flag = gr.Checkbox(label="Overwrite", value=False)
            autotune_flag = gr.Checkbox(label="Apply Autotune", value=True)
            run_button = gr.Button("Process Video", variant="primary")

        # Right column: live status text and the downloadable result.
        with gr.Column():
            status_box = gr.Textbox(label="Status", lines=10, interactive=False)
            result_file = gr.File(label="Download", interactive=False)

    # Wire the button to the processing function; outputs map 1:1 to the
    # (status, file) tuple that process_video returns.
    run_button.click(
        fn=process_video,
        inputs=[uploaded_video, tracker_mode, overwrite_flag, autotune_flag],
        outputs=[status_box, result_file],
    )

if __name__ == "__main__":
    # 0.0.0.0 exposes the server beyond localhost (container-style deployment).
    demo.queue().launch(server_name="0.0.0.0", server_port=7860, show_error=True)