File size: 3,668 Bytes
0c2cc02
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import gradio as gr
import requests
import os
from PIL import Image
import numpy as np
import cv2
import tempfile

def process_video(video_path, lora_url, lora_strength, enable_fflf, blend_strength, seed, steps, cfg_scale, width, height, frames):
    """Download an optional LoRA and, if FFLF is enabled, extract/blend frames.

    Parameters mirror the Gradio inputs: ``video_path`` (filepath or None),
    ``lora_url`` (HTTP URL to a .safetensors file, may be empty),
    ``lora_strength``/``blend_strength`` (floats in [0, 1]), ``enable_fflf``
    (bool), and generation settings (``seed``, ``steps``, ``cfg_scale``,
    ``width``, ``height``, ``frames``) that are currently unused placeholders.

    Returns either a BGR ndarray (the processed last frame, when FFLF runs
    successfully) or a status/error string. Never raises: all failures are
    reported as "Error ..." strings so the Gradio UI can display them.
    """
    # --- Optional LoRA download -------------------------------------------
    if lora_url:
        try:
            # Timeout prevents the UI hanging forever on a dead host;
            # raise_for_status stops an HTTP error page from being saved
            # to disk as if it were a valid safetensors file.
            response = requests.get(lora_url, timeout=60)
            response.raise_for_status()
            lora_path = os.path.join(tempfile.gettempdir(), "downloaded_lora.safetensors")
            with open(lora_path, 'wb') as f:
                f.write(response.content)
        except Exception as e:
            return f"Error downloading LoRA: {str(e)}"

    # --- First Frame / Last Frame processing ------------------------------
    if enable_fflf and video_path:
        # Open the video once (the original opened it twice) and guarantee
        # the capture is released even if a read/seek raises.
        cap = cv2.VideoCapture(video_path)
        try:
            # First frame is read before seeking, so one handle suffices.
            ret_first, first_frame = cap.read()
            total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
            cap.set(cv2.CAP_PROP_POS_FRAMES, total_frames - 1)
            ret_last, last_frame = cap.read()
        except Exception as e:
            return f"Error processing video: {str(e)}"
        finally:
            cap.release()

        if ret_last:
            # Only blend when the first frame was actually decoded;
            # cv2.addWeighted would crash on a None frame otherwise.
            if blend_strength > 0 and ret_first:
                last_frame = cv2.addWeighted(
                    first_frame, 1 - blend_strength,
                    last_frame, blend_strength,
                    0
                )

            # Here you would integrate with WAN 2.2 generation.
            # For demo purposes, we just return the processed frame.
            return last_frame

    # Placeholder for actual generation
    return "Generation complete with LoRA strength: " + str(lora_strength)

# Build the Gradio UI. Layout is order-sensitive: components are placed in the
# order they are created inside each Row/Column/Group context manager.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# WANderFrame - LoRA Linker")
    
    with gr.Row():
        # Left column: model configuration inputs.
        with gr.Column():
            # LoRA Configuration: URL is fetched at generate time by
            # process_video; strength is currently only echoed in the output.
            with gr.Group():
                gr.Markdown("## LoRA Configuration")
                lora_url = gr.Textbox(label="LoRA URL", placeholder="https://example.com/lora.safetensors")
                lora_strength = gr.Slider(0, 1, value=0.7, label="LoRA Strength")
            
            # FFLF Configuration: first-frame/last-frame extraction and
            # blending, applied only when the checkbox is on and a video is set.
            with gr.Group():
                gr.Markdown("## FFLF Configuration")
                enable_fflf = gr.Checkbox(label="Enable First Frame Last Frame", value=True)
                blend_strength = gr.Slider(0, 1, value=0.5, label="Frame Blend Strength")
                input_video = gr.Video(label="Input Video (for FFLF)")
            
        # Right column: generation parameters (placeholders for WAN 2.2
        # integration — process_video does not consume them yet) and output.
        with gr.Column():
            # Generation Parameters
            with gr.Group():
                gr.Markdown("## Generation Parameters")
                seed = gr.Number(label="Seed", value=-1)
                steps = gr.Number(label="Steps", value=30)
                cfg_scale = gr.Number(label="CFG Scale", value=7.5)
                width = gr.Number(label="Width", value=512)
                height = gr.Number(label="Height", value=512)
                frames = gr.Number(label="Frames", value=24)
            
            # Output textbox; process_video may return an ndarray frame, which
            # Gradio will coerce for display — NOTE(review): a gr.Image output
            # might be a better fit, but that would change the UI contract.
            output = gr.Textbox(label="Output", interactive=False)
            generate_btn = gr.Button("Generate", variant="primary")
    
    # Wire the button: input order here must match process_video's signature.
    generate_btn.click(
        fn=process_video,
        inputs=[input_video, lora_url, lora_strength, enable_fflf, blend_strength, seed, steps, cfg_scale, width, height, frames],
        outputs=output
    )

# Start the local Gradio server (blocking call).
demo.launch()