Spaces: Running on Zero
| import gradio as gr | |
| import numpy as np | |
| import random | |
| import torch | |
| import spaces | |
| from PIL import Image | |
| from diffusers import FlowMatchEulerDiscreteScheduler, QwenImageEditPlusPipeline | |
# Upper bound for user-facing random seeds (largest signed 32-bit integer).
MAX_SEED = np.iinfo(np.int32).max
# --- Model Loading ---
# bfloat16 halves memory versus float32; requires a GPU with bf16 support.
dtype = torch.bfloat16
device = "cuda" if torch.cuda.is_available() else "cpu"
# Qwen Image Edit 2511 pipeline, loaded once at import time (module-level
# side effect: downloads weights on first run and moves them to `device`).
pipe = QwenImageEditPlusPipeline.from_pretrained(
    "Qwen/Qwen-Image-Edit-2511",
    torch_dtype=dtype
).to(device)
# Enable VAE tiling to reduce memory usage during decode of large images.
pipe.vae.enable_tiling()
# Load the Lightning LoRA for fast inference (distilled 4-step variant,
# matching the default num_inference_steps=4 used by infer_camera_edit).
pipe.load_lora_weights(
    "lightx2v/Qwen-Image-Edit-2511-Lightning",
    weight_name="Qwen-Image-Edit-2511-Lightning-4steps-V1.0-bf16.safetensors",
    adapter_name="lightning"
)
# Load the multi-angles LoRA that understands the "<sks> ..." camera prompts.
pipe.load_lora_weights(
    "fal/Qwen-Image-Edit-2511-Multiple-Angles-LoRA",
    weight_name="qwen-image-edit-2511-multiple-angles-lora.safetensors",
    adapter_name="angles"
)
# Activate both adapters simultaneously at full strength.
pipe.set_adapters(["lightning", "angles"], adapter_weights=[1.0, 1.0])
# --- Prompt Building ---
# Camera-vocabulary lookup tables. The keys MUST match the discrete steps
# used by the CameraControl3D JS widget (azimuthSteps / elevationSteps /
# distanceSteps), because UI values are snapped to the nearest key before
# lookup in build_camera_prompt().

# Azimuth mappings (8 positions, degrees clockwise from the front)
AZIMUTH_MAP = {
    0: "front view",
    45: "front-right quarter view",
    90: "right side view",
    135: "back-right quarter view",
    180: "back view",
    225: "back-left quarter view",
    270: "left side view",
    315: "front-left quarter view"
}
# Elevation mappings (4 positions, degrees above/below eye level)
ELEVATION_MAP = {
    -30: "low-angle shot",
    0: "eye-level shot",
    30: "elevated shot",
    60: "high-angle shot"
}
# Distance mappings (3 positions, relative distance factor).
# BUGFIX: the wide-shot key was 1.8, but the 3D control emits distances in
# [0.6, 1.4] with steps [0.6, 1.0, 1.4]; a UI value of 1.4 then snapped to
# 1.0 ("medium shot"), making "wide shot" unreachable. Use 1.4 so the
# Python prompt matches the widget's distanceNames table.
DISTANCE_MAP = {
    0.6: "close-up",
    1.0: "medium shot",
    1.4: "wide shot"
}
def snap_to_nearest(value, options):
    """Return the element of *options* with the smallest absolute distance
    to *value* (first such element wins on ties)."""
    best = options[0]
    for candidate in options[1:]:
        if abs(candidate - value) < abs(best - value):
            best = candidate
    return best
def build_camera_prompt(azimuth: float, elevation: float, distance: float) -> str:
    """Build the multi-angles LoRA trigger prompt for a camera pose.

    Each raw value is snapped to the nearest key of its vocabulary table
    (AZIMUTH_MAP / ELEVATION_MAP / DISTANCE_MAP) and the corresponding
    phrases are joined after the "<sks>" trigger token.
    """
    phrases = []
    for raw_value, vocabulary in (
        (azimuth, AZIMUTH_MAP),
        (elevation, ELEVATION_MAP),
        (distance, DISTANCE_MAP),
    ):
        nearest_key = snap_to_nearest(raw_value, list(vocabulary.keys()))
        phrases.append(vocabulary[nearest_key])
    return "<sks> " + " ".join(phrases)
def infer_camera_edit(
    image: Image.Image,
    azimuth: float = 0.0,
    elevation: float = 0.0,
    distance: float = 1.0,
    seed: int = 0,
    randomize_seed: bool = True,
    guidance_scale: float = 1.0,
    num_inference_steps: int = 4,
    height: int = 768,
    width: int = 768,
):
    """
    Edit the camera angle of an image using Qwen Image Edit 2511 with the
    multi-angles LoRA.

    Args:
        image: Input image (PIL image, or a path-like accepted by Image.open).
        azimuth / elevation / distance: Raw camera pose; snapped to the
            discrete vocabulary inside build_camera_prompt().
        seed: RNG seed; ignored when randomize_seed is True.
        randomize_seed: When True, draw a fresh seed in [0, MAX_SEED].
        guidance_scale: Classifier-free guidance scale (1.0 suits the
            Lightning-distilled adapter loaded at module level).
        num_inference_steps: Denoising steps (4 matches the 4-step LoRA).
        height / width: Requested output size; rounded down to multiples
            of 8 and capped at 2048 on the longer side.

    Returns:
        (edited PIL image, seed actually used, generated prompt string).

    Raises:
        gr.Error: If no image was provided.
    """
    # NOTE(review): the module imports `spaces` (ZeroGPU) but this function
    # is not decorated with @spaces.GPU — confirm GPU allocation is handled
    # elsewhere, otherwise inference may fail on a ZeroGPU Space.
    progress = gr.Progress(track_tqdm=True)
    prompt = build_camera_prompt(azimuth, elevation, distance)
    print(f"Generated Prompt: {prompt}")
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    # Seed the generator on the same device the pipeline runs on.
    generator = torch.Generator(device=device).manual_seed(seed)
    if image is None:
        raise gr.Error("Please upload an image first.")
    # Accept either a PIL image or a file path; normalize to RGB.
    pil_image = image.convert("RGB") if isinstance(image, Image.Image) else Image.open(image).convert("RGB")
    # Ensure dimensions are multiples of 8 (latent-space stride requirement)
    height = (height // 8) * 8
    width = (width // 8) * 8
    # Limit max resolution to prevent OOM
    max_dim = 2048
    if height > max_dim or width > max_dim:
        scale = max_dim / max(height, width)
        height = int(height * scale)
        width = int(width * scale)
        # Re-snap to multiples of 8 after scaling.
        height = (height // 8) * 8
        width = (width // 8) * 8
    result = pipe(
        image=[pil_image],
        prompt=prompt,
        height=height,
        width=width,
        num_inference_steps=num_inference_steps,
        generator=generator,
        guidance_scale=guidance_scale,
        num_images_per_prompt=1,
    ).images[0]
    return result, seed, prompt
def update_dimensions_on_upload(image):
    """Suggest (width, height) for a newly uploaded image.

    Preserves the aspect ratio, caps the longer side at 768 px, and rounds
    both sides down to multiples of 8. Returns the 768x768 default when no
    image is given.
    """
    if image is None:
        return 768, 768

    src_w, src_h = image.size
    cap = 768  # keep the longer side at or below this to avoid OOM
    if src_w > src_h:
        # Landscape: pin the width, derive the height.
        out_w = cap
        out_h = int(out_w * (src_h / src_w))
    else:
        # Portrait or square: pin the height, derive the width.
        out_h = cap
        out_w = int(out_h * (src_w / src_h))
    # Snap both sides down to the nearest multiple of 8.
    return (out_w // 8) * 8, (out_h // 8) * 8
# --- Comic-style CSS (original styles plus the added HOME-button styling) ---
# Injected into the Gradio Blocks app; styles the comic theme, hides
# footer/HF branding, and skins the 3D camera control container.
COMIC_CSS = """
@import url('https://fonts.googleapis.com/css2?family=Bangers&family=Comic+Neue:wght@400;700&display=swap');
.gradio-container {
    background-color: #FEF9C3 !important;
    background-image: radial-gradient(#1F2937 1px, transparent 1px) !important;
    background-size: 20px 20px !important;
    min-height: 100vh !important;
    font-family: 'Comic Neue', cursive, sans-serif !important;
}
footer, .footer, .gradio-container footer, .built-with, [class*="footer"], .gradio-footer, a[href*="gradio.app"] {
    display: none !important;
    visibility: hidden !important;
    height: 0 !important;
}
/* HOME Button Style - ์ถ๊ฐ๋จ */
.home-button-container {
    display: flex;
    justify-content: center;
    align-items: center;
    gap: 15px;
    margin-bottom: 15px;
    padding: 12px 20px;
    background: linear-gradient(135deg, #10B981 0%, #059669 100%);
    border: 4px solid #1F2937;
    border-radius: 12px;
    box-shadow: 6px 6px 0 #1F2937;
}
.home-button {
    display: inline-flex;
    align-items: center;
    gap: 8px;
    padding: 10px 25px;
    background: linear-gradient(135deg, #FACC15 0%, #F59E0B 100%);
    color: #1F2937;
    font-family: 'Bangers', cursive;
    font-size: 1.4rem;
    letter-spacing: 2px;
    text-decoration: none;
    border: 3px solid #1F2937;
    border-radius: 8px;
    box-shadow: 4px 4px 0 #1F2937;
    transition: all 0.2s ease;
}
.home-button:hover {
    background: linear-gradient(135deg, #FDE047 0%, #FACC15 100%);
    transform: translate(-2px, -2px);
    box-shadow: 6px 6px 0 #1F2937;
}
.home-button:active {
    transform: translate(2px, 2px);
    box-shadow: 2px 2px 0 #1F2937;
}
.url-display {
    font-family: 'Comic Neue', cursive;
    font-size: 1.1rem;
    font-weight: 700;
    color: #FFF;
    background: rgba(0,0,0,0.3);
    padding: 8px 16px;
    border-radius: 6px;
    border: 2px solid rgba(255,255,255,0.3);
}
.header-container {
    text-align: center;
    padding: 25px 20px;
    background: linear-gradient(135deg, #3B82F6 0%, #8B5CF6 100%);
    border: 4px solid #1F2937;
    border-radius: 12px;
    margin-bottom: 20px;
    box-shadow: 8px 8px 0 #1F2937;
    position: relative;
}
.header-title {
    font-family: 'Bangers', cursive !important;
    color: #FFF !important;
    font-size: 2.8rem !important;
    text-shadow: 3px 3px 0 #1F2937 !important;
    letter-spacing: 3px !important;
    margin: 0 !important;
}
.header-subtitle {
    font-family: 'Comic Neue', cursive !important;
    font-size: 1.1rem !important;
    color: #FEF9C3 !important;
    margin-top: 8px !important;
    font-weight: 700 !important;
}
.stats-badge {
    display: inline-block;
    background: #FACC15;
    color: #1F2937;
    padding: 6px 14px;
    border-radius: 20px;
    font-size: 0.9rem;
    margin: 3px;
    font-weight: 700;
    border: 2px solid #1F2937;
    box-shadow: 2px 2px 0 #1F2937;
}
.gr-panel, .gr-box, .gr-form, .block, .gr-group {
    background: #FFF !important;
    border: 3px solid #1F2937 !important;
    border-radius: 8px !important;
    box-shadow: 5px 5px 0 #1F2937 !important;
}
.gr-button-primary, button.primary, .gr-button.primary {
    background: linear-gradient(135deg, #EF4444 0%, #F97316 100%) !important;
    border: 3px solid #1F2937 !important;
    border-radius: 8px !important;
    color: #FFF !important;
    font-family: 'Bangers', cursive !important;
    font-size: 1.3rem !important;
    letter-spacing: 2px !important;
    padding: 12px 24px !important;
    box-shadow: 4px 4px 0 #1F2937 !important;
    text-shadow: 1px 1px 0 #1F2937 !important;
    transition: all 0.2s ease !important;
}
.gr-button-primary:hover, button.primary:hover {
    background: linear-gradient(135deg, #DC2626 0%, #EA580C 100%) !important;
    transform: translate(-2px, -2px) !important;
    box-shadow: 6px 6px 0 #1F2937 !important;
}
.gr-button-primary:active, button.primary:active {
    transform: translate(2px, 2px) !important;
    box-shadow: 2px 2px 0 #1F2937 !important;
}
textarea, input[type="text"], input[type="number"] {
    background: #FFF !important;
    border: 3px solid #1F2937 !important;
    border-radius: 8px !important;
    color: #1F2937 !important;
    font-family: 'Comic Neue', cursive !important;
    font-weight: 700 !important;
}
textarea:focus, input[type="text"]:focus {
    border-color: #3B82F6 !important;
    box-shadow: 3px 3px 0 #3B82F6 !important;
}
.info-box {
    background: linear-gradient(135deg, #FACC15 0%, #FDE047 100%) !important;
    border: 3px solid #1F2937 !important;
    border-radius: 8px !important;
    padding: 12px 15px !important;
    margin: 10px 0 !important;
    box-shadow: 4px 4px 0 #1F2937 !important;
    font-family: 'Comic Neue', cursive !important;
    font-weight: 700 !important;
    color: #1F2937 !important;
}
.result-box textarea {
    background: #1F2937 !important;
    color: #10B981 !important;
    font-family: 'Courier New', monospace !important;
    border: 3px solid #10B981 !important;
    border-radius: 8px !important;
    box-shadow: 4px 4px 0 #10B981 !important;
}
label, .gr-input-label, .gr-block-label {
    color: #1F2937 !important;
    font-family: 'Comic Neue', cursive !important;
    font-weight: 700 !important;
}
.gr-accordion {
    background: #E0F2FE !important;
    border: 3px solid #1F2937 !important;
    border-radius: 8px !important;
    box-shadow: 4px 4px 0 #1F2937 !important;
}
.tab-nav button {
    font-family: 'Comic Neue', cursive !important;
    font-weight: 700 !important;
    border: 2px solid #1F2937 !important;
    margin: 2px !important;
}
.tab-nav button.selected {
    background: #3B82F6 !important;
    color: #FFF !important;
    box-shadow: 3px 3px 0 #1F2937 !important;
}
.footer-comic {
    text-align: center;
    padding: 20px;
    background: linear-gradient(135deg, #3B82F6 0%, #8B5CF6 100%);
    border: 4px solid #1F2937;
    border-radius: 12px;
    margin-top: 20px;
    box-shadow: 6px 6px 0 #1F2937;
}
.footer-comic p {
    font-family: 'Comic Neue', cursive !important;
    color: #FFF !important;
    margin: 5px 0 !important;
    font-weight: 700 !important;
}
::-webkit-scrollbar {
    width: 12px;
    height: 12px;
}
::-webkit-scrollbar-track {
    background: #FEF9C3;
    border: 2px solid #1F2937;
}
::-webkit-scrollbar-thumb {
    background: #3B82F6;
    border: 2px solid #1F2937;
    border-radius: 6px;
}
::-webkit-scrollbar-thumb:hover {
    background: #EF4444;
}
::selection {
    background: #FACC15;
    color: #1F2937;
}
/* 3D Camera Control Styling */
#camera-3d-control {
    min-height: 450px;
    border: 3px solid #1F2937 !important;
    border-radius: 12px !important;
    box-shadow: 5px 5px 0 #1F2937 !important;
    overflow: hidden;
}
/* Slider Styling */
input[type="range"] {
    accent-color: #3B82F6;
}
.gr-slider input[type="range"]::-webkit-slider-thumb {
    background: #EF4444 !important;
    border: 2px solid #1F2937 !important;
}
/* Image Container */
.gr-image {
    border: 3px solid #1F2937 !important;
    border-radius: 8px !important;
    box-shadow: 4px 4px 0 #1F2937 !important;
}
/* Control Section */
.control-section {
    background: linear-gradient(135deg, #E0F2FE 0%, #DBEAFE 100%) !important;
    border: 3px solid #1F2937 !important;
    border-radius: 12px !important;
    padding: 15px !important;
    margin: 10px 0 !important;
    box-shadow: 4px 4px 0 #1F2937 !important;
}
/* Hide Hugging Face elements */
.huggingface-space-link,
a[href*="huggingface.co/spaces"],
button[class*="share"],
.share-button,
[class*="hf-logo"],
.gr-share-btn,
#hf-logo,
.hf-icon,
svg[class*="hf"],
div[class*="huggingface"],
a[class*="huggingface"],
.svelte-1rjryqp,
header a[href*="huggingface"],
.space-header,
div.absolute.right-0 a[href*="huggingface"],
.gr-group > a[href*="huggingface"],
a[target="_blank"][href*="huggingface.co"] {
    display: none !important;
    visibility: hidden !important;
    opacity: 0 !important;
    pointer-events: none !important;
    width: 0 !important;
    height: 0 !important;
    overflow: hidden !important;
}
/* Quality Badge */
.quality-badge {
    display: inline-block;
    background: linear-gradient(135deg, #10B981 0%, #059669 100%);
    color: white;
    padding: 4px 12px;
    border-radius: 15px;
    font-size: 0.8rem;
    font-weight: bold;
    border: 2px solid #1F2937;
    margin-left: 8px;
}
#col-container {
    max-width: 1200px;
    margin: 0 auto;
}
.dark .progress-text {
    color: white !important;
}
"""
# --- 3D Camera Control Component (improved elevation handle) ---
class CameraControl3D(gr.HTML):
    """
    A 3D camera control component using Three.js.

    Renders an interactive scene with three draggable handles:
    a green ring for azimuth (0-315 deg in 45-deg steps), a pink arc for
    elevation (-30..60 deg), and an orange handle for distance
    (0.6 / 1.0 / 1.4). On mouse/touch release the handle animates to the
    nearest discrete step and the component's value is set to
    {"azimuth": ..., "elevation": ..., "distance": ...} via a 'change'
    trigger. The prompt overlay previews the same "<sks> ..." string that
    build_camera_prompt() produces on the Python side.

    NOTE(review): `html_template`, `js_on_load` and `imageUrl` are not
    standard `gr.HTML` constructor arguments in stock Gradio — this appears
    to target a customized/forked component runtime that also provides the
    `element`, `props` and `trigger` bindings used by the JS below; confirm
    against the deployed Gradio version.
    """

    def __init__(self, value=None, imageUrl=None, **kwargs):
        # Default camera pose: front view, eye level, medium distance.
        if value is None:
            value = {"azimuth": 0, "elevation": 0, "distance": 1.0}
        # Static markup: canvas wrapper, live prompt overlay, and a legend.
        # (String content is runtime data and is preserved verbatim,
        # including the mojibake-damaged legend glyphs.)
        html_template = """
        <div id="camera-control-wrapper" style="width: 100%; height: 450px; position: relative; background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%); border-radius: 12px; overflow: hidden;">
            <div id="prompt-overlay" style="position: absolute; bottom: 10px; left: 50%; transform: translateX(-50%); background: rgba(0,0,0,0.85); padding: 10px 20px; border-radius: 10px; font-family: 'Bangers', monospace; font-size: 14px; color: #00ff88; white-space: nowrap; z-index: 10; border: 2px solid #00ff88; box-shadow: 0 0 15px rgba(0,255,136,0.3);"></div>
            <div id="control-hints" style="position: absolute; top: 10px; left: 10px; background: rgba(0,0,0,0.75); padding: 8px 12px; border-radius: 8px; font-family: 'Comic Neue', sans-serif; font-size: 11px; color: #fff; z-index: 10; border: 2px solid #3B82F6;">
                ๐ข Azimuth ๐ฉท Elevation (โ๋๋๊ทธ) ๐ Distance
            </div>
        </div>
        """
        # Client-side scene setup and interaction logic. Runs when the
        # component mounts; polls for the global THREE namespace, then
        # builds the scene, handles, and drag/snap behaviour.
        js_on_load = """
        (() => {
            const wrapper = element.querySelector('#camera-control-wrapper');
            const promptOverlay = element.querySelector('#prompt-overlay');
            const initScene = () => {
                if (typeof THREE === 'undefined') {
                    setTimeout(initScene, 100);
                    return;
                }
                const scene = new THREE.Scene();
                scene.background = new THREE.Color(0x1a1a2e);
                const camera = new THREE.PerspectiveCamera(50, wrapper.clientWidth / wrapper.clientHeight, 0.1, 1000);
                camera.position.set(4.5, 3, 4.5);
                camera.lookAt(0, 0.75, 0);
                const renderer = new THREE.WebGLRenderer({ antialias: true });
                renderer.setSize(wrapper.clientWidth, wrapper.clientHeight);
                renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
                wrapper.insertBefore(renderer.domElement, promptOverlay);
                scene.add(new THREE.AmbientLight(0xffffff, 0.6));
                const dirLight = new THREE.DirectionalLight(0xffffff, 0.6);
                dirLight.position.set(5, 10, 5);
                scene.add(dirLight);
                const gridHelper = new THREE.GridHelper(8, 16, 0x3B82F6, 0x1e3a5f);
                scene.add(gridHelper);
                const CENTER = new THREE.Vector3(0, 0.75, 0);
                const BASE_DISTANCE = 1.6;
                const AZIMUTH_RADIUS = 2.4;
                const ELEVATION_RADIUS = 1.8;
                let azimuthAngle = props.value?.azimuth || 0;
                let elevationAngle = props.value?.elevation || 0;
                let distanceFactor = props.value?.distance || 1.0;
                const azimuthSteps = [0, 45, 90, 135, 180, 225, 270, 315];
                const elevationSteps = [-30, 0, 30, 60];
                const distanceSteps = [0.6, 1.0, 1.4];
                const azimuthNames = {
                    0: 'front view', 45: 'front-right quarter view', 90: 'right side view',
                    135: 'back-right quarter view', 180: 'back view', 225: 'back-left quarter view',
                    270: 'left side view', 315: 'front-left quarter view'
                };
                const elevationNames = { '-30': 'low-angle shot', '0': 'eye-level shot', '30': 'elevated shot', '60': 'high-angle shot' };
                const distanceNames = { '0.6': 'close-up', '1': 'medium shot', '1.4': 'wide shot' };
                function snapToNearest(value, steps) {
                    return steps.reduce((prev, curr) => Math.abs(curr - value) < Math.abs(prev - value) ? curr : prev);
                }
                function createPlaceholderTexture() {
                    const canvas = document.createElement('canvas');
                    canvas.width = 256;
                    canvas.height = 256;
                    const ctx = canvas.getContext('2d');
                    const gradient = ctx.createLinearGradient(0, 0, 256, 256);
                    gradient.addColorStop(0, '#3B82F6');
                    gradient.addColorStop(1, '#8B5CF6');
                    ctx.fillStyle = gradient;
                    ctx.fillRect(0, 0, 256, 256);
                    ctx.fillStyle = '#FEF9C3';
                    ctx.beginPath();
                    ctx.arc(128, 100, 50, 0, Math.PI * 2);
                    ctx.fill();
                    ctx.fillStyle = '#1F2937';
                    ctx.beginPath();
                    ctx.arc(110, 90, 8, 0, Math.PI * 2);
                    ctx.arc(146, 90, 8, 0, Math.PI * 2);
                    ctx.fill();
                    ctx.strokeStyle = '#1F2937';
                    ctx.lineWidth = 4;
                    ctx.beginPath();
                    ctx.arc(128, 105, 25, 0.2, Math.PI - 0.2);
                    ctx.stroke();
                    ctx.fillStyle = '#FFF';
                    ctx.font = 'bold 24px Comic Neue, sans-serif';
                    ctx.textAlign = 'center';
                    ctx.fillText('๐ท Upload Image', 128, 200);
                    return new THREE.CanvasTexture(canvas);
                }
                let currentTexture = createPlaceholderTexture();
                const planeMaterial = new THREE.MeshBasicMaterial({ map: currentTexture, side: THREE.DoubleSide });
                let targetPlane = new THREE.Mesh(new THREE.PlaneGeometry(1.2, 1.2), planeMaterial);
                targetPlane.position.copy(CENTER);
                scene.add(targetPlane);
                function updateTextureFromUrl(url) {
                    if (!url) {
                        planeMaterial.map = createPlaceholderTexture();
                        planeMaterial.needsUpdate = true;
                        scene.remove(targetPlane);
                        targetPlane = new THREE.Mesh(new THREE.PlaneGeometry(1.2, 1.2), planeMaterial);
                        targetPlane.position.copy(CENTER);
                        scene.add(targetPlane);
                        return;
                    }
                    const loader = new THREE.TextureLoader();
                    loader.crossOrigin = 'anonymous';
                    loader.load(url, (texture) => {
                        texture.minFilter = THREE.LinearFilter;
                        texture.magFilter = THREE.LinearFilter;
                        planeMaterial.map = texture;
                        planeMaterial.needsUpdate = true;
                        const img = texture.image;
                        if (img && img.width && img.height) {
                            const aspect = img.width / img.height;
                            const maxSize = 1.5;
                            let planeWidth, planeHeight;
                            if (aspect > 1) {
                                planeWidth = maxSize;
                                planeHeight = maxSize / aspect;
                            } else {
                                planeHeight = maxSize;
                                planeWidth = maxSize * aspect;
                            }
                            scene.remove(targetPlane);
                            targetPlane = new THREE.Mesh(
                                new THREE.PlaneGeometry(planeWidth, planeHeight),
                                planeMaterial
                            );
                            targetPlane.position.copy(CENTER);
                            scene.add(targetPlane);
                        }
                    });
                }
                if (props.imageUrl) {
                    updateTextureFromUrl(props.imageUrl);
                }
                const cameraGroup = new THREE.Group();
                const bodyMat = new THREE.MeshStandardMaterial({ color: 0x6699cc, metalness: 0.5, roughness: 0.3 });
                const body = new THREE.Mesh(new THREE.BoxGeometry(0.3, 0.22, 0.38), bodyMat);
                cameraGroup.add(body);
                const lens = new THREE.Mesh(
                    new THREE.CylinderGeometry(0.09, 0.11, 0.18, 16),
                    new THREE.MeshStandardMaterial({ color: 0x6699cc, metalness: 0.5, roughness: 0.3 })
                );
                lens.rotation.x = Math.PI / 2;
                lens.position.z = 0.26;
                cameraGroup.add(lens);
                scene.add(cameraGroup);
                const azimuthRing = new THREE.Mesh(
                    new THREE.TorusGeometry(AZIMUTH_RADIUS, 0.04, 16, 64),
                    new THREE.MeshStandardMaterial({ color: 0x00ff88, emissive: 0x00ff88, emissiveIntensity: 0.3 })
                );
                azimuthRing.rotation.x = Math.PI / 2;
                azimuthRing.position.y = 0.05;
                scene.add(azimuthRing);
                const azimuthHandle = new THREE.Mesh(
                    new THREE.SphereGeometry(0.18, 16, 16),
                    new THREE.MeshStandardMaterial({ color: 0x00ff88, emissive: 0x00ff88, emissiveIntensity: 0.5 })
                );
                azimuthHandle.userData.type = 'azimuth';
                scene.add(azimuthHandle);
                const arcPoints = [];
                for (let i = 0; i <= 32; i++) {
                    const angle = THREE.MathUtils.degToRad(-30 + (90 * i / 32));
                    arcPoints.push(new THREE.Vector3(2.8, ELEVATION_RADIUS * Math.sin(angle) + CENTER.y, ELEVATION_RADIUS * Math.cos(angle)));
                }
                const arcCurve = new THREE.CatmullRomCurve3(arcPoints);
                const elevationArc = new THREE.Mesh(
                    new THREE.TubeGeometry(arcCurve, 32, 0.04, 8, false),
                    new THREE.MeshStandardMaterial({ color: 0xff69b4, emissive: 0xff69b4, emissiveIntensity: 0.3 })
                );
                scene.add(elevationArc);
                const elevationHandle = new THREE.Mesh(
                    new THREE.SphereGeometry(0.22, 16, 16),
                    new THREE.MeshStandardMaterial({ color: 0xff69b4, emissive: 0xff69b4, emissiveIntensity: 0.5 })
                );
                elevationHandle.userData.type = 'elevation';
                scene.add(elevationHandle);
                const distanceLineGeo = new THREE.BufferGeometry();
                const distanceLine = new THREE.Line(distanceLineGeo, new THREE.LineBasicMaterial({ color: 0xffa500 }));
                scene.add(distanceLine);
                const distanceHandle = new THREE.Mesh(
                    new THREE.SphereGeometry(0.18, 16, 16),
                    new THREE.MeshStandardMaterial({ color: 0xffa500, emissive: 0xffa500, emissiveIntensity: 0.5 })
                );
                distanceHandle.userData.type = 'distance';
                scene.add(distanceHandle);
                function updatePositions() {
                    const distance = BASE_DISTANCE * distanceFactor;
                    const azRad = THREE.MathUtils.degToRad(azimuthAngle);
                    const elRad = THREE.MathUtils.degToRad(elevationAngle);
                    const camX = distance * Math.sin(azRad) * Math.cos(elRad);
                    const camY = distance * Math.sin(elRad) + CENTER.y;
                    const camZ = distance * Math.cos(azRad) * Math.cos(elRad);
                    cameraGroup.position.set(camX, camY, camZ);
                    cameraGroup.lookAt(CENTER);
                    azimuthHandle.position.set(AZIMUTH_RADIUS * Math.sin(azRad), 0.05, AZIMUTH_RADIUS * Math.cos(azRad));
                    elevationHandle.position.set(2.8, ELEVATION_RADIUS * Math.sin(elRad) + CENTER.y, ELEVATION_RADIUS * Math.cos(elRad));
                    const orangeDist = distance - 0.5;
                    distanceHandle.position.set(
                        orangeDist * Math.sin(azRad) * Math.cos(elRad),
                        orangeDist * Math.sin(elRad) + CENTER.y,
                        orangeDist * Math.cos(azRad) * Math.cos(elRad)
                    );
                    distanceLineGeo.setFromPoints([cameraGroup.position.clone(), CENTER.clone()]);
                    const azSnap = snapToNearest(azimuthAngle, azimuthSteps);
                    const elSnap = snapToNearest(elevationAngle, elevationSteps);
                    const distSnap = snapToNearest(distanceFactor, distanceSteps);
                    const distKey = distSnap === 1 ? '1' : distSnap.toFixed(1);
                    const prompt = '<sks> ' + azimuthNames[azSnap] + ' ' + elevationNames[String(elSnap)] + ' ' + distanceNames[distKey];
                    promptOverlay.textContent = prompt;
                }
                function updatePropsAndTrigger() {
                    const azSnap = snapToNearest(azimuthAngle, azimuthSteps);
                    const elSnap = snapToNearest(elevationAngle, elevationSteps);
                    const distSnap = snapToNearest(distanceFactor, distanceSteps);
                    props.value = { azimuth: azSnap, elevation: elSnap, distance: distSnap };
                    trigger('change', props.value);
                }
                const raycaster = new THREE.Raycaster();
                const mouse = new THREE.Vector2();
                let isDragging = false;
                let dragTarget = null;
                let dragStartMouse = new THREE.Vector2();
                let dragStartDistance = 1.0;
                let dragStartY = 0;
                let dragStartElevation = 0;
                const intersection = new THREE.Vector3();
                const canvas = renderer.domElement;
                canvas.addEventListener('mousedown', (e) => {
                    const rect = canvas.getBoundingClientRect();
                    mouse.x = ((e.clientX - rect.left) / rect.width) * 2 - 1;
                    mouse.y = -((e.clientY - rect.top) / rect.height) * 2 + 1;
                    raycaster.setFromCamera(mouse, camera);
                    const intersects = raycaster.intersectObjects([azimuthHandle, elevationHandle, distanceHandle]);
                    if (intersects.length > 0) {
                        isDragging = true;
                        dragTarget = intersects[0].object;
                        dragTarget.material.emissiveIntensity = 1.0;
                        dragTarget.scale.setScalar(1.3);
                        dragStartMouse.copy(mouse);
                        dragStartDistance = distanceFactor;
                        dragStartY = e.clientY;
                        dragStartElevation = elevationAngle;
                        canvas.style.cursor = 'grabbing';
                    }
                });
                canvas.addEventListener('mousemove', (e) => {
                    const rect = canvas.getBoundingClientRect();
                    mouse.x = ((e.clientX - rect.left) / rect.width) * 2 - 1;
                    mouse.y = -((e.clientY - rect.top) / rect.height) * 2 + 1;
                    if (isDragging && dragTarget) {
                        raycaster.setFromCamera(mouse, camera);
                        if (dragTarget.userData.type === 'azimuth') {
                            const plane = new THREE.Plane(new THREE.Vector3(0, 1, 0), -0.05);
                            if (raycaster.ray.intersectPlane(plane, intersection)) {
                                azimuthAngle = THREE.MathUtils.radToDeg(Math.atan2(intersection.x, intersection.z));
                                if (azimuthAngle < 0) azimuthAngle += 360;
                            }
                        } else if (dragTarget.userData.type === 'elevation') {
                            const deltaY = dragStartY - e.clientY;
                            const sensitivity = 0.4;
                            elevationAngle = THREE.MathUtils.clamp(dragStartElevation + deltaY * sensitivity, -30, 60);
                        } else if (dragTarget.userData.type === 'distance') {
                            const deltaY = mouse.y - dragStartMouse.y;
                            distanceFactor = THREE.MathUtils.clamp(dragStartDistance - deltaY * 1.5, 0.6, 1.4);
                        }
                        updatePositions();
                    } else {
                        raycaster.setFromCamera(mouse, camera);
                        const intersects = raycaster.intersectObjects([azimuthHandle, elevationHandle, distanceHandle]);
                        [azimuthHandle, elevationHandle, distanceHandle].forEach(h => {
                            h.material.emissiveIntensity = 0.5;
                            h.scale.setScalar(1);
                        });
                        if (intersects.length > 0) {
                            intersects[0].object.material.emissiveIntensity = 0.8;
                            intersects[0].object.scale.setScalar(1.1);
                            canvas.style.cursor = 'grab';
                        } else {
                            canvas.style.cursor = 'default';
                        }
                    }
                });
                const onMouseUp = () => {
                    if (dragTarget) {
                        dragTarget.material.emissiveIntensity = 0.5;
                        dragTarget.scale.setScalar(1);
                        const targetAz = snapToNearest(azimuthAngle, azimuthSteps);
                        const targetEl = snapToNearest(elevationAngle, elevationSteps);
                        const targetDist = snapToNearest(distanceFactor, distanceSteps);
                        const startAz = azimuthAngle, startEl = elevationAngle, startDist = distanceFactor;
                        const startTime = Date.now();
                        function animateSnap() {
                            const t = Math.min((Date.now() - startTime) / 200, 1);
                            const ease = 1 - Math.pow(1 - t, 3);
                            let azDiff = targetAz - startAz;
                            if (azDiff > 180) azDiff -= 360;
                            if (azDiff < -180) azDiff += 360;
                            azimuthAngle = startAz + azDiff * ease;
                            if (azimuthAngle < 0) azimuthAngle += 360;
                            if (azimuthAngle >= 360) azimuthAngle -= 360;
                            elevationAngle = startEl + (targetEl - startEl) * ease;
                            distanceFactor = startDist + (targetDist - startDist) * ease;
                            updatePositions();
                            if (t < 1) requestAnimationFrame(animateSnap);
                            else updatePropsAndTrigger();
                        }
                        animateSnap();
                    }
                    isDragging = false;
                    dragTarget = null;
                    canvas.style.cursor = 'default';
                };
                canvas.addEventListener('mouseup', onMouseUp);
                canvas.addEventListener('mouseleave', onMouseUp);
                canvas.addEventListener('touchstart', (e) => {
                    e.preventDefault();
                    const touch = e.touches[0];
                    const rect = canvas.getBoundingClientRect();
                    mouse.x = ((touch.clientX - rect.left) / rect.width) * 2 - 1;
                    mouse.y = -((touch.clientY - rect.top) / rect.height) * 2 + 1;
                    raycaster.setFromCamera(mouse, camera);
                    const intersects = raycaster.intersectObjects([azimuthHandle, elevationHandle, distanceHandle]);
                    if (intersects.length > 0) {
                        isDragging = true;
                        dragTarget = intersects[0].object;
                        dragTarget.material.emissiveIntensity = 1.0;
                        dragTarget.scale.setScalar(1.3);
                        dragStartMouse.copy(mouse);
                        dragStartDistance = distanceFactor;
                        dragStartY = touch.clientY;
                        dragStartElevation = elevationAngle;
                    }
                }, { passive: false });
                canvas.addEventListener('touchmove', (e) => {
                    e.preventDefault();
                    const touch = e.touches[0];
                    const rect = canvas.getBoundingClientRect();
                    mouse.x = ((touch.clientX - rect.left) / rect.width) * 2 - 1;
                    mouse.y = -((touch.clientY - rect.top) / rect.height) * 2 + 1;
                    if (isDragging && dragTarget) {
                        raycaster.setFromCamera(mouse, camera);
                        if (dragTarget.userData.type === 'azimuth') {
                            const plane = new THREE.Plane(new THREE.Vector3(0, 1, 0), -0.05);
                            if (raycaster.ray.intersectPlane(plane, intersection)) {
                                azimuthAngle = THREE.MathUtils.radToDeg(Math.atan2(intersection.x, intersection.z));
                                if (azimuthAngle < 0) azimuthAngle += 360;
                            }
                        } else if (dragTarget.userData.type === 'elevation') {
                            const deltaY = dragStartY - touch.clientY;
                            const sensitivity = 0.4;
                            elevationAngle = THREE.MathUtils.clamp(dragStartElevation + deltaY * sensitivity, -30, 60);
                        } else if (dragTarget.userData.type === 'distance') {
                            const deltaY = mouse.y - dragStartMouse.y;
                            distanceFactor = THREE.MathUtils.clamp(dragStartDistance - deltaY * 1.5, 0.6, 1.4);
                        }
                        updatePositions();
                    }
                }, { passive: false });
                canvas.addEventListener('touchend', (e) => {
                    e.preventDefault();
                    onMouseUp();
                }, { passive: false });
                canvas.addEventListener('touchcancel', (e) => {
                    e.preventDefault();
                    onMouseUp();
                }, { passive: false });
                updatePositions();
                function render() {
                    requestAnimationFrame(render);
                    renderer.render(scene, camera);
                }
                render();
                new ResizeObserver(() => {
                    camera.aspect = wrapper.clientWidth / wrapper.clientHeight;
                    camera.updateProjectionMatrix();
                    renderer.setSize(wrapper.clientWidth, wrapper.clientHeight);
                }).observe(wrapper);
                wrapper._updateFromProps = (newVal) => {
                    if (newVal && typeof newVal === 'object') {
                        azimuthAngle = newVal.azimuth ?? azimuthAngle;
                        elevationAngle = newVal.elevation ?? elevationAngle;
                        distanceFactor = newVal.distance ?? distanceFactor;
                        updatePositions();
                    }
                };
                wrapper._updateTexture = updateTextureFromUrl;
                let lastImageUrl = props.imageUrl;
                let lastValue = JSON.stringify(props.value);
                setInterval(() => {
                    if (props.imageUrl !== lastImageUrl) {
                        lastImageUrl = props.imageUrl;
                        updateTextureFromUrl(props.imageUrl);
                    }
                    const currentValue = JSON.stringify(props.value);
                    if (currentValue !== lastValue) {
                        lastValue = currentValue;
                        if (props.value && typeof props.value === 'object') {
                            azimuthAngle = props.value.azimuth ?? azimuthAngle;
                            elevationAngle = props.value.elevation ?? elevationAngle;
                            distanceFactor = props.value.distance ?? distanceFactor;
                            updatePositions();
                        }
                    }
                }, 100);
            };
            initScene();
        })();
        """
        # Forward everything to the (customized) gr.HTML base component.
        super().__init__(
            value=value,
            html_template=html_template,
            js_on_load=js_on_load,
            imageUrl=imageUrl,
            **kwargs
        )
# --- UI ---
# Top-level Gradio layout. Components created here are wired to handlers
# later in this same `with` block.
# NOTE(review): the emoji in the UI strings below appear mojibake'd (e.g. "๐ฌ")
# by an upstream re-encode; they are reproduced byte-for-byte, not repaired.
with gr.Blocks() as demo:
    # HOME button linking to the external site (comment translated from Korean: "add HOME button")
    gr.HTML("""
    <div class="home-button-container">
        <a href="https://www.humangen.ai" target="_blank" class="home-button">
            ๐ HOME
        </a>
        <span class="url-display">๐ www.humangen.ai</span>
    </div>
    """)
    gr.LoginButton(value="Option: HuggingFace 'Login' for extra GPU quota +", size="sm")
    # Header banner with feature badges
    gr.HTML("""
    <div class="header-container">
        <div class="header-title">๐ฌ 3D CAMERA ANGLE EDITOR ๐ฌ</div>
        <div class="header-subtitle">Transform your images with precise camera control using AI!</div>
        <div style="margin-top:12px">
            <span class="stats-badge">๐ฎ 3D Control</span>
            <span class="stats-badge">๐ 8 Azimuths</span>
            <span class="stats-badge">๐ท 4 Elevations</span>
            <span class="stats-badge">๐ 3 Distances</span>
            <span class="stats-badge">โก Lightning Fast</span>
        </div>
    </div>
    """)
    gr.HTML('<div class="info-box">๐ฏ <b>How to use:</b> Upload an image โ Adjust camera angle using 3D viewport or sliders โ Click Generate!</div>')
    with gr.Row():
        # Left column: input image, 3D camera widget, sliders, prompt preview
        with gr.Column(scale=1):
            image = gr.Image(label="๐ Input Image", type="pil", height=300)
            gr.HTML('<div class="info-box">๐ฎ <b>3D Camera Control</b> - Drag the colored handles to adjust view</div>')
            # Custom Three.js-backed component defined earlier in this file;
            # its value dict mirrors the three sliders below.
            camera_3d = CameraControl3D(
                value={"azimuth": 0, "elevation": 0, "distance": 1.0},
                elem_id="camera-3d-control"
            )
            run_btn = gr.Button("๐ GENERATE!", variant="primary", size="lg")
            gr.HTML('<div class="info-box">๐๏ธ <b>Slider Controls</b> - Fine-tune your camera settings</div>')
            # Steps of 45deg match the 8 keys of AZIMUTH_MAP exactly.
            azimuth_slider = gr.Slider(
                label="๐ Azimuth (Horizontal Rotation)",
                minimum=0,
                maximum=315,
                step=45,
                value=0,
                info="0ยฐ=front, 90ยฐ=right, 180ยฐ=back, 270ยฐ=left"
            )
            # Steps of 30deg match the 4 keys of ELEVATION_MAP exactly.
            elevation_slider = gr.Slider(
                label="๐ Elevation (Vertical Angle)",
                minimum=-30,
                maximum=60,
                step=30,
                value=0,
                info="-30ยฐ=low angle, 0ยฐ=eye level, 60ยฐ=high angle"
            )
            # NOTE(review): DISTANCE_MAP at the top of the file defines "wide shot"
            # at 1.8, but this slider tops out at 1.4, and
            # snap_to_nearest(1.4, [0.6, 1.0, 1.8]) ties toward 1.0 — so "wide shot"
            # may be unreachable from this slider. The 3D widget's JS also clamps
            # distance to [0.6, 1.4]. Confirm the intended mapping before changing.
            distance_slider = gr.Slider(
                label="๐ Distance",
                minimum=0.6,
                maximum=1.4,
                step=0.4,
                value=1.0,
                info="0.6=close-up, 1.0=medium, 1.4=wide"
            )
            # Read-only echo of the prompt that will be sent to the pipeline.
            prompt_preview = gr.Textbox(
                label="๐ Generated Prompt",
                value="<sks> front view eye-level shot medium shot",
                interactive=False
            )
        # Right column: generated output and advanced generation settings
        with gr.Column(scale=1):
            result = gr.Image(label="๐ผ๏ธ Output Image", height=500)
            with gr.Accordion("โ๏ธ Advanced Settings (Quality Control)", open=False):
                gr.HTML('<div class="info-box">๐ก <b>Lightning LoRA:</b> Optimized for 4 steps. guidance_scale=1.0 recommended. Max resolution: 768px</div>')
                with gr.Row():
                    # Defaults follow the Lightning LoRA's 4-step recipe.
                    num_inference_steps = gr.Slider(
                        label="๐ข Inference Steps",
                        minimum=2,
                        maximum=8,
                        step=1,
                        value=4,
                        info="Lightning LoRA: 4 steps optimal"
                    )
                    guidance_scale = gr.Slider(
                        label="๐ฏ Guidance Scale",
                        minimum=1.0,
                        maximum=5.0,
                        step=0.5,
                        value=1.0,
                        info="1.0 for Lightning LoRA"
                    )
                with gr.Row():
                    seed = gr.Slider(label="๐ฒ Seed", minimum=0, maximum=MAX_SEED, step=1, value=0)
                    randomize_seed = gr.Checkbox(label="๐ Randomize Seed", value=True)
                with gr.Row():
                    # Overwritten by update_dimensions_on_upload when an image is uploaded.
                    width = gr.Slider(label="๐ Width", minimum=256, maximum=2048, step=8, value=768)
                    height = gr.Slider(label="๐ Height", minimum=256, maximum=2048, step=8, value=768)
    # Footer
    gr.HTML("""
    <div class="footer-comic">
        <p style="font-family:'Bangers',cursive;font-size:1.5rem;letter-spacing:2px">๐ฌ 3D CAMERA ANGLE EDITOR ๐ฌ</p>
        <p>Powered by Qwen Image Edit 2511 + Lightning LoRA + Multi-Angles LoRA</p>
        <p>๐ฎ 3D Control โข ๐ Precise Angles โข โก Fast Generation โข ๐จ High Quality</p>
        <p style="margin-top:10px"><a href="https://www.humangen.ai" target="_blank" style="color:#FACC15;text-decoration:none;font-weight:bold;">๐ www.humangen.ai</a></p>
    </div>
    """)
    # --- Event Handlers ---
| def update_prompt_from_sliders(azimuth, elevation, distance): | |
| """Update prompt preview when sliders change.""" | |
| prompt = build_camera_prompt(azimuth, elevation, distance) | |
| return prompt | |
| def sync_3d_to_sliders(camera_value): | |
| """Sync 3D control changes to sliders.""" | |
| if camera_value and isinstance(camera_value, dict): | |
| az = camera_value.get('azimuth', 0) | |
| el = camera_value.get('elevation', 0) | |
| dist = camera_value.get('distance', 1.0) | |
| prompt = build_camera_prompt(az, el, dist) | |
| return az, el, dist, prompt | |
| return gr.update(), gr.update(), gr.update(), gr.update() | |
| def sync_sliders_to_3d(azimuth, elevation, distance): | |
| """Sync slider changes to 3D control.""" | |
| return {"azimuth": azimuth, "elevation": elevation, "distance": distance} | |
| def update_3d_image(image): | |
| """Update the 3D component with the uploaded image.""" | |
| if image is None: | |
| return gr.update(imageUrl=None) | |
| import base64 | |
| from io import BytesIO | |
| buffered = BytesIO() | |
| image.save(buffered, format="PNG") | |
| img_str = base64.b64encode(buffered.getvalue()).decode() | |
| data_url = f"data:image/png;base64,{img_str}" | |
| return gr.update(imageUrl=data_url) | |
    # Slider -> Prompt preview: any slider edit refreshes the preview text.
    for slider in [azimuth_slider, elevation_slider, distance_slider]:
        slider.change(
            fn=update_prompt_from_sliders,
            inputs=[azimuth_slider, elevation_slider, distance_slider],
            outputs=[prompt_preview]
        )
    # 3D control -> Sliders + Prompt: dragging the 3D handles updates all three
    # sliders and the preview in one shot.
    camera_3d.change(
        fn=sync_3d_to_sliders,
        inputs=[camera_3d],
        outputs=[azimuth_slider, elevation_slider, distance_slider, prompt_preview]
    )
    # Sliders -> 3D control. Wired on .release (not .change), presumably so a
    # programmatic slider update from sync_3d_to_sliders does not bounce back
    # into the 3D widget — TODO confirm.
    for slider in [azimuth_slider, elevation_slider, distance_slider]:
        slider.release(
            fn=sync_sliders_to_3d,
            inputs=[azimuth_slider, elevation_slider, distance_slider],
            outputs=[camera_3d]
        )
    # Generate button -> inference. infer_camera_edit is defined elsewhere in
    # this file; it returns (result image, seed used, prompt).
    run_btn.click(
        fn=infer_camera_edit,
        inputs=[image, azimuth_slider, elevation_slider, distance_slider, seed, randomize_seed, guidance_scale, num_inference_steps, height, width],
        outputs=[result, seed, prompt_preview]
    )
    # Image upload -> resize width/height sliders (update_dimensions_on_upload,
    # defined elsewhere in this file), then push the image into the 3D preview.
    image.upload(
        fn=update_dimensions_on_upload,
        inputs=[image],
        outputs=[width, height]
    ).then(
        fn=update_3d_image,
        inputs=[image],
        outputs=[camera_3d]
    )
    # Clearing the input image also clears the 3D preview texture.
    image.clear(
        fn=lambda: gr.update(imageUrl=None),
        outputs=[camera_3d]
    )
if __name__ == "__main__":
    # Inject Three.js via the page <head> so the CameraControl3D widget can use it.
    three_js_tag = '<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>'
    demo.launch(head=three_js_tag, css=COMIC_CSS, theme=gr.themes.Soft())