Spaces:
Paused
| import gradio as gr | |
| import torch | |
| from diffusers import DiffusionPipeline | |
| import uuid | |
| import os | |
# Force CPU-only execution: everything below assumes no CUDA device.
device = "cpu"
dtype = torch.float32  # half precision is poorly supported on CPU

# Load the base FLUX.1-dev pipeline in float32.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    torch_dtype=dtype,
)
pipe.to(device)

# NOTE(review): enable_sequential_cpu_offload() is a GPU-offload mechanism
# (it shuttles submodules between CPU RAM and an accelerator) and raises on
# machines without one, so it must not be called on a CPU-only run.
# Attention slicing still helps keep peak RAM down on CPU.
pipe.enable_attention_slicing()

# Load the LoRA adapter on top of the base weights.
# (torch_dtype is not a parameter of load_lora_weights; the adapter
# inherits the pipeline's dtype. FLUX pipelines also have no
# safety_checker component, so the old `pipe.safety_checker = None`
# assignment was a no-op and has been dropped.)
pipe.load_lora_weights("rahul7star/ra3hul")
def generate_with_image(image, prompt: str):
    """Generate an image from a text prompt, optionally conditioned on an
    uploaded image.

    Args:
        image: Optional PIL image used as conditioning input; may be None.
        prompt: Text description of the desired image; may be None/empty.

    Returns:
        A ``(PIL.Image, str)`` pair of the generated image and the path it
        was saved to, or ``(None, None)`` when the prompt is missing or
        whitespace-only.
    """
    # Gradio can deliver None for an untouched textbox; guard before .strip()
    # to avoid an AttributeError.
    if not prompt or not prompt.strip():
        return None, None

    # Only forward the image kwarg when one was actually uploaded —
    # unconditionally passing image=None would be rejected as an unexpected
    # argument by a text-to-image pipeline call.
    extra_kwargs = {}
    if image is not None:
        extra_kwargs["image"] = image.convert("RGB")

    result = pipe(
        prompt,
        height=256,   # small output keeps CPU inference tractable
        width=256,
        num_inference_steps=20,
        guidance_scale=7.5,
        **extra_kwargs,
    ).images[0]

    # Save under a unique filename so concurrent requests don't clobber
    # each other's output.
    filename = f"flux_{uuid.uuid4().hex[:8]}.png"
    save_path = os.path.join("/tmp", filename)
    result.save(save_path)
    return result, save_path
# --- Gradio interface -------------------------------------------------
# Left column: optional conditioning image + prompt + trigger button.
# Right column: generated image preview and a downloadable file handle.
with gr.Blocks() as demo:
    gr.Markdown("# ๐จ FLUX.1-dev (CPU Only) + LoRA `ra3hul`")

    with gr.Row():
        with gr.Column():
            uploaded = gr.Image(type="pil", label="Upload an Image (optional)")
            prompt_input = gr.Textbox(
                label="Enter your prompt",
                placeholder="Describe the image you want...",
                lines=2,
            )
            run_button = gr.Button("๐ Generate", variant="primary")
        with gr.Column():
            result_image = gr.Image(label="Generated Image")
            result_file = gr.File(label="Download Image")

    # Wire the button to the generator: (image, prompt) -> (image, path).
    run_button.click(
        fn=generate_with_image,
        inputs=[uploaded, prompt_input],
        outputs=[result_image, result_file],
    )
if __name__ == "__main__":
    # Bind on all interfaces on the standard Spaces port; keep the link
    # private and surface tracebacks in the UI for easier debugging.
    launch_options = {
        "server_name": "0.0.0.0",
        "server_port": 7860,
        "share": False,
        "show_error": True,
    }
    demo.launch(**launch_options)