# Hugging Face Space (previously failing to build): ruDALLE "Malevich"
# text-to-image Gradio demo.
# Standard library
import warnings

# Third-party
import gradio as gr
import numpy as np
import torch
from PIL import Image

# ruDALLE model utilities
from rudalle import get_rudalle_model, get_tokenizer, get_vae
from rudalle.pipelines import generate_images, show
# Alias the library sampler so the Gradio handler below (also named
# `generate_images`) can call it without shadowing itself.
from rudalle.pipelines import generate_images as rudalle_generate_images
from rudalle.utils import seed_everything

warnings.filterwarnings("ignore")
# ---------------------------------------------------------------------------
# Model setup: load the ruDALLE "Malevich" checkpoint, its tokenizer and VAE.
# ---------------------------------------------------------------------------
device = "cuda" if torch.cuda.is_available() else "cpu"
# fp16 weights are only usable on CUDA; many half-precision ops are not
# implemented on CPU, so fall back to full precision there.
use_fp16 = device == "cuda"
model = get_rudalle_model("Malevich", pretrained=True, fp16=use_fp16, device=device)
tokenizer = get_tokenizer()
vae = get_vae(dwt=False).to(device)
def generate_images(prompt, negative_prompt="", progress=gr.Progress()):
    """Generate 4 images with the ruDALLE Malevich model.

    Args:
        prompt: Text prompt; falls back to a default when blank.
        negative_prompt: Accepted for UI compatibility but ignored —
            ruDALLE's sampling pipeline has no negative-prompt support.
        progress: Gradio progress tracker for the 4-image loop.

    Returns:
        list[PIL.Image.Image]: the 4 generated images, or 4 gray
        placeholders if generation fails.
    """
    if not prompt.strip():
        prompt = "beautiful landscape"  # default prompt when input is empty
    try:
        pil_images = []
        with progress.tqdm(total=4, desc="Generating images") as pbar:
            for _ in range(4):
                # BUG FIX: this function shadows the imported
                # rudalle.pipelines.generate_images, so calling the bare
                # name here recursed into this handler instead of sampling.
                # Use the module-level alias, the library's `images_num`
                # parameter (not `image_count`), and unpack its
                # (pil_images, scores) return value.
                generated, _scores = rudalle_generate_images(
                    prompt,
                    tokenizer,
                    model,
                    vae,
                    top_k=2048,
                    top_p=0.995,
                    temperature=1.0,
                    images_num=1,
                )
                pil_images.append(generated[0])
                pbar.update(1)
        return pil_images
    except Exception as e:
        print(f"Error generating images: {e}")
        # Best-effort fallback: return placeholders so the gallery still renders.
        placeholder = Image.new('RGB', (512, 512), color='gray')
        return [placeholder] * 4
def show_selected_image(gallery, evt: gr.SelectData):
    """Return the clicked gallery item for the large preview pane.

    Args:
        gallery: Current gallery value. Gradio may deliver items either as
            plain images or as (image, caption) pairs, so both are handled.
        evt: Selection event carrying the clicked item's index.

    Returns:
        The selected image, or None when the index is out of range.
    """
    if gallery and evt.index < len(gallery):
        item = gallery[evt.index]
        # Gallery entries can be (image, caption) tuples — unwrap the image.
        if isinstance(item, (tuple, list)):
            return item[0]
        return item
    return None
# ---------------------------------------------------------------------------
# UI definition. Theme and CSS are configured on the gr.Blocks constructor
# (launch() does not accept them), and Blocks event listeners take no
# `api_visibility` keyword — passing it was a likely cause of the build error.
# ---------------------------------------------------------------------------
with gr.Blocks(
    theme=gr.themes.Soft(
        primary_hue="blue",
        secondary_hue="indigo",
        neutral_hue="slate",
        font=gr.themes.GoogleFont("Inter"),
        text_size="lg",
        spacing_size="lg",
        radius_size="md",
    ).set(
        button_primary_background_fill="*primary_600",
        button_primary_background_fill_hover="*primary_700",
        block_title_text_weight="600",
    ),
    css="""
    #gallery {
        border: 2px solid var(--primary_200);
        border-radius: 12px;
        padding: 10px;
        background: var(--background_fill_secondary);
    }
    .gradio-container {
        max-width: 1200px !important;
    }
    """,
) as demo:
    gr.Markdown("# π¨ ruDALLE Malevich Image Generation")
    gr.Markdown("Generate beautiful images using the ai-forever/rudalle-Malevich model. [Built with anycoder](https://huggingface.co/spaces/akhaliq/anycoder)")
    with gr.Row():
        with gr.Column(scale=3):
            prompt_input = gr.Textbox(
                label="Prompt",
                placeholder="Enter your prompt here (optional)",
                lines=2,
            )
            negative_prompt_input = gr.Textbox(
                label="Negative Prompt",
                placeholder="What to avoid in the image (optional)",
                lines=2,
            )
            with gr.Row():
                generate_btn = gr.Button("π¨ Generate Images", variant="primary", size="lg")
                clear_btn = gr.Button("ποΈ Clear", size="lg")
        with gr.Column(scale=2):
            gr.Markdown("""
            ### Instructions:
            - Enter a prompt or leave empty for random generation
            - Add negative prompts to exclude elements
            - Click Generate to create 4 images
            - Click any image to view it larger
            """)
    with gr.Row():
        gallery = gr.Gallery(
            label="Generated Images",
            columns=2,
            rows=2,
            height="auto",
            allow_preview=True,
            show_label=True,
            elem_id="gallery",
        )
    with gr.Row():
        selected_image = gr.Image(
            label="Selected Image (Click an image above)",
            height=512,
            width=512,
            interactive=False,
        )

    # Event handlers ---------------------------------------------------------
    generate_btn.click(
        fn=generate_images,
        inputs=[prompt_input, negative_prompt_input],
        outputs=[gallery],
    )
    gallery.select(
        fn=show_selected_image,
        inputs=[gallery],
        outputs=[selected_image],
    )
    clear_btn.click(
        fn=lambda: (None, None, None),
        outputs=[prompt_input, negative_prompt_input, gallery],
    )
    # Generate once on page load with the default (empty) prompt.
    demo.load(
        fn=lambda: generate_images(""),
        outputs=[gallery],
    )
# Launch the app. gr.Blocks.launch() accepts server/runtime options only —
# `theme`, `css`, and `footer_links` are not launch() parameters (passing
# unknown keywords raises TypeError, which matches the Space's build error).
# Theme and CSS should be supplied to the gr.Blocks(...) constructor instead,
# and the footer attribution link already appears in the header Markdown.
demo.launch()