# Image-to-prompt Gradio app: captions an uploaded image with Salesforce BLIP.
from transformers import BlipProcessor, BlipForConditionalGeneration
from PIL import Image
import gradio as gr
# Load the BLIP-large captioning processor and model once at import time.
# NOTE(review): from_pretrained downloads weights from the Hugging Face hub on
# first run — this can take a while and requires network access.
processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-large")
model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-large")
def image_to_prompt(image):
    """Generate a descriptive text caption for *image* using BLIP.

    Args:
        image: A PIL image (as delivered by the Gradio image input).

    Returns:
        The decoded caption string with special tokens stripped.
    """
    batch = processor(images=image, return_tensors="pt")
    token_ids = model.generate(**batch, max_length=50)
    return processor.decode(token_ids[0], skip_special_tokens=True)
# Wire the captioning function into a simple web UI: one image in, one text out.
iface = gr.Interface(
    fn=image_to_prompt,
    inputs=gr.Image(type="pil"),
    outputs=gr.Textbox(label="Generated Prompt"),
    title="Image to Prompt Generator (using BLIP)",
    description="Upload an image and get a text prompt describing it.",
)

# Bugfix: the original line read `iface.launch() |` — the trailing pipe was a
# copy/paste artifact and a SyntaxError. Also guard the server start so the
# module can be imported without launching.
if __name__ == "__main__":
    iface.launch()