# clip_score / app.py — Gradio demo for scoring image/text alignment with CLIP.
# Source: HuggingFace Space by sunhill, commit 6cd6130 ("temporary commit"), 876 bytes.
import evaluate
import gradio as gr
# Load the CLIP-score metric — presumably from the Space's local "clip_score.py"
# script rather than the evaluate hub (the ".py" suffix suggests a path); verify
# against the Space's file listing. Loaded once at import so the underlying CLIP
# model is not re-initialized on every request.
metric = evaluate.load("clip_score.py")
def compute_clip_score(image, text):
    """Return the CLIP score measuring how well ``text`` describes ``image``.

    Args:
        image: PIL image supplied by the Gradio image widget.
        text: Candidate caption entered by the user.

    Returns:
        The ``"clip_score"`` value from the metric's result dict.
    """
    # The metric expects parallel lists; wrap the single pair accordingly.
    return metric.compute(predictions=[text], images=[image])["clip_score"]
# Demo pairs shown under the UI: (image URL, candidate caption).
_EXAMPLES = [
    [
        "https://images.unsplash.com/photo-1720539222585-346e73f01536",
        "A cat sitting on a couch.",
    ],
    [
        "https://images.unsplash.com/photo-1694253987647-4eebcf679974",
        "A scenic view of mountains during sunset.",
    ],
]

# Two-input interface: an image upload plus a free-text caption box,
# producing a single numeric CLIP score.
iface = gr.Interface(
    fn=compute_clip_score,
    inputs=[
        gr.Image(type="pil"),
        gr.Textbox(lines=2, placeholder="Enter text here..."),
    ],
    outputs=gr.Number(label="CLIP Score"),
    title="CLIP Score Evaluator",
    description="Evaluate the alignment between an image and a text using CLIP Score.",
    examples=_EXAMPLES,
)

# Launched at module top level so the app starts whether the file is run
# directly or imported by the Spaces runtime.
iface.launch()