import gradio as gr
import requests
import random
from transformers import pipeline
classifier = pipeline("zero-shot-classification", model="valhalla/distilbart-mnli-12-3")
labels = [
"animals", "people", "places", "history", "science", "art", "technology",
"sports", "food", "clothing", "home", "entertainment", "education", "nature", "transportation"
]
def preprocess_topic(topic):
    """Return a classifier-friendly phrase for *topic*.

    The raw input is lower-cased and stripped; short or ambiguous words
    (e.g. "shirt", "orcas") are expanded to a more descriptive phrase so
    the zero-shot classifier has better context. Unknown topics pass
    through unchanged.
    """
    key = topic.lower().strip()
    expansions = {
        "shirt": "clothing item shirt",
        "jeans": "clothing item jeans",
        "shoes": "clothing item shoes",
        "dress": "clothing item dress",
        "sandals": "clothing item sandals",
        "cookie": "sweet snack cookie",
        "orcas": "marine mammal orcas",
        "penguin": "bird that swims penguin",
        "whale": "large marine animal whale",
        "floor": "interior surface floor",
        "blanket": "household item blanket",
        "bed": "furniture item bed",
        "lamp": "household lighting lamp",
        "girl": "young person female",
        "bag": "fashion accessory bag",
        "kids": "young people children",
    }
    return expansions.get(key, key)
random_topics = [
"cats", "space", "chocolate", "Egypt", "Leonardo da Vinci",
"volcanoes", "Tokyo", "honeybees", "quantum physics", "orcas"
]
def get_wikipedia_facts(topic):
    """Look up *topic* on Wikipedia and return a (facts, image_url, categories) triple.

    Returns:
        facts (str): up to three randomly chosen sentences from the article
            intro, or a user-facing apology/error message.
        image_url (str | None): thumbnail URL of the matched page, if any.
        categories (str | None): zero-shot classification summary, or None
            when the lookup failed.
    """
    if not topic.strip():
        return "Please enter a topic or use 'Surprise me!'", None, None
    headers = {
        # Wikipedia API etiquette: identify the client in the User-Agent.
        "User-Agent": "SmartFactApp/1.0"
    }
    params = {
        "action": "query",
        "format": "json",
        "prop": "extracts|pageimages",
        "exintro": True,        # intro section only
        "explaintext": True,    # plain text, no HTML markup
        "piprop": "thumbnail",
        "pithumbsize": 400,
        "generator": "search",  # resolve free-text input to the best-matching page
        "gsrsearch": topic,
        "gsrlimit": 1,
    }
    try:
        response = requests.get(
            "https://en.wikipedia.org/w/api.php",
            params=params, headers=headers, timeout=5,
        )
        # Fix: fail fast on HTTP errors (4xx/5xx) instead of trying to
        # interpret an error page as a query result.
        response.raise_for_status()
        data = response.json()
        pages = data.get("query", {}).get("pages", {})
        if not pages:
            return f"Sorry, no information found for '{topic}'.", None, None
        page = next(iter(pages.values()))
        extract_text = page.get("extract", "")
        image_url = page.get("thumbnail", {}).get("source", None)
        sentences = [s.strip() for s in extract_text.replace("\n", " ").split(". ") if s.strip()]
        # Fix: a matched page with an empty intro used to produce an empty
        # facts box; surface the same "no information" message instead.
        if not sentences:
            return f"Sorry, no information found for '{topic}'.", None, None
        facts = random.sample(sentences, min(3, len(sentences)))
        # Re-add the period that ". "-splitting stripped from each sentence.
        facts = [fact if fact.endswith(".") else fact + "." for fact in facts]
        facts_text = "\n\n".join(f"π‘ {fact}" for fact in facts)
        processed_input = preprocess_topic(topic)
        classification = classifier(processed_input, candidate_labels=labels)
        top_labels = classification["labels"][:3]
        top_scores = classification["scores"][:3]
        classification_text = "π§ Top categories:\n"
        if top_scores[0] < 0.3:  # even the best label is low-confidence
            classification_text = "π§ Category uncertain.\n" + classification_text
        for label, score in zip(top_labels, top_scores):
            classification_text += f"- {label} ({score:.2%})\n"
        return facts_text, image_url, classification_text
    except Exception as e:
        # Broad boundary handler: the UI should show a friendly message,
        # never a traceback, whatever went wrong (network, JSON, model).
        print("Error:", e)
        return "Oops! Something went wrong while fetching your facts.", None, None
def surprise_topic(_):
    """Fetch facts for a topic picked at random from the curated list.

    The single ignored argument lets the function be wired directly to a
    Gradio event callback.
    """
    return get_wikipedia_facts(random.choice(random_topics))
with gr.Blocks() as demo:
gr.HTML("""
<style>
body {
background-color: #ADD8E6 !important;
}
.gradio-container {
background-color: transparent !important;
}
</style>
""")
gr.Markdown("""
# π Smart Wikipedia Fact Finder
Search any topic and discover:
- π Three interesting facts
- πΌοΈ A related image
- π§ AI-predicted topic category
π Try something like **"bed"**, **"quantum physics"**, or click **π² Surprise me!**
""")
with gr.Row():
with gr.Column(scale=3):
topic_input = gr.Textbox(label="Enter a Topic", placeholder="e.g. Eiffel Tower, cookies, World War II")
with gr.Column(scale=1):
surprise_button = gr.Button("π² Surprise me!")
gr.Markdown("---")
with gr.Row():
with gr.Column():
facts_output = gr.Textbox(label="π Wikipedia Facts", lines=6)
classification_output = gr.Textbox(label="π§ Topic Classification")
with gr.Column():
image_output = gr.Image(label="πΌοΈ Related Image")
topic_input.submit(get_wikipedia_facts, inputs=topic_input, outputs=[facts_output, image_output, classification_output])
surprise_button.click(surprise_topic, inputs=None, outputs=[facts_output, image_output, classification_output])
if __name__ == "__main__":
demo.launch()