File size: 1,731 Bytes
876ff30
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import gradio as gr
from transformers import pipeline

# Load the Hugging Face model for text tasks.
# A single text2text-generation pipeline (flan-t5-small) is created once at
# module import and shared by all three helper functions below; the first run
# will download the model weights if they are not cached locally.
text_generator = pipeline("text2text-generation", model="google/flan-t5-small")

# --- Helper functions ---
def summarize_text(text):
    """Condense *text* into a short summary via the shared text model."""
    outputs = text_generator(
        f"Summarize in 3 sentences: {text}",
        max_length=100,
        do_sample=False,
    )
    return outputs[0]["generated_text"]

def answer_question(topic, question):
    """Answer *question* in the context of *topic* via the shared text model."""
    prompt = f"Topic: {topic}\nAnswer this question: {question}"
    first = text_generator(prompt, max_length=150, do_sample=False)[0]
    return first["generated_text"]

def generate_story(theme):
    """Generate a short story about *theme* (sampled, so output varies per call)."""
    outputs = text_generator(
        f"Write a 200-word story about: {theme}",
        max_length=300,
        do_sample=True,
    )
    return outputs[0]["generated_text"]

# --- Gradio UI ---
with gr.Blocks() as demo:
    gr.Markdown("# Mini AI Text Assistant")

    # Radio starts with no selection; process() guards against that below.
    task = gr.Radio(["Summarizer", "Q&A Bot", "Story Generator"], label="Choose a task")

    input_text = gr.Textbox(label="Enter text or topic/theme", placeholder="Type here...")
    input_question = gr.Textbox(label="Enter question (for Q&A only)", visible=False)
    output = gr.Textbox(label="Output")

    def process(task_choice, text, question):
        """Dispatch the inputs to the handler for the selected task.

        Fix: the original returned None when no task was selected (the Radio
        has no default) or when the text box was empty, leaving the output
        blank with no feedback. Guard clauses now surface a helpful message.
        """
        if not task_choice:
            return "Please choose a task first."
        if not text:
            return "Please enter some text, a topic, or a theme."
        if task_choice == "Summarizer":
            return summarize_text(text)
        if task_choice == "Q&A Bot":
            return answer_question(text, question)
        return generate_story(text)

    # Reveal the question box only while the Q&A task is selected.
    task.change(lambda t: gr.update(visible=(t=="Q&A Bot")), inputs=task, outputs=input_question)
    btn = gr.Button("Run")
    btn.click(process, inputs=[task, input_text, input_question], outputs=output)

demo.launch()