# GRADIO / app.py
# (Hugging Face Space page residue preserved as a comment:
#  author: An-Egoistic-Developer-Full-Of-Knowledge — "Update app.py", commit 0f14d6f, verified)
import gradio as gr
from transformers import pipeline
# ✅ Use a lightweight, always-free model.
# Built once at import time; the first run downloads the flan-t5-small
# weights from the Hugging Face Hub (network required).
jarvis = pipeline("text2text-generation", model="google/flan-t5-small")
# Function to handle messages (compatible with new Gradio)
# Function to handle messages (compatible with new Gradio)
def chat(message, history):
    """Generate one assistant reply from the conversation so far.

    Parameters
    ----------
    message : str
        The user's latest message.
    history : list[dict]
        Prior turns in Gradio "messages" format: each item is a dict
        with "role" ("user" or "assistant") and "content" keys.

    Returns
    -------
    str
        The assistant's reply. ``gr.ChatInterface`` appends it to the
        history itself, so only the new text is returned (the old code
        returned a ``("", history)`` tuple, which is the Blocks/Chatbot
        convention and renders incorrectly under ChatInterface).
    """
    # Rebuild a plain-text transcript. With type="messages" every history
    # item is {"role": ..., "content": ...}; the previous code looked up a
    # "response" key that Gradio never sends and labeled every turn "User".
    lines = []
    for turn in history:
        speaker = "User" if turn.get("role") == "user" else "Jarvis"
        lines.append(f"{speaker}: {turn.get('content', '')}")
    lines.append(f"User: {message}")
    context = "\n".join(lines) + "\nJarvis:"

    # Generate the reply (sampled, so output varies run to run).
    response = jarvis(context, max_new_tokens=128, temperature=0.7, do_sample=True)
    return response[0]["generated_text"]
# Gradio Chat Interface (new "messages" style).
# Fix: the user-facing description contained a mojibake em dash ("β€”",
# UTF-8 bytes decoded as cp1252); restored the intended "—".
gr.ChatInterface(
    fn=chat,
    title="Jarvis AI V2",
    description="Your personal AI assistant — accessible anywhere in the world.",
    theme="soft",
    type="messages",  # 👈 important: history arrives as role/content dicts, avoiding tuple errors
).launch(share=True)  # share=True opens a public tunnel URL — demo/notebook use