# app.py — Gradio chatbot powered by ibm-granite/granite-3.3-2b-instruct
# Author: Sunder34m2010 (commit 801aeee, verified)
import gradio as gr
from transformers import pipeline
# Model checkpoint served by this Space.
MODEL_ID = "ibm-granite/granite-3.3-2b-instruct"

# Build the text-generation pipeline once at import time so every request
# reuses the same loaded weights.
pipe = pipeline("text-generation", model=MODEL_ID)
def chat(user_input, history, max_new_tokens=256, temperature=0.7, top_p=0.9):
    """Generate one assistant reply for *user_input* given the chat so far.

    Args:
        user_input: The newest user message (plain text).
        history: Gradio tuple-style history — a list of ``(user_msg, bot_msg)``
            pairs. ``bot_msg`` may be ``None`` for a pending turn; such
            entries contribute only their user side to the prompt.
        max_new_tokens: Generation budget for the reply (default matches the
            original hard-coded value).
        temperature: Sampling temperature (default 0.7, as before).
        top_p: Nucleus-sampling cutoff (default 0.9, as before).

    Returns:
        The assistant's reply text extracted from the pipeline output.
    """
    # Convert chat history to the role/content message format the
    # Granite chat template expects.
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        # Guard: a None bot message (incomplete turn) must not be injected
        # into the prompt — it would corrupt chat templating.
        if bot_msg is not None:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": user_input})
    # Generate response
    output = pipe(
        messages,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
    )
    # The pipeline returns the full message list; the last entry is the
    # newly generated assistant turn.
    return output[0]["generated_text"][-1]["content"]
# --- Gradio front end -------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 IBM Granite 3.3 Chatbot")
    gr.Markdown("Powered by **ibm-granite/granite-3.3-2b-instruct**")

    conversation = gr.Chatbot()
    user_box = gr.Textbox(
        placeholder="Ask me anything...",
        label="Your message"
    )
    reset_button = gr.Button("Clear")

    def _on_submit(text, pairs):
        """Run the model on *text*, append the turn, and empty the textbox."""
        reply = chat(text, pairs)
        return "", pairs + [(text, reply)]

    # Pressing Enter in the textbox sends the message and refreshes the chat.
    user_box.submit(_on_submit, [user_box, conversation], [user_box, conversation])
    # The Clear button resets the chat display to an empty history.
    reset_button.click(lambda: [], None, conversation)

demo.launch()