|
|
import gradio as gr |
|
|
from transformers import pipeline |
|
|
|
|
|
|
|
|
# Load the StudyBuddyAI model as a Hugging Face text-generation pipeline.
# device=-1 pins inference to CPU (no GPU assumed on the host).
chat = pipeline("text-generation", model="redmint/studybuddy-ai", device=-1)
|
|
|
|
|
|
|
|
def respond(message, history):
    """Generate a reply for *message* given the running chat *history*.

    Parameters
    ----------
    message : str
        The user's new question from the textbox.
    history : list[tuple[str, str]] | None
        Prior (user, bot) exchanges held by the Chatbot component.
        Gradio may pass ``None`` before the first exchange.

    Returns
    -------
    tuple[list, list]
        The updated history, twice — the UI wires both outputs to the
        same Chatbot component.
    """
    # Robustness: tolerate a missing history on the very first turn.
    history = history or []

    # Rebuild the running Q/A transcript. A single join avoids the
    # quadratic cost of repeated `+=` as the conversation grows.
    context = "".join(
        f"Question: {user}\nAnswer: {bot}\n" for user, bot in history
    )
    prompt = context + f"Question: {message}\nAnswer:"

    # NOTE: text-generation pipelines echo the prompt, so
    # `generated_text` is the prompt plus the model's continuation.
    output = chat(
        prompt,
        max_new_tokens=200,
        temperature=0.4,
        top_p=0.9,
        do_sample=True,
    )[0]["generated_text"]

    # Keep only the text after the final "Answer:" marker — that is the
    # model's reply to the newest question (earlier markers belong to
    # the echoed prompt/history).
    if "Answer:" in output:
        response = output.split("Answer:")[-1].strip()
    else:
        response = output.strip()

    history.append((message, response))
    return history, history
|
|
|
|
|
|
|
|
# --- Gradio UI wiring ---------------------------------------------------
with gr.Blocks() as demo:


    # Conversation display; holds the (user, bot) tuple history that
    # respond() reads and extends.
    chatbot = gr.Chatbot()


    msg = gr.Textbox(label="Ask StudyBuddyAI")


    # On Enter, call respond(message, history). Both outputs are wired
    # to the same Chatbot component, matching respond's (history, history)
    # return value.
    # NOTE(review): the textbox is never cleared after submit — wiring the
    # second output to `msg` (and returning "" for it) would clear it;
    # confirm before changing, as it requires a coordinated respond() edit.
    msg.submit(respond, [msg, chatbot], [chatbot, chatbot])




if __name__ == "__main__":


    # Start the local Gradio server only when run as a script.
    demo.launch()
|
|
|