# improv-chatbot2 / app.py
# Author: Anasuya Basu
# "OpenAI message hook up — let's see if it works" (commit b2f8f6f)
import gradio as gr
from openai import OpenAI
import json
import os
import requests
from pypdf import PdfReader
import gradio as gr
from huggingface_hub import InferenceClient
from huggingface_hub import login
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
# Module-level side effects: authenticate with the Hugging Face Hub
# (expects HF_TOKEN in the environment) and build the inference client
# used by `respond` below for the hosted zephyr-7b-beta model.
login(token=os.getenv("HF_TOKEN"))
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
class Harold:
    """Chatbot persona that answers questions about the "Living Playbook".

    On construction, extracts the full text of the bundled PDF and embeds
    it into the system prompt sent with every chat completion.
    """

    def __init__(self):
        # Requires OPENAI_API_KEY in the environment.
        self.openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
        self.name = "Harold"
        # Raises FileNotFoundError if the bundled PDF is missing — fail fast
        # at startup rather than mid-chat.
        reader = PdfReader("data/Living-Playbook.pdf")
        # Join once instead of repeated += (avoids quadratic string building);
        # pages with no extractable text are skipped.
        self.text = "".join(
            page_text
            for page in reader.pages
            if (page_text := page.extract_text())
        )

    def system_prompt(self):
        """Build the system prompt: persona instructions plus the book text.

        Returns:
            str: the complete system message for the chat completion.
        """
        # Fixed typo from the original prompt: "consise" -> "concise".
        prompt = f"""
You are acting as {self.name}, a helpful assistant.
You are answering questions and having discussions about the contents of the book "Living Playbook".
Be friendly and approachable but also concise and to the point. If you don't know the answer, say so.
You might be asked to explain a concept or idea in the book and describe a purpose of a game. You should be able to do this.
"""
        prompt += f"""
Here is the context of the book:
{self.text}
"""
        return prompt

    def chat(self, message, history):
        """Answer one user turn.

        Args:
            message: the user's latest message (str).
            history: prior turns as gradio "messages"-format dicts
                (``{"role": ..., "content": ...}``).

        Returns:
            str: the assistant's reply.
        """
        messages = (
            [{"role": "system", "content": self.system_prompt()}]
            + history
            + [{"role": "user", "content": message}]
        )
        response = self.openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=messages,
        )
        return response.choices[0].message.content
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a reply from the zephyr-7b-beta Inference API endpoint.

    Yields the accumulated partial response after each streamed token,
    as gradio's streaming ChatInterface expects.

    Args:
        message: the user's latest message (str).
        history: prior turns as (user, assistant) string pairs.
        system_message: system prompt text from the UI textbox.
        max_tokens / temperature / top_p: generation settings from the UI.
    """
    messages = [{"role": "system", "content": system_message}]
    for user_turn, bot_turn in history:
        # Either side of a pair may be empty (e.g. the in-flight turn).
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if bot_turn:
            messages.append({"role": "assistant", "content": bot_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    # Bug fix: the original reused `message` as the loop variable, shadowing
    # the user's message parameter with each streamed chunk.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # Bug fix: the final streamed chunk can carry a None delta;
        # guard to avoid `str + None` raising TypeError.
        if token:
            response += token
        yield response
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
# Generic streaming chatbot UI backed by `respond`, with tunable generation
# settings exposed as extra inputs.
# NOTE(review): `demo` is defined but never launched by the __main__ block,
# which launches a separate Harold interface instead. On HF Spaces the
# gradio SDK auto-launches a module-level `demo` — confirm which UI is
# actually intended to be served.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="Hello, You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)
if __name__ == "__main__":
    # Local entry point: serve Harold (the OpenAI-backed book persona)
    # through a messages-format gradio chat UI.
    assistant = Harold()
    chat_ui = gr.ChatInterface(assistant.chat, type="messages")
    chat_ui.launch()