# Coaching_TOM / app.py
# Author: notrito — commit 5c17834 (verified), message: "Update app.py"
import os
import openai
from openai import OpenAI
import json
import re
from dotenv import load_dotenv, find_dotenv
import io
import IPython.display
from PIL import Image
import base64
import requests, json
import gradio as gr
requests.adapters.DEFAULT_TIMEOUT = 60
import random
# API Keys
_ = load_dotenv(find_dotenv()) # read local .env file
openai.api_key = os.environ['OPENAI_API_KEY']
client = OpenAI()
def chat(system_prompt, user_prompt, model='gpt-3.5-turbo', temperature=0, verbose=False):
    """Send one system+user prompt pair to the OpenAI chat API and return the reply text.

    Args:
        system_prompt: Content for the "system" role message.
        user_prompt: Content for the "user" role message.
        model: OpenAI chat model name (default 'gpt-3.5-turbo').
        temperature: Sampling temperature passed to the API (default 0, deterministic).
        verbose: When True, echo the prompt and response to stdout.

    Returns:
        The assistant's reply as a string.

    Raises:
        openai.OpenAIError: Propagated from the underlying API call on failure.
    """
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    completion = client.chat.completions.create(
        model=model,
        temperature=temperature,  # bug fix: was hardcoded to 0, silently ignoring the parameter
        messages=messages,
    )
    ai_response = completion.choices[0].message.content
    if verbose:
        print('User prompt:', user_prompt)
        print('GPT response:', ai_response)
    return ai_response
def respond(message, chat_history):
    """Gradio callback: append a canned bot reply to the chat history (no LLM call).

    Args:
        message: The user's submitted text.
        chat_history: List of (user, bot) message tuples; mutated in place.

    Returns:
        A ("", chat_history) pair — the empty string clears the input textbox,
        and the updated history re-renders the Chatbot component.
    """
    # No LLM here, just respond with a random pre-made message
    bot_message = random.choice([
        "1-Tell me more about it",  # typo fix: was "Tell me ore about it"
        "2-Cool, but I'm not interested",
        "3-Hmmmm, ok then",
    ])
    chat_history.append((message, bot_message))
    return "", chat_history
# Build the Gradio UI: a chatbot pane, a prompt textbox, submit/clear buttons.
# Both the button click and pressing Enter in the textbox route through respond(),
# which clears the textbox and appends the exchange to the chatbot history.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(height=240)  # just to fit the notebook
    msg = gr.Textbox(label="Prompt")
    btn = gr.Button("Submit")
    clear = gr.ClearButton(components=[msg, chatbot], value="Clear console")  # resets textbox + history
    btn.click(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])
    msg.submit(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])  # Press enter to submit

gr.close_all()  # close any Gradio servers left over from earlier runs (notebook convenience)
demo.launch(share=True)  # share=True exposes a temporary public gradio.live URL