Spaces:
Sleeping
Sleeping
# Third-party dependencies: langchain provides the chat model wrapper and the
# message schema; gradio provides the web chat UI.
from langchain.chat_models import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage
import openai
import gradio as gr
import os

# The openai/langchain clients read OPENAI_API_KEY from the environment on
# their own — re-assigning it via os.environ[...] = os.getenv(...) was a no-op
# when set and raised TypeError when unset (environ values must be str).
# Fail fast with a clear message instead.
if os.getenv("OPENAI_API_KEY") is None:
    raise RuntimeError("OPENAI_API_KEY environment variable is not set")

# Shared chat-model instance used by the predict() callback below.
# temperature=1.0 keeps responses varied/creative.
llm = ChatOpenAI(temperature=1.0, model="gpt-3.5-turbo-1106")
def predict(message, history):
    """Gradio ChatInterface callback: produce the assistant's next reply.

    Converts gradio's tuple-pair chat history into langchain message objects,
    appends the newest user message, and queries the shared ``llm``.

    Args:
        message: The latest user message (str).
        history: List of ``(user_text, assistant_text)`` pairs maintained by
            gradio for the current session.

    Returns:
        The model's reply text (str).
    """
    # Build the conversation locally each call — the original `global`
    # mutable served no purpose, since nothing else reads it and the list
    # was rebuilt from `history` on every invocation anyway.
    conversation = []
    for user_text, assistant_text in history:
        conversation.append(HumanMessage(content=user_text))
        conversation.append(AIMessage(content=assistant_text))
    conversation.append(HumanMessage(content=message))

    reply = llm(conversation)
    return reply.content
# Assemble the chat UI and start serving it; gradio tracks the per-session
# history it hands to `predict` on each turn.
chat_ui = gr.ChatInterface(
    fn=predict,
    title='Simple Chat Box',
    description='A simple chatbox hosted in HF by Adhya',
)
chat_ui.launch()