HuggingFace Space — status: Runtime error
import os

import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain import LLMChain, PromptTemplate
from langchain.memory import ConversationBufferMemory

# Fail fast if the key is absent; ChatOpenAI reads OPENAI_API_KEY from the
# environment, so there is nothing useful to do without it.
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
if not OPENAI_API_KEY:
    raise ValueError("OpenAI API Key is not set. Please set the 'OPENAI_API_KEY' environment variable.")

# Prompt layout: fixed system instruction, then the running conversation
# (filled in from memory via the "chat_history" key), then the latest turn.
template = """You are a helpful assistant to answer all user queries.
{chat_history}
User: {user_message}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"],
    template=template,
)

# Buffer memory accumulates the full transcript under "chat_history",
# which must match the prompt's input variable of the same name.
memory = ConversationBufferMemory(memory_key="chat_history")

# Chain wiring: gpt-3.5-turbo at moderate temperature; verbose=True logs
# each rendered prompt, which helps when debugging on Spaces.
llm_chain = LLMChain(
    llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo"),
    prompt=prompt,
    verbose=True,
    memory=memory,
)
| # Function to get response | |
| def get_text_response(user_message, history=None): | |
| response = llm_chain.predict(user_message=user_message) | |
| return response | |
| # Gradio Chat Interface | |
| demo = gr.ChatInterface(get_text_response, type="messages") | |
| # Launch the Gradio app | |
| if _name_ == "_main_": | |
| demo.launch(share=True, debug=True) |