# Spaces: Sleeping
# (Hugging Face Spaces status banner captured during page extraction)
"""Simple conversational agent: a LangChain ConversationChain served via Gradio."""
import os

import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory

# Load the OpenAI API key from the environment. Fail fast with a clear
# message instead of passing None to ChatOpenAI and getting an opaque
# authentication error on the first request.
openai_api_key = os.getenv("OPENAI_API_KEY")
if not openai_api_key:
    raise RuntimeError("OPENAI_API_KEY environment variable is not set")

# Chat model; temperature=0.7 trades determinism for conversational variety.
llm = ChatOpenAI(temperature=0.7, openai_api_key=openai_api_key)

# Buffer memory keeps the full transcript so each turn sees prior context.
memory = ConversationBufferMemory()

# The chain ties model and memory together into a stateful conversation.
conversation = ConversationChain(llm=llm, memory=memory)
| # Define Gradio interface | |
def chat_with_agent(user_input):
    """Run one conversational turn through the chain and return the reply text.

    Args:
        user_input: The user's message for this turn.

    Returns:
        The agent's response string, with conversation history handled
        by the chain's memory as a side effect.
    """
    reply = conversation.predict(input=user_input)
    return reply
# Wire the chat handler into a minimal Gradio text-in / text-out UI.
iface = gr.Interface(
    fn=chat_with_agent,
    inputs=gr.Textbox(lines=2, placeholder="Ask me anything..."),
    outputs="text",
    title="LangChain Agent with GPT-3.5",
    description="A simple conversational agent using LangChain and OpenAI GPT-3.5",
)

# Launch the web server only when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()