"""Gradio chatbot that answers questions about the MetaGPT paper via an OpenAI agent."""

import os

import gradio as gr
import openai
from llama_index.agent.openai import OpenAIAgent
from llama_index.llms.openai import OpenAI

from utils import get_doc_tools

# SECURITY: never hard-code API keys in source (the original embedded a live
# key, which must now be revoked). Read the key from the environment instead.
openai.api_key = os.environ["OPENAI_API_KEY"]

# Build vector-search and summarization tools over the MetaGPT paper.
vector_tool, summary_tool = get_doc_tools("metagpt.pdf", "metagpt")

llm = OpenAI(model="gpt-3.5-turbo", temperature=0)
agent = OpenAIAgent.from_tools(
    [vector_tool, summary_tool],
    llm=llm,
    verbose=False,
)


def get_agent_response(message, history=None):
    """Return the agent's reply to *message* as a string.

    ``history`` is required by gr.ChatInterface's callback signature but is
    unused here: the agent keeps its own conversation state internally.
    (Default changed from ``[]`` to ``None`` — mutable defaults are shared
    across calls.)
    """
    response = agent.chat(message)
    return str(response)


# Create the Gradio chat UI around the agent callback.
chatbot_interface = gr.ChatInterface(
    fn=get_agent_response,
    title="MetaGPT Agent Chatbot",
    description="Ask questions about the MetaGPT agent roles and their communication.",
)

# Guard the entry point so importing this module doesn't start the server.
if __name__ == "__main__":
    chatbot_interface.launch()