"""Gradio app: an AI agent that answers questions about GitHub issues.

Issues are fetched from a GitHub repository, embedded with OpenAI
embeddings, stored in an AstraDB vector store, and queried through an
OpenAI-functions agent equipped with a retriever tool and a note tool.
"""

import os

import gradio as gr
from dotenv import load_dotenv
from langchain import hub
# These two were previously commented out, causing a NameError below.
from langchain.agents import AgentExecutor, create_openai_functions_agent
from langchain.tools.retriever import create_retriever_tool
from langchain_astradb import AstraDBVectorStore
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

from github import fetch_github_issues
from note import note_tool

# ENV: loads ASTRA_DB_* and OpenAI credentials from a local .env file.
load_dotenv()


# --------------------------------------------------
# VECTOR STORE
# --------------------------------------------------
def connect_to_vstore():
    """Connect to the AstraDB vector store backing the "github" collection.

    Returns:
        AstraDBVectorStore: store configured with OpenAI embeddings;
        keyspace, endpoint and token are read from the environment
        (ASTRA_DB_KEYSPACE, ASTRA_DB_API_ENDPOINT,
        ASTRA_DB_APPLICATION_TOKEN).
    """
    embeddings = OpenAIEmbeddings()
    return AstraDBVectorStore(
        embedding=embeddings,
        collection_name="github",
        namespace=os.getenv("ASTRA_DB_KEYSPACE"),
        api_endpoint=os.getenv("ASTRA_DB_API_ENDPOINT"),
        token=os.getenv("ASTRA_DB_APPLICATION_TOKEN"),
    )


vstore = connect_to_vstore()

# --------------------------------------------------
# OPTIONAL: UPDATE VECTOR STORE
# (disable input() for Spaces)
# --------------------------------------------------
UPDATE_VECTORSTORE = True  # change to False if not needed

if UPDATE_VECTORSTORE:
    owner = "Ini-design"
    repo = "register"
    issues = fetch_github_issues(owner, repo)

    # Drop any stale collection before re-ingesting; a missing
    # collection (first run) is expected, so the failure is ignored.
    try:
        vstore.delete_collection()
    except Exception:
        pass

    vstore = connect_to_vstore()
    vstore.add_documents(issues)

# --------------------------------------------------
# RETRIEVER TOOL
# --------------------------------------------------
retriever = vstore.as_retriever(search_kwargs={"k": 3})
retriever_tool = create_retriever_tool(
    retriever,
    name="github_search",
    description=(
        "Search for information about GitHub issues. "
        "Use this tool for any GitHub issue-related questions."
    ),
)

tools = [retriever_tool, note_tool]

# --------------------------------------------------
# AGENT
# --------------------------------------------------
prompt = hub.pull("hwchase17/openai-functions-agent")

llm = ChatOpenAI(
    model="gpt-3.5-turbo",
    temperature=0,
)

agent = create_openai_functions_agent(
    llm=llm,
    tools=tools,
    prompt=prompt,
)

agent_executor = AgentExecutor(
    agent=agent,
    tools=tools,
    verbose=True,
)


# --------------------------------------------------
# GRADIO APP
# --------------------------------------------------
def answer_question(question):
    """Run the agent on *question* and return its textual answer.

    Args:
        question: user-supplied text from the Gradio textbox.

    Returns:
        str: the agent's answer; a prompt to type something when the
        input is blank; or an error message if the agent raises.
    """
    if not question.strip():
        return "Please enter a question."
    try:
        response = agent_executor.invoke({"input": question})
        return response["output"]
    except Exception as e:
        # UI boundary: surface the failure to the user instead of crashing.
        return f"Error: {e}"


demo = gr.Interface(
    fn=answer_question,
    inputs=gr.Textbox(
        label="Ask about GitHub Issues",
        placeholder="Type your question here...",
        lines=3,
    ),
    outputs=gr.Textbox(label="Response", lines=6),
    title="GitHub Issues AI Agent",
    description="Ask questions about GitHub issues using AI-powered semantic search.",
    examples=[
        ["What are the recent issues?"],
        ["Are there any open bugs?"],
        ["What features are being requested?"],
    ],
)

if __name__ == "__main__":
    demo.launch(debug=False)