# NOTE: removed paste artifacts ("Spaces:" / "Runtime error" table headers
# from the original export) — they were not part of the program.
import os

import gradio as gr
from dotenv import load_dotenv
from langchain import hub
from langchain.agents import (
    AgentExecutor,
    AgentType,
    create_openai_functions_agent,
    initialize_agent,
)
from langchain.tools.retriever import create_retriever_tool
from langchain_astradb import AstraDBVectorStore
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

from github import fetch_github_issues
from note import note_tool
# Pull API keys / endpoints from a local .env file into the environment.
load_dotenv()


def connet_to_vstore():
    """Connect to the AstraDB vector store backing the "github" collection.

    Reads ``ASTRA_DB_APPLICATION_TOKEN``, ``ASTRA_DB_API_ENDPOINT`` and the
    optional ``ASTRA_DB_KEYSPACE`` from the environment.

    Returns:
        AstraDBVectorStore: a store that embeds documents with OpenAI
        embeddings.
    """
    embeddings = OpenAIEmbeddings()
    # os.getenv already yields None when the variable is unset, so the
    # original if/else was redundant; `or None` also maps "" -> None.
    keyspace = os.getenv("ASTRA_DB_KEYSPACE") or None
    return AstraDBVectorStore(
        embedding=embeddings,
        collection_name="github",
        namespace=keyspace,
        api_endpoint=os.getenv("ASTRA_DB_API_ENDPOINT"),
        token=os.getenv("ASTRA_DB_APPLICATION_TOKEN"),
    )
vstore = connet_to_vstore()

# Optionally re-sync the vector store with the latest GitHub issues.
add_to_vectorstore = input("Do you want to update the issue? (yes/no): ").lower() in ["yes", "y"]
if add_to_vectorstore:
    owner = "Ini-design"
    repo = "register"
    issues = fetch_github_issues(owner, repo)

    # Drop the old collection so stale issues are not left behind.  This is
    # best-effort (the collection may not exist yet), but a bare
    # `except: pass` hid every failure — at least report what went wrong.
    try:
        vstore.delete_collection()
    except Exception as exc:
        print(f"Warning: could not delete existing collection: {exc}")

    # Reconnect so the collection is recreated, then index the fresh issues.
    vstore = connet_to_vstore()
    vstore.add_documents(issues)
# Expose the vector store as a retriever tool the agent can call.
retriever = vstore.as_retriever(search_kwargs={"k": 3})
retriever_tool = create_retriever_tool(
    retriever,
    "github_search",
    "Search for information about GitHub issues. "
    "For any question about a GitHub issue, you must use this tool!",
)

# Standard OpenAI-functions agent prompt from the LangChain Hub.
# BUG FIX: the original id "hwchase17/openai-functions_agent" is a typo —
# the published prompt is "hwchase17/openai-functions-agent".
prompt = hub.pull("hwchase17/openai-functions-agent")
llm = ChatOpenAI()

# BUG FIX: `tools = [retriever_tool], note_tool` built the 2-tuple
# ([retriever_tool], note_tool); the agent needs one flat list of tools.
tools = [retriever_tool, note_tool]

# BUG FIX: the original passed AgentType.OPENAI_FUNCTIONS (an enum) as the
# AgentExecutor's agent and called initialize_agent with the wrong argument
# order.  Build a real OpenAI-functions agent from llm/tools/prompt instead.
agent = create_openai_functions_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
def answer_question(question):
    """Run the agent on *question* and return its textual answer.

    Blank input gets a short prompt instead of an agent call; any failure
    while invoking the agent is returned as an "Error: ..." string rather
    than raised, so the UI never crashes.
    """
    if not question.strip():
        return "Please enter a question."
    try:
        response = agent_executor.invoke({"input": question})
    except Exception as e:
        return f"Error: {str(e)}"
    return response["output"]
# --- Gradio UI -------------------------------------------------------------
# Build the input/output widgets up front, then wire them into the Interface.
question_box = gr.Textbox(
    label="Ask about GitHub Issues",
    placeholder="Type your question here...",
    lines=3,
)
answer_box = gr.Textbox(label="Response", lines=5)

demo = gr.Interface(
    fn=answer_question,
    inputs=question_box,
    outputs=answer_box,
    title="GitHub Issues AI Agent",
    description="Ask questions about GitHub issues using AI-powered semantic search",
    examples=[
        ["What are the recent issues?"],
        ["Tell me about bug reports"],
        ["What features are being requested?"],
    ],
    allow_flagging="never",
)

if __name__ == "__main__":
    demo.launch(debug=False)