# NOTE(review): stray "Spaces / Running" status text from a Hugging Face
# Spaces page scrape was here — it is not part of the script and has been
# converted to this comment so the module parses.
# pixal_agent_full.py
#
# NOTE(review): this file was recovered from a markdown-table paste (every
# line was wrapped in "| ... | |"); the pipes have been stripped so the
# module parses again. Duplicate imports (gradio, typing) were merged.
import os
import datetime
import queue
from typing import Any, Dict, List, Optional

import gradio as gr
import requests

from langchain.llms.base import LLM
from langchain.agents import (
    AgentExecutor,
    AgentType,
    create_structured_chat_agent,
    initialize_agent,
    load_tools,
)
from langchain.callbacks.base import BaseCallbackHandler
from langchain.tools import Tool
from langchain.tools import YouTubeSearchTool as YTS
from langchain.tools.retriever import create_retriever_tool
from langchain_community.retrievers import WikipediaRetriever
from langchain_experimental.tools.python.tool import PythonREPLTool

# Korean-language Wikipedia retriever exposed to the agent as a tool.
# NOTE(review): the description string below is mojibake (UTF-8 Korean decoded
# as Latin-1); it is a runtime string the agent sees, so it is kept
# byte-for-byte rather than guessed at.
retriever = WikipediaRetriever(lang="ko", top_k_results=10)
wiki = Tool(
    func=retriever.get_relevant_documents,
    name="WIKI SEARCH",
    description="μν€λ°±κ³Όμμ νμν μ 보λ₯Ό λΆλ¬μ΅λλ€.κ²°κ΄΄λ₯Ό κ²μ¦νμ¬ μ¬μ©νμμ€.",
)
# ──────────────────────────────
# ▸ GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper around the GitHub Models chat-completions API."""

    # Model identifier understood by the GitHub Models endpoint.
    model: str = "openai/gpt-4.1"
    # Base URL of the inference API.
    endpoint: str = "https://models.github.ai/inference"
    # Personal access token; must be supplied by the caller.
    token: Optional[str] = None

    @property
    def _llm_type(self) -> str:
        # The LangChain base class declares this as a property; it is used
        # for logging/serialization only.
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* as a single user message and return the reply text.

        Raises:
            ValueError: if no token is configured, or the API responds
                with a non-200 status.
        """
        if not self.token:
            raise ValueError("GitHub API tokenμ΄ νμν©λλ€.")
        # SECURITY FIX: the original hard-coded a leaked personal access
        # token in this header while `self.token` was validated but never
        # used. Authenticate with the configured token instead (the leaked
        # token must be revoked on GitHub).
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        resp = requests.post(
            f"{self.endpoint}/chat/completions",
            json=body,
            headers=headers,
            timeout=60,  # don't hang forever on a stalled connection
        )
        if resp.status_code != 200:
            raise ValueError(f"API μ€λ₯: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]
# ──────────────────────────────
# ▸ LLM configuration
# ──────────────────────────────
# Accept either the conventional GITHUB_TOKEN or a lowercase `token` env var.
token = os.getenv("GITHUB_TOKEN") or os.getenv("token")
if not token:
    # Warn but continue; GitHubModelLLM._call raises later if still unset.
    print("β οΈ GitHub Tokenμ΄ νμν©λλ€. μ: setx GITHUB_TOKEN your_token")
llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)

# ──────────────────────────────
# ▸ Built-in LangChain tools
# ──────────────────────────────
# DuckDuckGo search, raw HTTP requests, and LLM-backed math, plus the
# YouTube search tool and the Wikipedia tool defined above.
tools = load_tools(
    ["ddg-search", "requests_all", "llm-math"],
    llm=llm,
    allow_dangerous_tools=True,  # requests_all is flagged dangerous by LangChain
) + [YTS()] + [wiki]

# ──────────────────────────────
# ▸ Python execution tool (LangChain built-in)
# ──────────────────────────────
python_tool = PythonREPLTool()
tools.append(
    Tool(name="python_repl", func=python_tool.run, description="Python μ½λλ₯Ό μ€νν©λλ€.")
)

from langchain import hub

# Pull the canonical structured-chat agent prompt from the LangChain hub.
prompt = hub.pull("hwchase17/structured-chat-agent")
# ──────────────────────────────
# ▸ File tools
# ──────────────────────────────
# ──────────────────────────────
# ▸ Accurate Korea time function (Asia/Seoul)
# ──────────────────────────────
import requests  # already imported at module top; kept, not removed
# NOTE(review): this rebinds the module-level name `datetime` (imported as a
# module above) to the class; nothing else in this file uses the module form,
# but the shadowing is worth knowing about.
from datetime import datetime
from zoneinfo import ZoneInfo


def time_now(_=""):
    """Return the current Asia/Seoul time as a formatted Korean string.

    Tries timeapi.io first for a server-synced reading; on a non-200
    response falls back to the local system clock, and on any exception
    falls back again, embedding the error in the message. The unused
    parameter exists because LangChain tools are invoked with one string
    argument.
    """
    try:
        # Fetch the authoritative time from an external API.
        resp = requests.get(
            "https://timeapi.io/api/Time/current/zone?timeZone=Asia/Seoul",
            timeout=5,
        )
        if resp.status_code == 200:
            data = resp.json()
            # "2024-01-02T03:04:05.678" -> "2024-01-02 03:04:05"
            dt = data["dateTime"].split(".")[0].replace("T", " ")
            return f"νμ¬ μκ°: {dt} (Asia/Seoul, μλ² κΈ°μ€ NTP λκΈ°ν)"
        else:
            # API failure: substitute the local system clock.
            tz = ZoneInfo("Asia/Seoul")
            now = datetime.now(tz)
            return f"νμ¬ μκ°(λ‘컬): {now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul)"
    except Exception as e:
        # Network/parsing error: local clock, reporting what went wrong.
        tz = ZoneInfo("Asia/Seoul")
        now = datetime.now(tz)
        return f"νμ¬ μκ°(λ°±μ ): {now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul, μ€λ₯: {e})"
# ──────────────────────────────
# ▸ Tool registration
# ──────────────────────────────
tools.append(Tool(name="time_now", func=time_now, description="νμ¬ μκ°μ λ°νν©λλ€."))

from langchain.memory import ConversationBufferMemory as MEM
from langchain.agents.agent_toolkits import FileManagementToolkit as FMT

# File-management tools (read/write/list/...) rooted at the working directory.
tools.extend(FMT(root_dir=str(os.getcwd())).get_tools())

# ──────────────────────────────
# ▸ Agent initialization
# ──────────────────────────────
mem = MEM()
agent = create_structured_chat_agent(llm, tools, prompt)
# Rebind `agent` to the executor; everything below calls the executor.
agent = AgentExecutor(agent=agent, tools=tools, memory=mem)
# ──────────────────────────────
# ▸ Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Run one agent turn; return the updated history twice (msg box + chatbot).

    Any exception from the agent is caught and surfaced to the user as the
    response text instead of crashing the UI.
    """
    try:
        response = agent.run(message)
    except Exception as e:
        response = f"β οΈ μ€λ₯: {e}"
    history = history + [(message, response)]
    return history, history
# Build the chat UI; `demo` is launched only when run as a script.
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
## π€ PIXAL Assistant
**LangChain κΈ°λ° λ©ν°ν΄ μμ΄μ νΈ**
π§° DuckDuckGo Β· Wikipedia Β· Math Β· Requests Β· Python REPL Β· File Β· Time
""")
    chatbot = gr.Chatbot(label="PIXAL λν", height=600)
    msg = gr.Textbox(label="λ©μμ§", placeholder="λͺ λ Ή λλ μ§λ¬Έμ μ λ ₯νμΈμ...")
    clear = gr.Button("μ΄κΈ°ν")

    # Submit routes through chat(); its two outputs both target the chatbot.
    msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
    # Clear button resets the chatbot to empty.
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch()