Spaces:
Running
Running
| # pixal_agent_full.py | |
| import os | |
| import datetime | |
| import gradio as gr | |
| import requests | |
| from typing import Optional, List | |
| from langchain.llms.base import LLM | |
| from langchain.agents import initialize_agent, AgentType,load_tools | |
| from langchain.tools import Tool | |
| from langchain_experimental.tools.python.tool import PythonREPLTool | |
| import queue | |
| from typing import Any, Dict | |
| import gradio as gr | |
| from langchain.callbacks.base import BaseCallbackHandler | |
| from langchain.tools import YouTubeSearchTool as YTS | |
# 2. Custom callback handler: forwards agent progress events into a queue
#    so the UI layer can stream them.
class StreamingAgentCallbackHandler(BaseCallbackHandler):
    """Pushes agent reasoning steps, tool observations and sentinels onto a queue."""

    def __init__(self, q: queue.Queue):
        # Queue shared with the consumer (UI); every event is enqueued as a string.
        self.q = q

    def on_agent_action(self, action, **kwargs):
        # One reasoning step plus the tool call the agent decided on.
        thought = action.log.strip()
        entry = f"π§ Thought: {thought}\nπ§ Action: {action.tool}({action.tool_input})"
        self.q.put(entry)

    def on_tool_end(self, output, **kwargs):
        # A tool finished; publish its raw observation.
        self.q.put(f"π¦ Observation: {output}\n")

    def on_agent_finish(self, finish, **kwargs):
        # The agent loop produced its final answer.
        answer = finish.return_values['output']
        self.q.put(f"\nβ Final Answer: {answer}")

    def on_llm_new_token(self, token: str, **kwargs):
        # Optional per-token streaming output — intentionally disabled.
        pass

    def on_llm_end(self, response, **kwargs):
        # Sentinel the consumer uses to stop draining the queue.
        self.q.put("[END]")

    def on_llm_error(self, error, **kwargs):
        self.q.put(f"[ERROR] {str(error)}")
from langchain_community.retrievers import WikipediaRetriever
from langchain.tools.retriever import create_retriever_tool

# Korean Wikipedia retrieval, exposed to the agent as the "WIKI SEARCH" tool.
retriever = WikipediaRetriever(lang="ko", top_k_results=10)
wiki = Tool(
    name="WIKI SEARCH",
    func=retriever.get_relevant_documents,
    description="μν€λ°±κ³Όμμ νμν μ 보λ₯Ό λΆλ¬μ΅λλ€.κ²°κ΄΄λ₯Ό κ²μ¦νμ¬ μ¬μ©νμμ€.",
)
# ──────────────────────────────
# ① GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper around the GitHub Models chat-completions API.

    Sends a single user message to ``{endpoint}/chat/completions`` and
    returns the assistant's reply text.

    Raises:
        ValueError: if no token is configured or the API returns non-200.
    """

    model: str = "openai/gpt-4.1"
    endpoint: str = "https://models.github.ai/inference"
    token: Optional[str] = None  # GitHub PAT; must be set before calling.

    @property
    def _llm_type(self) -> str:
        # BUG FIX: the LangChain LLM base declares _llm_type as a property;
        # a plain method here would hand callers a bound method, not a str.
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        if not self.token:
            raise ValueError("GitHub API tokenμ΄ νμν©λλ€.")
        # SECURITY FIX: use the configured token. The original hard-coded a
        # (now leaked) personal access token here, silently ignoring
        # self.token after validating it. Rotate/revoke the leaked PAT.
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        # Bounded timeout so a hung API call cannot block the agent forever.
        resp = requests.post(
            f"{self.endpoint}/chat/completions", json=body, headers=headers, timeout=60
        )
        if resp.status_code != 200:
            raise ValueError(f"API μ€λ₯: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]
# ──────────────────────────────
# ② LLM setup
# ──────────────────────────────
# Prefer GITHUB_TOKEN; fall back to the lowercase "token" variable.
token = os.getenv("GITHUB_TOKEN") or os.getenv("token")
if not token:
    # Warn but keep going — the LLM itself raises on first use without a token.
    print("β οΈ GitHub Tokenμ΄ νμν©λλ€. μ: setx GITHUB_TOKEN your_token")
llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)
# ──────────────────────────────
# ③ Load the LangChain built-in tools
# ──────────────────────────────
tools = load_tools(
    ["ddg-search", "requests_all", "llm-math"],
    llm=llm,
    allow_dangerous_tools=True,
)
# Add YouTube search and the Wikipedia tool on top of the built-ins.
tools += [YTS(), wiki]

# ──────────────────────────────
# ④ Python execution tool (LangChain built-in REPL)
# ──────────────────────────────
python_tool = PythonREPLTool()
tools.append(
    Tool(
        name="python_repl",
        func=python_tool.run,
        description="Python μ½λλ₯Ό μ€νν©λλ€.",
    )
)
| # ββββββββββββββββββββββββββββββ | |
| # β νμΌ λꡬ | |
| # ββββββββββββββββββββββββββββββ | |
| # ββββββββββββββββββββββββββββββ | |
| # β μ νν νκ΅ μκ° ν¨μ (Asia/Seoul) | |
| # ββββββββββββββββββββββββββββββ | |
| import requests | |
| from datetime import datetime | |
| from zoneinfo import ZoneInfo | |
def time_now(_=""):
    """Return the current Asia/Seoul time as a formatted Korean string.

    Prefers an external time API for an authoritative timestamp; on a
    non-200 response or any exception it falls back to the local
    system clock (timezone-aware via zoneinfo).
    """
    try:
        resp = requests.get(
            "https://timeapi.io/api/Time/current/zone?timeZone=Asia/Seoul",
            timeout=5,
        )
        if resp.status_code == 200:
            payload = resp.json()
            # Drop fractional seconds and the ISO "T" separator.
            stamp = payload["dateTime"].split(".")[0].replace("T", " ")
            return f"νμ¬ μκ°: {stamp} (Asia/Seoul, μλ² κΈ°μ€ NTP λκΈ°ν)"
        # API answered but not with 200 — use the local clock instead.
        local_now = datetime.now(ZoneInfo("Asia/Seoul"))
        return f"νμ¬ μκ°(λ‘컬): {local_now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul)"
    except Exception as e:
        # Network/parse failure — back-up path, include the error for debugging.
        backup_now = datetime.now(ZoneInfo("Asia/Seoul"))
        return f"νμ¬ μκ°(λ°±μ ): {backup_now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul, μ€λ₯: {e})"
# ──────────────────────────────
# ⑤ Register remaining tools
# ──────────────────────────────
from langchain.memory import ConversationBufferMemory as MEM
from langchain.agents.agent_toolkits import FileManagementToolkit as FMT

# Clock tool, then the file-management toolkit rooted at the working directory.
tools.append(Tool(name="time_now", func=time_now, description="νμ¬ μκ°μ λ°νν©λλ€."))
tools.extend(FMT(root_dir=str(os.getcwd())).get_tools())
# ──────────────────────────────
# ⑥ Agent initialization
# ──────────────────────────────
q_stream = queue.Queue()
handler = StreamingAgentCallbackHandler(q_stream)
# BUG FIX: initialize_agent's keyword is `agent=`, not `agent_type=`.
# The original `agent_type=` keyword is not part of the signature, so the
# requested ZERO_SHOT_REACT_DESCRIPTION type never actually took effect.
agent = initialize_agent(
    tools,
    llm,
    memory=MEM(),
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
    handle_parsing_errors=True,
    callbacks=[handler],
)
# ──────────────────────────────
# ⑦ Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Gradio chat handler: run the agent, then stream its step log.

    Yields progressively longer strings (the drained callback log),
    finishing with the agent's final response. `history` is managed by
    gr.ChatInterface itself, so it is not mutated here.
    """
    try:
        response = agent.run(message)
    except Exception as e:
        response = f"β οΈ μ€λ₯: {e}"
    # BUG FIX: drain with get_nowait() — the original blocking
    # q_stream.get() deadlocked when the queue was empty or no "[END]"
    # sentinel had been queued (e.g. the agent failed before an LLM call).
    log = ""
    while True:
        try:
            msg = q_stream.get_nowait()
        except queue.Empty:
            break
        if msg == "[END]":
            continue  # per-LLM-call sentinel; keep draining
        log += msg + "\n"
        yield log
    # BUG FIX: the original never yielded the final answer at all.
    yield (log + "\n" if log else "") + response
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
## π€ PIXAL Assistant
**LangChain κΈ°λ° λ©ν°ν΄ μμ΄μ νΈ**
π§° DuckDuckGo Β· Wikipedia Β· Math Β· Requests Β· Python REPL Β· File Β· Time
""")
    ai1 = gr.ChatInterface(chat)

if __name__ == "__main__":
    # BUG FIX: launch the enclosing Blocks app, not the nested ChatInterface;
    # launching `ai1` directly would drop the Markdown header above it.
    demo.launch()