# NOTE(review): removed non-source residue captured from the Hugging Face
# Spaces web view (status lines, file size, commit hashes, line-number gutter).
# pixal_agent_full.py
import os
import datetime
import gradio as gr
import requests
from typing import Optional, List
from langchain.llms.base import LLM
from langchain.agents import initialize_agent, AgentType,load_tools
from langchain.agents import AgentExecutor, create_structured_chat_agent
from langchain.tools import Tool
from langchain_experimental.tools.python.tool import PythonREPLTool
import queue
from typing import Any, Dict
import gradio as gr
from langchain.callbacks.base import BaseCallbackHandler
from langchain.tools import YouTubeSearchTool as YTS
# 2. Custom callback handler
from langchain_community.retrievers import WikipediaRetriever
from langchain.tools.retriever import create_retriever_tool
# Korean-language Wikipedia retriever returning up to 10 documents per query.
retriever = WikipediaRetriever(lang="ko",top_k_results=10)
# Wrap the retriever as an agent tool.  The description text below is
# mis-encoded Korean (roughly: "fetches needed information from Wikipedia;
# verify the results before use") — left byte-identical because it is a
# runtime string the LLM sees.
wiki=Tool(func=retriever.get_relevant_documents,name="WIKI SEARCH",description="μν€λ°±κ³Όμμ νμν μ 보λ₯Ό λΆλ¬μ΅λλ€.κ²°κ΄΄λ₯Ό κ²μ¦νμ¬ μ¬μ©νμμ€.")
# ──────────────────────────────
# ✅ GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper around the GitHub Models chat-completions API."""

    # Model id and inference endpoint for GitHub Models.
    model: str = "openai/gpt-4.1"
    endpoint: str = "https://models.github.ai/inference"
    # Personal access token; must be supplied by the caller (see module setup).
    token: Optional[str] = None

    @property
    def _llm_type(self) -> str:
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* as a single user message and return the reply text.

        Raises ValueError when no token is configured or the API returns a
        non-200 status.
        """
        if not self.token:
            raise ValueError("GitHub API tokenμ΄ νμν©λλ€.")
        # SECURITY FIX: the original hard-coded a (now leaked) personal access
        # token here and ignored self.token entirely.  Always use the
        # configured token; the leaked credential must be revoked.
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        # Timeout added so a hung endpoint cannot block the agent forever.
        resp = requests.post(
            f"{self.endpoint}/chat/completions", json=body, headers=headers, timeout=60
        )
        if resp.status_code != 200:
            raise ValueError(f"API μ€λ₯: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]
# ──────────────────────────────
# ✅ LLM setup
# ──────────────────────────────
# Read the GitHub token from the environment (either GITHUB_TOKEN or the
# lowercase "token" secret name used by some Spaces deployments).
token = os.getenv("GITHUB_TOKEN") or os.getenv("token")
if not token:
    # Warn but keep going; GitHubModelLLM._call raises later if still unset.
    print("β οΈ GitHub Tokenμ΄ νμν©λλ€. μ: setx GITHUB_TOKEN your_token")
# Shared LLM instance used by the agent below.
llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)
# ──────────────────────────────
# ✅ Load LangChain built-in tools
# ──────────────────────────────
# Built-in LangChain tools (DuckDuckGo search, raw HTTP requests, math chain)
# plus YouTube search and the Wikipedia tool defined above.
# allow_dangerous_tools is required by requests_all (arbitrary HTTP access) —
# NOTE(review): confirm exposing raw HTTP to the agent is intended.
tools = load_tools(
    ["ddg-search", "requests_all", "llm-math"],
    llm=llm,allow_dangerous_tools=True
)+[YTS()]+[wiki]
# ──────────────────────────────
# ✅ Python execution tool (LangChain built-in)
# ──────────────────────────────
# REPL tool letting the agent execute arbitrary Python code.
# NOTE(review): this is code execution in the host process — confirm the
# deployment sandbox is acceptable.
python_tool = PythonREPLTool()
tools.append(Tool(name="python_repl", func=python_tool.run, description="Python μ½λλ₯Ό μ€νν©λλ€."))
from langchain import hub
# Pull the canonical structured-chat agent prompt from LangChain Hub
# (network call at import time).
prompt=hub.pull("hwchase17/structured-chat-agent")
# ──────────────────────────────
# ✅ File tools
# ──────────────────────────────
# ──────────────────────────────
# ✅ Accurate Korea time function (Asia/Seoul)
# ──────────────────────────────
import requests
from datetime import datetime
from zoneinfo import ZoneInfo
def time_now(_=""):
    """Return the current time in Asia/Seoul as a human-readable string.

    Tries an external time API first for an NTP-synchronised reading, and
    falls back to the local system clock when the API is unreachable or
    errors out.  The unused parameter exists because LangChain tools always
    pass a single string argument.
    """
    try:
        # Fetch an accurate, server-synchronised time from an external API.
        resp = requests.get(
            "https://timeapi.io/api/Time/current/zone?timeZone=Asia/Seoul",
            timeout=5,
        )
        if resp.status_code == 200:
            data = resp.json()
            # "2024-01-01T12:34:56.789" -> "2024-01-01 12:34:56"
            dt = data["dateTime"].split(".")[0].replace("T", " ")
            return f"현재 시간: {dt} (Asia/Seoul, 서버 기준 NTP 동기화)"
        # API responded with an error status: use the local system clock.
        now = datetime.now(ZoneInfo("Asia/Seoul"))
        return f"현재 시간(로컬): {now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul)"
    except Exception as e:
        # Network/parsing failure: last-resort fallback to the local clock.
        now = datetime.now(ZoneInfo("Asia/Seoul"))
        return f"현재 시간(백업): {now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul, 오류: {e})"
# ──────────────────────────────
# ✅ Tool registration
# ──────────────────────────────
# Register the time tool (description is mis-encoded Korean, roughly
# "returns the current time"; kept byte-identical — it is a runtime string).
tools.extend([Tool(name="time_now", func=time_now, description="νμ¬ μκ°μ λ°νν©λλ€.")])
from langchain.memory import ConversationBufferMemory as MEM
from langchain.agents.agent_toolkits import FileManagementToolkit as FMT
# File-management tools rooted at the current working directory.
# NOTE(review): this grants the agent read/write/delete over the whole CWD —
# confirm that is intended for the deployment environment.
tools.extend(FMT(root_dir=str(os.getcwd())).get_tools())
# ──────────────────────────────
# ✅ Agent initialization
# ──────────────────────────────
# Conversation memory + structured-chat agent wired into an executor.
mem=MEM()
agent = create_structured_chat_agent(llm, tools, prompt)
# NOTE(review): AgentExecutor is created without handle_parsing_errors, so
# malformed LLM output will raise and surface via chat()'s except — confirm
# that is the intended failure mode.
agent= AgentExecutor(agent=agent, tools=tools,memory=mem)
# ──────────────────────────────
# ✅ Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Run the agent on *message* and return the updated chat history.

    Any exception from the agent is caught and rendered as the reply so the
    UI never crashes.  Returns the history twice: once for the Chatbot
    display and once for its state output.
    """
    try:
        reply = agent.run(message)
    except Exception as err:
        reply = f"β οΈ μ€λ₯: {err}"
    updated = history + [(message, reply)]
    return updated, updated
# Gradio front-end.  The original UI strings were mojibake / had a string
# literal broken across lines by extraction; reconstructed Korean text below
# (labels: "conversation", "message", "reset"; placeholder: "enter a command
# or question...") — TODO confirm wording against the deployed app.
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    # Header / tool summary shown above the chat window.
    gr.Markdown("""
## 🤖 PIXAL Assistant
**LangChain 기반 멀티툴 에이전트**
🧰 DuckDuckGo · Wikipedia · Math · Requests · Python REPL · File · Time
""")
    chatbot = gr.Chatbot(label="PIXAL 대화", height=600)
    msg = gr.Textbox(label="메시지", placeholder="명령 또는 질문을 입력하세요...")
    clear = gr.Button("초기화")
    # chat() returns the updated history twice: display output and state.
    msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
    # Reset button clears the chat transcript.
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch()
|