Spaces:
Sleeping
Sleeping
File size: 7,251 Bytes
c7188b5 8a8939e f3ab7bd e9b69d7 3fefcc3 e5ac220 3fefcc3 33990d0 86c9f34 c7188b5 9febc25 c7188b5 2dd74e7 e175515 33990d0 c7188b5 d60e9d2 c7188b5 d167a15 d60e9d2 d167a15 c7188b5 d60e9d2 d167a15 d60e9d2 d167a15 c7188b5 35475f8 33990d0 c7188b5 3fefcc3 c7188b5 33990d0 c7188b5 d60e9d2 3fefcc3 c7188b5 3fefcc3 c7188b5 10d06f8 c7188b5 a4e9d4c c7188b5 fbfc146 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 |
# pixal_agent_full.py
import os
import datetime
import gradio as gr
import requests
from typing import Optional, List
from langchain.llms.base import LLM
from langchain.agents import initialize_agent, AgentType,load_tools
from langchain.tools import Tool
from langchain_experimental.tools.python.tool import PythonREPLTool
import queue
from typing import Any, Dict
import gradio as gr
from langchain.callbacks.base import BaseCallbackHandler
from langchain.tools import YouTubeSearchTool as YTS
# 2. Custom callback handler: streams agent progress into a queue for the UI.
class StreamingAgentCallbackHandler(BaseCallbackHandler):
    """Push agent Thought/Action/Observation logs onto *q* as they happen.

    The Gradio chat handler drains this queue to show intermediate steps.
    NOTE(review): emoji in the log strings were mojibake'd in the original
    source; reconstructed as 🧠/🔧/📦/✅ — confirm against the intended UI.
    """

    def __init__(self, q: queue.Queue):
        self.q = q

    def on_agent_action(self, action, **kwargs):
        # Emit the model's reasoning plus the chosen tool invocation.
        log = f"🧠 Thought: {action.log.strip()}\n🔧 Action: {action.tool}({action.tool_input})"
        self.q.put(log)

    def on_tool_end(self, output, **kwargs):
        self.q.put(f"📦 Observation: {output}\n")

    def on_agent_finish(self, finish, **kwargs):
        # FIX: this f-string was broken across two lines in the source
        # (extraction split on a mojibake'd emoji) — a syntax error as written.
        self.q.put(f"\n✅ Final Answer: {finish.return_values['output']}")

    def on_llm_new_token(self, token: str, **kwargs):
        # Optional per-token streaming output; intentionally a no-op.
        pass

    def on_llm_end(self, response, **kwargs):
        # Sentinel consumed by the UI loop to stop draining the queue.
        self.q.put("[END]")

    def on_llm_error(self, error, **kwargs):
        self.q.put(f"[ERROR] {str(error)}")
from langchain_community.retrievers import WikipediaRetriever
from langchain.tools.retriever import create_retriever_tool

# Korean-language Wikipedia retriever exposed to the agent as a Tool.
# FIX: the description was mojibake'd (double-encoded UTF-8) and therefore
# useless as tool guidance for the LLM; reconstructed Korean text below
# ("Fetches needed information from Wikipedia. Verify results before use.").
retriever = WikipediaRetriever(lang="ko", top_k_results=10)
wiki = Tool(
    func=retriever.get_relevant_documents,
    name="WIKI SEARCH",
    description="위키백과에서 필요한 정보를 불러옵니다. 결과를 검증하여 사용하십시오.",
)
# ──────────────────────────────
# ✅ GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper for the GitHub Models chat-completions API."""

    model: str = "openai/gpt-4.1"
    endpoint: str = "https://models.github.ai/inference"
    token: Optional[str] = None  # GitHub PAT, injected by the caller

    @property
    def _llm_type(self) -> str:
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* as a single user message; return the completion text.

        Raises ValueError when no token is configured or the API responds
        with a non-200 status.
        """
        if not self.token:
            raise ValueError("GitHub API token이 필요합니다.")
        headers = {
            # SECURITY FIX: the original hard-coded a personal access token
            # here (leaking a credential and ignoring self.token entirely).
            # Use the configured token; the leaked PAT must be revoked.
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        # timeout added so a stalled API call cannot hang the agent forever.
        resp = requests.post(
            f"{self.endpoint}/chat/completions", json=body, headers=headers, timeout=60
        )
        if resp.status_code != 200:
            raise ValueError(f"API 오류: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]
# ──────────────────────────────
# ✅ LLM setup
# ──────────────────────────────
# Token comes from the environment only — never hard-code credentials.
token = os.getenv("GITHUB_TOKEN") or os.getenv("token")
if not token:
    # De-mojibaked warning: "A GitHub token is required. e.g. setx GITHUB_TOKEN your_token"
    print("⚠️ GitHub Token이 필요합니다. 예: setx GITHUB_TOKEN your_token")
llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)
# ──────────────────────────────
# ✅ Load LangChain built-in tools
# ──────────────────────────────
# DuckDuckGo search, HTTP requests, math chain — plus YouTube search and the
# Wikipedia tool defined above.
tools = load_tools(
    ["ddg-search", "requests_all", "llm-math"],
    llm=llm,
    allow_dangerous_tools=True,
) + [YTS()] + [wiki]

# ──────────────────────────────
# ✅ Python execution tool (LangChain built-in)
# ──────────────────────────────
python_tool = PythonREPLTool()
# Description de-mojibaked: "Executes Python code."
tools.append(
    Tool(name="python_repl", func=python_tool.run, description="Python 코드를 실행합니다.")
)
# ──────────────────────────────
# ✅ File tools
# ──────────────────────────────
# ──────────────────────────────
# ✅ Accurate Korea time function (Asia/Seoul)
# ──────────────────────────────
import requests
from datetime import datetime
from zoneinfo import ZoneInfo


def time_now(_: str = "") -> str:
    """Return the current time in Asia/Seoul as a formatted string.

    Tries timeapi.io first for an externally synchronized clock; on a
    non-200 response or any exception it falls back to the local system
    clock. The ignored parameter keeps the single-input Tool signature.
    """
    try:
        resp = requests.get(
            "https://timeapi.io/api/Time/current/zone?timeZone=Asia/Seoul",
            timeout=5,
        )
        if resp.status_code == 200:
            data = resp.json()
            # "2024-01-01T12:34:56.789" -> "2024-01-01 12:34:56"
            dt = data["dateTime"].split(".")[0].replace("T", " ")
            return f"현재 시간: {dt} (Asia/Seoul, 서버 기준 NTP 동기화)"
        # API failed (non-200): substitute the local system time.
        tz = ZoneInfo("Asia/Seoul")
        now = datetime.now(tz)
        return f"현재 시간(로컬): {now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul)"
    except Exception as e:
        # Network/parse failure: backup path using the local clock.
        # FIX: the original return f-string here was split across two source
        # lines (a syntax error) by a mojibake'd character; repaired.
        tz = ZoneInfo("Asia/Seoul")
        now = datetime.now(tz)
        return f"현재 시간(백업): {now.strftime('%Y-%m-%d %H:%M:%S')} (Asia/Seoul, 오류: {e})"
# ──────────────────────────────
# ✅ Tool registration
# ──────────────────────────────
# Description de-mojibaked: "Returns the current time."
tools.append(Tool(name="time_now", func=time_now, description="현재 시간을 반환합니다."))

from langchain.memory import ConversationBufferMemory as MEM
from langchain.agents.agent_toolkits import FileManagementToolkit as FMT

# File read/write/list tools rooted at the current working directory.
tools.extend(FMT(root_dir=str(os.getcwd())).get_tools())
# ──────────────────────────────
# ✅ Agent initialization
# ──────────────────────────────
q_stream = queue.Queue()
handler = StreamingAgentCallbackHandler(q_stream)
agent = initialize_agent(
    tools,
    llm,
    memory=MEM(),
    # FIX: initialize_agent's keyword is `agent=`, not `agent_type=` — the
    # original keyword fell into **kwargs and the agent type never applied.
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
    handle_parsing_errors=True,
    callbacks=[handler],
)
# ──────────────────────────────
# ✅ Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Run the agent on *message* and stream its logs, then the final answer.

    Yields progressively longer strings so gr.ChatInterface displays an
    accumulating transcript rather than replacing it with each fragment.
    The *history* parameter is supplied by ChatInterface and not used here.
    """
    try:
        response = agent.run(message)
    except Exception as e:
        response = f"⚠️ 오류: {e}"
    # agent.run() has returned, so everything the callback handler produced
    # is already queued — drain it non-blockingly. FIX: the original blocked
    # on q_stream.get() and hung forever whenever an error path never
    # enqueued the "[END]" sentinel.
    transcript = ""
    while True:
        try:
            msg = q_stream.get_nowait()
        except queue.Empty:
            break
        if msg == "[END]":
            continue  # internal sentinel from on_llm_end; not user-visible
        transcript += msg + "\n"
        yield transcript
    # FIX: the original never yielded the final answer (and built an unused
    # `history` list); always end with the agent's response.
    yield transcript + response
# Gradio app shell: markdown banner + chat interface bound to chat().
# Markdown text de-mojibaked ("LangChain-based multi-tool agent").
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
## 🤖 PIXAL Assistant
**LangChain 기반 멀티툴 에이전트**
🧰 DuckDuckGo · Wikipedia · Math · Requests · Python REPL · File · Time
""")
    ai1 = gr.ChatInterface(chat)

if __name__ == "__main__":
    ai1.launch()
|