# Pixal1.0 / app.py
# NOTE(review): the lines below were Hugging Face page chrome (author,
# commit hash, "raw / history / blame", file size) accidentally captured
# into the source file; they are commented out so the module parses.
# peterpeter8585 — "Update app.py" — commit aaead94 (verified) — 6.65 kB
# pixal_agent_full.py
import os
import datetime
import gradio as gr
import requests
from typing import Optional, List
from langchain.llms.base import LLM
from langchain.agents import initialize_agent, AgentType,load_tools
from langchain.agents import AgentExecutor, create_structured_chat_agent
from langchain.tools import Tool
from langchain_experimental.tools.python.tool import PythonREPLTool
import queue
from typing import Any, Dict
import gradio as gr
from langchain.callbacks.base import BaseCallbackHandler
from langchain.tools import YouTubeSearchTool as YTS
# 2. μ»€μŠ€ν…€ 콜백 ν•Έλ“€λŸ¬
from langchain_community.retrievers import WikipediaRetriever
from langchain.tools.retriever import create_retriever_tool
# Korean-Wikipedia retrieval tool: returns the top 10 matching articles.
retriever = WikipediaRetriever(lang="ko", top_k_results=10)
wiki = Tool(
    name="WIKI SEARCH",
    func=retriever.get_relevant_documents,
    description="μœ„ν‚€λ°±κ³Όμ—μ„œ ν•„μš”ν•œ 정보λ₯Ό λΆˆλŸ¬μ˜΅λ‹ˆλ‹€.κ²°κ΄΄λ₯Ό κ²€μ¦ν•˜μ—¬ μ‚¬μš©ν•˜μ‹œμ˜€.",
)
# ──────────────────────────────
# βœ… GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper around the GitHub Models chat-completions API.

    Sends the prompt as a single user message to
    ``{endpoint}/chat/completions`` and returns the reply text.
    """

    model: str = "openai/gpt-4.1"                        # model id on models.github.ai
    endpoint: str = "https://models.github.ai/inference"  # API base URL
    token: Optional[str] = None                           # GitHub PAT; required at call time

    @property
    def _llm_type(self) -> str:
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Return the model's reply for ``prompt``.

        Raises:
            ValueError: when no token is configured or the API returns
                a non-200 status.
        """
        if not self.token:
            raise ValueError("GitHub API token이 ν•„μš”ν•©λ‹ˆλ‹€.")
        # SECURITY FIX: the original hard-coded a GitHub personal access
        # token here (a leaked credential) and ignored self.token entirely.
        # Use the configured token; the leaked PAT must be revoked.
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        # Bounded timeout so an unresponsive endpoint can't hang the agent forever.
        resp = requests.post(
            f"{self.endpoint}/chat/completions", json=body, headers=headers, timeout=60
        )
        if resp.status_code != 200:
            raise ValueError(f"API 였λ₯˜: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]
# ──────────────────────────────
# βœ… LLM μ„€μ •
# ──────────────────────────────
# Resolve the GitHub token from either accepted environment variable;
# warn (but continue) when it is missing so the error surfaces at call time.
token = os.environ.get("GITHUB_TOKEN") or os.environ.get("token")
if not token:
    print("⚠️ GitHub Token이 ν•„μš”ν•©λ‹ˆλ‹€. 예: setx GITHUB_TOKEN your_token")

# Shared LLM instance used by every tool/agent below.
llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)
# ──────────────────────────────
# βœ… LangChain κΈ°λ³Έ 도ꡬ 뢈러였기
# ──────────────────────────────
# Base toolset: DuckDuckGo search, raw HTTP requests, and LLM-backed math,
# extended with YouTube search and the Wikipedia tool defined above.
# NOTE(review): allow_dangerous_tools=True is required for "requests_all",
# which lets the agent fetch arbitrary URLs — confirm this is acceptable
# for the deployment environment.
tools = load_tools(
["ddg-search", "requests_all", "llm-math"],
llm=llm,allow_dangerous_tools=True
)+[YTS()]+[wiki]
# ──────────────────────────────
# βœ… Python μ‹€ν–‰ 도ꡬ (LangChain λ‚΄μž₯)
# ──────────────────────────────
# Python execution tool (LangChain built-in REPL).
# NOTE(review): this executes arbitrary Python in-process — dangerous if the
# agent is exposed to untrusted users; confirm sandboxing requirements.
python_tool = PythonREPLTool()
tools.append(Tool(name="python_repl", func=python_tool.run, description="Python μ½”λ“œλ₯Ό μ‹€ν–‰ν•©λ‹ˆλ‹€."))
from langchain import hub
# Standard structured-chat agent prompt pulled from LangChain Hub.
prompt=hub.pull("hwchase17/structured-chat-agent")
# ──────────────────────────────
# βœ… 파일 도ꡬ
# ──────────────────────────────
# ──────────────────────────────
# βœ… μ •ν™•ν•œ ν•œκ΅­ μ‹œκ°„ ν•¨μˆ˜ (Asia/Seoul)
# ──────────────────────────────
import requests
from datetime import datetime
from zoneinfo import ZoneInfo
def time_now(_: str = "", tz_name: str = "Asia/Seoul") -> str:
    """Return the current time in ``tz_name`` as a human-readable Korean string.

    Tries timeapi.io first for an externally synchronized clock; on any
    failure (non-200 status, network error, unexpected payload) it degrades
    to the local system clock via ``zoneinfo``.

    Parameters:
        _: ignored; kept so the function works as a single-input agent tool.
        tz_name: IANA timezone name (default "Asia/Seoul" — backward
            compatible with the original hard-coded zone).
    """
    def _local(label: str, suffix: str = "") -> str:
        # Shared fallback formatter — the original duplicated this logic
        # in both the non-200 branch and the except branch.
        now = datetime.now(ZoneInfo(tz_name))
        return f"ν˜„μž¬ μ‹œκ°({label}): {now.strftime('%Y-%m-%d %H:%M:%S')} ({tz_name}{suffix})"

    try:
        resp = requests.get(
            f"https://timeapi.io/api/Time/current/zone?timeZone={tz_name}",
            timeout=5,
        )
        if resp.status_code == 200:
            # "2024-01-02T03:04:05.678" -> "2024-01-02 03:04:05"
            dt = resp.json()["dateTime"].split(".")[0].replace("T", " ")
            return f"ν˜„μž¬ μ‹œκ°: {dt} ({tz_name}, μ„œλ²„ κΈ°μ€€ NTP 동기화)"
        return _local("둜컬")
    except Exception as e:  # best-effort: any failure falls back to the local clock
        return _local("λ°±μ—…", f", 였λ₯˜: {e}")
# ──────────────────────────────
# βœ… 도ꡬ 등둝
# ──────────────────────────────
# Register the current-time tool so the agent can answer time questions.
tools.extend([Tool(name="time_now", func=time_now, description="ν˜„μž¬ μ‹œκ°„μ„ λ°˜ν™˜ν•©λ‹ˆλ‹€.")])
from langchain.memory import ConversationBufferMemory as MEM
from langchain.agents.agent_toolkits import FileManagementToolkit as FMT
# File-management tools (read/write/list/...) rooted at the current working
# directory — the agent can touch any file under cwd.
tools.extend(FMT(root_dir=str(os.getcwd())).get_tools())
# ──────────────────────────────
# βœ… Agent μ΄ˆκΈ°ν™”
# ──────────────────────────────
# Conversation memory shared across turns of the Gradio chat.
mem=MEM()
# Structured-chat agent wrapped in an executor; note `agent` is rebound
# from the raw agent to the AgentExecutor, which is what chat() calls.
agent = create_structured_chat_agent(llm, tools, prompt)
agent= AgentExecutor(agent=agent, tools=tools,memory=mem)
# ──────────────────────────────
# βœ… Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Run one agent turn and append the (message, reply) pair to history.

    Returns the updated history twice because the Gradio wiring feeds the
    same chatbot component from both outputs.
    """
    try:
        reply = agent.run(message)
    except Exception as err:
        # Surface agent failures in the transcript instead of crashing the UI.
        reply = f"⚠️ 였λ₯˜: {err}"
    updated = history + [(message, reply)]
    return updated, updated
# Gradio UI: a single chat page driven by the agent defined above.
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
## πŸ€– PIXAL Assistant
**LangChain 기반 λ©€ν‹°νˆ΄ μ—μ΄μ „νŠΈ**
🧰 DuckDuckGo · Wikipedia · Math · Requests · Python REPL · File · Time
""")
    # Transcript view, message input, and a reset button.
    chatbot = gr.Chatbot(label="PIXAL λŒ€ν™”", height=600)
    msg = gr.Textbox(label="λ©”μ‹œμ§€", placeholder="λͺ…λ Ή λ˜λŠ” μ§ˆλ¬Έμ„ μž…λ ₯ν•˜μ„Έμš”...")
    clear = gr.Button("μ΄ˆκΈ°ν™”")
    # chat() returns (history, history), hence the chatbot appears twice
    # in the outputs list.
    msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
    # Reset the transcript to empty without queueing.
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch()