Spaces:
Running
Running
File size: 6,628 Bytes
c7188b5 8a8939e f3ab7bd e9b69d7 dedbb83 c7188b5 2dd74e7 e175515 dedbb83 c7188b5 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 |
# pixal_agent_full.py
import os
import datetime
import gradio as gr
import requests
from typing import Optional, List
from langchain.llms.base import LLM
from langchain.agents import initialize_agent, AgentType,load_tools
from langchain.tools import Tool
from langchain_experimental.tools.python.tool import PythonREPLTool
from langchain_community.retrievers import WikipediaRetriever
from langchain.tools.retriever import create_retriever_tool
# Wikipedia lookup exposed to the agent as a retriever tool.
retriever = WikipediaRetriever()
retriever_tool = create_retriever_tool(
    retriever,
    "wiki_search",
    "μν€λ°±κ³Όμμ νμν μ 보λ₯Ό λΆλ¬μ΅λλ€.κ²°κ΄΄λ₯Ό κ²μ¦νμ¬ μ¬μ©νμμ€.",
)
# ──────────────────────────────
# ✅ GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper around the GitHub Models chat-completions API."""

    # Model identifier sent to the API.
    model: str = "openai/gpt-4.1"
    # Base URL of the GitHub Models inference endpoint.
    endpoint: str = "https://models.github.ai/inference"
    # Personal access token; must be set before any call.
    token: Optional[str] = None
    # Seconds to wait for the HTTP response.  Fix: the original request had no
    # timeout, so a stalled connection would block the agent forever.
    request_timeout: float = 60.0

    @property
    def _llm_type(self) -> str:
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* as a single user message and return the reply text.

        Raises:
            ValueError: if no token is configured, or the API answers with a
                non-200 status.
        """
        if not self.token:
            raise ValueError("GitHub API tokenμ΄ νμν©λλ€.")
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        if stop:
            # Fix: the original silently ignored LangChain's stop sequences.
            body["stop"] = stop
        resp = requests.post(
            f"{self.endpoint}/chat/completions",
            json=body,
            headers=headers,
            timeout=self.request_timeout,
        )
        if resp.status_code != 200:
            raise ValueError(f"API μ€λ₯: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]
# ──────────────────────────────
# ✅ LLM setup
# ──────────────────────────────
# Pull the GitHub token from the environment (either spelling) and build the LLM.
token = os.environ.get("GITHUB_TOKEN") or os.environ.get("token")
if not token:
    print("β οΈ GitHub Tokenμ΄ νμν©λλ€. μ: setx GITHUB_TOKEN your_token")
llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)
# ──────────────────────────────
# ✅ Load LangChain built-in tools
# ──────────────────────────────
# Built-in LangChain tools, plus the Wikipedia retriever defined above.
tools = [
    *load_tools(
        ["ddg-search", "requests_all", "llm-math"],
        llm=llm,
        allow_dangerous_tools=True,
    ),
    retriever_tool,
]
# ──────────────────────────────
# ✅ Python execution tool (LangChain built-in)
# ──────────────────────────────
# Give the agent a Python REPL so it can execute code snippets.
python_tool = PythonREPLTool()
tools.append(
    Tool(
        name="python_repl",
        func=python_tool.run,
        description="Python μ½λλ₯Ό μ€νν©λλ€.",
    )
)
# ──────────────────────────────
# ✅ File tools
# ──────────────────────────────
# Workspace directory used by every file tool below; created eagerly so the
# tools can assume it exists.
BASE_DIR = os.path.join(os.getcwd(), "pixal_files")
os.makedirs(BASE_DIR, exist_ok=True)
def file_write(data: str) -> str:
    """Create or overwrite a file inside BASE_DIR.

    *data* is ``"<filename>\\n<content>"``: the first line is the file name,
    everything after it the content.  Returns a status message string.
    """
    try:
        name, content = data.split("\n", 1)
        # Confine writes to BASE_DIR: the name comes from LLM output, so
        # reject anything that escapes the sandbox (e.g. "../x").
        path = os.path.abspath(os.path.join(BASE_DIR, name.strip()))
        if not path.startswith(os.path.abspath(BASE_DIR) + os.sep):
            return "β οΈ νμΌ μ μ₯ μ€λ₯: invalid filename"
        with open(path, "w", encoding="utf-8") as f:
            f.write(content)
        # Fix: this literal was split across two lines in the original
        # (a syntax error); rejoined as a single f-string.
        return f"β νμΌ μ μ₯λ¨: {path}"
    except Exception as e:
        # Best-effort tool: report the error to the agent instead of raising.
        return f"β οΈ νμΌ μ μ₯ μ€λ₯: {e}"
def file_read(filename: str) -> str:
    """Return the contents of *filename* from BASE_DIR, or an error marker."""
    path = os.path.join(BASE_DIR, filename.strip())
    # EAFP: open directly instead of the original exists()/open() pair,
    # which had a check-then-use race.
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read()
    except FileNotFoundError:
        return "β νμΌ μμ"
def file_list(_: str = "") -> str:
    """List the names of all files in BASE_DIR, one per line.

    The unused parameter exists because LangChain tools always pass one
    string argument.
    """
    entries = os.listdir(BASE_DIR)
    return "\n".join(entries)
def file_delete(filename: str) -> str:
    """Delete *filename* from BASE_DIR; report success or a missing file."""
    path = os.path.join(BASE_DIR, filename.strip())
    try:
        os.remove(path)
    except FileNotFoundError:
        return "β νμΌ μμ"
    # Fix: the original reported the literal "(unknown)" instead of naming
    # the file that was deleted.
    return f"ποΈ μμ μλ£: {filename.strip()}"
# ──────────────────────────────
# ✅ Time tool
# ──────────────────────────────
def time_now(_=""):
    """Return the current wall-clock time in Korea Standard Time (UTC+9).

    KST has no daylight saving, so a fixed-offset timezone is sufficient.
    """
    kst = datetime.timezone(datetime.timedelta(hours=9))
    now = datetime.datetime.now(kst)
    stamp = now.strftime('%Y-%m-%d %H:%M:%S')
    return f"νμ¬ μκ°: {stamp} (Asia/Seoul)"
# ──────────────────────────────
# ✅ Tool registration
# ──────────────────────────────
# Register the file and clock helpers with the agent's tool list.
for _tool in (
    Tool(name="file_write", func=file_write, description="νμΌμ μμ±/μμ ν©λλ€."),
    Tool(name="file_read", func=file_read, description="νμΌ λ΄μ©μ μ½μ΅λλ€."),
    Tool(name="file_list", func=file_list, description="νμΌ λͺ©λ‘μ νμν©λλ€."),
    Tool(name="file_delete", func=file_delete, description="νμΌμ μμ ν©λλ€."),
    Tool(name="time_now", func=time_now, description="νμ¬ μκ°μ λ°νν©λλ€."),
):
    tools.append(_tool)
# ──────────────────────────────
# ✅ Agent initialization
# ──────────────────────────────
# Fix: `initialize_agent` takes the agent choice via the `agent=` parameter;
# the original passed `agent_type=`, which is not a parameter of
# `initialize_agent` and was silently swallowed (falling back to the default).
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True
)
# ──────────────────────────────
# ✅ Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Run the agent on *message* and append the exchange to *history*.

    Returns the updated history twice because the Gradio wiring feeds the
    Chatbot component from both outputs.
    """
    try:
        response = agent.run(message)
    except Exception as e:
        # Surface agent errors in the chat instead of crashing the UI.
        response = f"β οΈ μ€λ₯: {e}"
    # Fix: after the clear button the chatbot state is None; the original
    # `history + [...]` then raised TypeError outside the try block.
    history = (history or []) + [(message, response)]
    return history, history
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
## π€ PIXAL Assistant
**LangChain κΈ°λ° λ©ν°ν΄ μμ΄μ νΈ**
π§° DuckDuckGo Β· Wikipedia Β· YouTube Β· Math Β· Requests Β· Python REPL Β· File Β· Time
""")
    # NOTE(review): gr.Chatbot documents `latex_delimiters`, not `latex` —
    # confirm this kwarg against the installed Gradio version.
    chatbot = gr.Chatbot(label="PIXAL λν", height=600, latex=True)
    # Fix: the placeholder literal was split across three lines in the
    # original (a syntax error); rejoined into a single string.
    msg = gr.Textbox(label="λ©μμ§", placeholder="λͺλ Ή λλ μ§λ¬Έμ μλ ₯νμΈμ...")
    clear = gr.Button("μ΄κΈ°ν")
    # Submitting runs the agent; the cleared chatbot value is handled in chat().
    msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)
|