Spaces:
Sleeping
Sleeping
| # pixal_agent_full.py | |
| import os | |
| import datetime | |
| import gradio as gr | |
| import requests | |
| from typing import Optional, List | |
| from langchain.llms.base import LLM | |
| from langchain.agents import initialize_agent, AgentType,load_tools | |
| from langchain.tools import Tool | |
| from langchain_experimental.tools.python.tool import PythonREPLTool | |
| from langchain_community.retrievers import WikipediaRetriever | |
| from langchain.tools.retriever import create_retriever_tool | |
# Wikipedia lookup tool: wraps a LangChain retriever so the agent can search
# Wikipedia by name ("wiki_search").
wiki_retriever = WikipediaRetriever()
retriever_tool = create_retriever_tool(
    wiki_retriever,
    name="wiki_search",
    description="μν€λ°±κ³Όμμ νμν μ 보λ₯Ό λΆλ¬μ΅λλ€.κ²°κ΄΄λ₯Ό κ²μ¦νμ¬ μ¬μ©νμμ€.",
)
| # ββββββββββββββββββββββββββββββ | |
| # β GitHub Models LLM | |
| # ββββββββββββββββββββββββββββββ | |
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper around the GitHub Models chat-completions API."""

    # Model identifier on GitHub Models.
    model: str = "openai/gpt-4.1"
    # Base URL of the inference endpoint.
    endpoint: str = "https://models.github.ai/inference"
    # GitHub token; required before the first call.
    token: Optional[str] = None

    @property
    def _llm_type(self) -> str:
        # Bug fix: LangChain's LLM base class declares _llm_type as a
        # @property; overriding it as a plain method yields a bound method
        # instead of a string wherever LangChain reads it.
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* as a single user message and return the reply text.

        Raises:
            ValueError: when no token is configured or the API responds
                with a non-200 status.
        """
        if not self.token:
            raise ValueError("GitHub API tokenμ΄ νμν©λλ€.")
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        if stop:
            # Forward LangChain's stop sequences instead of silently ignoring them.
            body["stop"] = stop
        # Bounded timeout so a hung request cannot stall the agent forever.
        resp = requests.post(
            f"{self.endpoint}/chat/completions",
            json=body,
            headers=headers,
            timeout=60,
        )
        if resp.status_code != 200:
            raise ValueError(f"API μ€λ₯: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]
| # ββββββββββββββββββββββββββββββ | |
| # β LLM μ€μ | |
| # ββββββββββββββββββββββββββββββ | |
# Resolve the GitHub token from the environment, preferring GITHUB_TOKEN and
# falling back to the lowercase "token" variable (same falsy semantics as
# `a or b`: an empty GITHUB_TOKEN also falls through).
token = os.getenv("GITHUB_TOKEN")
if not token:
    token = os.getenv("token")
if not token:
    print("β οΈ GitHub Tokenμ΄ νμν©λλ€. μ: setx GITHUB_TOKEN your_token")
llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)
| # ββββββββββββββββββββββββββββββ | |
| # β LangChain κΈ°λ³Έ λꡬ λΆλ¬μ€κΈ° | |
| # ββββββββββββββββββββββββββββββ | |
# Built-in LangChain tools: DuckDuckGo search, raw HTTP requests, and math,
# plus the Wikipedia retriever tool defined above.
builtin_tool_names = ["ddg-search", "requests_all", "llm-math"]
tools = load_tools(builtin_tool_names, llm=llm, allow_dangerous_tools=True)
tools.append(retriever_tool)
# Python execution tool (LangChain built-in REPL).
python_tool = PythonREPLTool()
tools.append(
    Tool(
        name="python_repl",
        func=python_tool.run,
        description="Python μ½λλ₯Ό μ€νν©λλ€.",
    )
)
| # ββββββββββββββββββββββββββββββ | |
| # β νμΌ λꡬ | |
| # ββββββββββββββββββββββββββββββ | |
# Sandbox directory for the agent's file tools; created eagerly at import time.
BASE_DIR = os.path.join(os.getcwd(), "pixal_files")
os.makedirs(BASE_DIR, exist_ok=True)
def file_write(data: str) -> str:
    """Write a file inside BASE_DIR.

    *data* is "<filename>\\n<content>": the first line names the file, the
    remainder becomes its content. Always returns a status string; never
    raises (all errors are reported in the return value).
    """
    try:
        name, content = data.split("\n", 1)
        base = os.path.abspath(BASE_DIR)
        path = os.path.abspath(os.path.join(base, name.strip()))
        # Security: the filename is produced by the LLM agent — refuse paths
        # that escape the sandbox (e.g. "../x" or absolute paths).
        if os.path.commonpath([base, path]) != base:
            raise ValueError(f"path escapes BASE_DIR: {name.strip()}")
        with open(path, "w", encoding="utf-8") as f:
            f.write(content)
        return f"β νμΌ μ μ₯λ¨: {path}"
    except Exception as e:
        return f"β οΈ νμΌ μ μ₯ μ€λ₯: {e}"
def file_read(filename: str) -> str:
    """Return the contents of *filename* inside BASE_DIR.

    Missing files — and agent-supplied paths that try to escape BASE_DIR —
    are reported with the "no such file" message rather than raising.
    """
    base = os.path.abspath(BASE_DIR)
    path = os.path.abspath(os.path.join(base, filename.strip()))
    # Security: the filename is produced by the LLM agent — never read
    # outside the sandbox directory.
    if os.path.commonpath([base, path]) != base:
        return "β νμΌ μμ"
    if not os.path.exists(path):
        return "β νμΌ μμ"
    with open(path, "r", encoding="utf-8") as f:
        return f.read()
def file_list(_="") -> str:
    """Return the names of the entries in BASE_DIR, one per line."""
    entries = os.listdir(BASE_DIR)
    return "\n".join(entries)
def file_delete(filename: str) -> str:
    """Delete *filename* from BASE_DIR; report success or 'no such file'."""
    base = os.path.abspath(BASE_DIR)
    path = os.path.abspath(os.path.join(base, filename.strip()))
    # Security: the filename is produced by the LLM agent — never delete
    # outside the sandbox directory.
    if os.path.commonpath([base, path]) != base:
        return "β νμΌ μμ"
    if os.path.exists(path):
        os.remove(path)
        # Bug fix: the message previously contained the literal "(unknown)"
        # instead of the deleted path.
        return f"ποΈ μμ  μλ£: {path}"
    return "β νμΌ μμ"
| # ββββββββββββββββββββββββββββββ | |
| # β μκ° λꡬ | |
| # ββββββββββββββββββββββββββββββ | |
def time_now(_=""):
    """Return the current time in KST (UTC+9) as a human-readable string."""
    kst = datetime.timezone(datetime.timedelta(hours=9))
    stamp = datetime.datetime.now(kst).strftime("%Y-%m-%d %H:%M:%S")
    return f"νμ¬ μκ°: {stamp} (Asia/Seoul)"
| # ββββββββββββββββββββββββββββββ | |
| # β λꡬ λ±λ‘ | |
| # ββββββββββββββββββββββββββββββ | |
# Register the custom file and clock tools with the agent's toolbox.
for custom_tool in (
    Tool(name="file_write", func=file_write, description="νμΌμ μμ±/μμ ν©λλ€."),
    Tool(name="file_read", func=file_read, description="νμΌ λ΄μ©μ μ½μ΅λλ€."),
    Tool(name="file_list", func=file_list, description="νμΌ λͺ©λ‘μ νμν©λλ€."),
    Tool(name="file_delete", func=file_delete, description="νμΌμ μμ ν©λλ€."),
    Tool(name="time_now", func=time_now, description="νμ¬ μκ°μ λ°νν©λλ€."),
):
    tools.append(custom_tool)
| # ββββββββββββββββββββββββββββββ | |
| # β Agent μ΄κΈ°ν | |
| # ββββββββββββββββββββββββββββββ | |
# Bug fix: initialize_agent() takes the agent type via the `agent=` keyword;
# there is no `agent_type` parameter, so the old keyword fell through to
# **kwargs and the requested ZERO_SHOT_REACT_DESCRIPTION type was never
# actually applied.
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)
| # ββββββββββββββββββββββββββββββ | |
| # β Gradio UI | |
| # ββββββββββββββββββββββββββββββ | |
def chat(message, history):
    """Run one agent turn and append the (user, reply) pair to the history.

    Agent failures are captured and shown as the reply instead of raising,
    so the UI never crashes. Returns the updated history twice (once for the
    chatbot display, once for the state output).
    """
    try:
        reply = agent.run(message)
    except Exception as err:
        reply = f"β οΈ μ€λ₯: {err}"
    updated = history + [(message, reply)]
    return updated, updated
# Chat UI: a chatbot pane, a message box, and a clear button.
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
## π€ PIXAL Assistant
**LangChain κΈ°λ° λ©ν°ν΄ μμ΄μ νΈ**
π§° DuckDuckGo Β· Wikipedia Β· YouTube Β· Math Β· Requests Β· Python REPL Β· File Β· Time
""")
    # NOTE(review): `latex=True` does not appear to be a documented
    # gr.Chatbot argument in current Gradio (LaTeX rendering is configured
    # via `latex_delimiters`) — confirm against the installed version.
    chatbot = gr.Chatbot(label="PIXAL λν", height=600, latex=True)
    msg = gr.Textbox(label="λ©μμ§", placeholder="λͺ λ Ή λλ μ§λ¬Έμ μ λ ₯νμΈμ...")
    clear = gr.Button("μ΄κΈ°ν")
    # Submitting the textbox runs one agent turn; chat() returns the updated
    # history twice, feeding both [chatbot, chatbot] outputs.
    msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
    # The clear button resets the chatbot pane to empty.
    clear.click(lambda: None, None, chatbot, queue=False)
if __name__ == "__main__":
    # Bind to all interfaces on port 7860 (typical for Spaces/containers).
    demo.launch(server_name="0.0.0.0", server_port=7860)