File size: 6,287 Bytes
c7188b5
 
 
 
 
 
 
8a8939e
f3ab7bd
c7188b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e175515
c7188b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
# pixal_agent_full.py
import os
import datetime
import gradio as gr
import requests
from typing import Optional, List
from langchain.llms.base import LLM
from langchain.agents import initialize_agent, AgentType,load_tools
from langchain.tools import Tool
from langchain.tools.python.tool import PythonREPLTool

# ──────────────────────────────
# βœ… GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain ``LLM`` wrapper around the GitHub Models inference API.

    Sends the prompt as a single user chat message to the
    ``/chat/completions`` endpoint and returns the assistant's reply text.
    """

    model: str = "openai/gpt-4.1"                      # model id on GitHub Models
    endpoint: str = "https://models.github.ai/inference"
    token: Optional[str] = None                        # GitHub token with models access

    @property
    def _llm_type(self) -> str:
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Synchronously call the chat-completions endpoint.

        Args:
            prompt: User message content.
            stop: Optional stop sequences; forwarded to the API
                (previously accepted but silently ignored).

        Returns:
            The assistant message content.

        Raises:
            ValueError: If no token is configured, or the API returns a
                non-200 status.
        """
        if not self.token:
            raise ValueError("GitHub API token이 ν•„μš”ν•©λ‹ˆλ‹€.")

        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        body = {"model": self.model, "messages": [{"role": "user", "content": prompt}]}
        if stop:
            body["stop"] = stop  # honor LangChain's stop sequences

        # Explicit timeout so a stalled request can't hang the whole agent.
        resp = requests.post(
            f"{self.endpoint}/chat/completions",
            json=body,
            headers=headers,
            timeout=60,
        )
        if resp.status_code != 200:
            raise ValueError(f"API 였λ₯˜: {resp.status_code} - {resp.text}")
        return resp.json()["choices"][0]["message"]["content"]

# ──────────────────────────────
# βœ… LLM μ„€μ •
# ──────────────────────────────
# Resolve the GitHub Models API token from the environment; the lowercase
# "token" variable is accepted as a fallback spelling.
token = os.environ.get("GITHUB_TOKEN") or os.environ.get("token")
if not token:
    # Warn but keep going — the failure surfaces later inside _call().
    print("⚠️ GitHub Token이 ν•„μš”ν•©λ‹ˆλ‹€. 예: setx GITHUB_TOKEN your_token")

# Module-level LLM instance shared by every tool and the agent below.
llm = GitHubModelLLM(token=token, model="openai/gpt-4.1")

# ──────────────────────────────
# βœ… LangChain κΈ°λ³Έ 도ꡬ 뢈러였기
# ──────────────────────────────
# Pull in LangChain's stock toolset (search, HTTP, math, wiki, YouTube).
# allow_dangerous_tools is required for the requests-* tools.
tools = load_tools(
    ["duckduckgo-search", "requests-all", "llm-math", "wikipedia", "youtube-search"],
    llm=llm,
    allow_dangerous_tools=True,
)

# Add an arbitrary-code Python REPL as an extra tool.
python_tool = PythonREPLTool()
tools.append(
    Tool(
        name="python_repl",
        func=python_tool.run,
        description="Python μ½”λ“œλ₯Ό μ‹€ν–‰ν•©λ‹ˆλ‹€.",
    )
)

# ──────────────────────────────
# βœ… 파일 도ꡬ
# ──────────────────────────────
# Sandbox directory for the agent's file tools; created eagerly at import
# time so the tools below can assume it exists.
BASE_DIR = os.path.join(os.getcwd(), "pixal_files")
os.makedirs(BASE_DIR, exist_ok=True)

def file_write(data: str) -> str:
    """Create or overwrite a file inside BASE_DIR.

    Args:
        data: First line is the filename; everything after the first
            newline is the file content (empty if no newline is present).

    Returns:
        A status message string; errors are reported in the message
        rather than raised, so the agent loop never crashes.
    """
    try:
        # partition() tolerates name-only input (writes an empty file)
        # where split("\n", 1) used to raise.
        name, _, content = data.partition("\n")
        # basename() confines writes to BASE_DIR — the filename comes from
        # untrusted LLM output, so "../" traversal must be blocked.
        path = os.path.join(BASE_DIR, os.path.basename(name.strip()))
        with open(path, "w", encoding="utf-8") as f:
            f.write(content)
        return f"βœ… 파일 μ €μž₯됨: {path}"
    except Exception as e:
        return f"⚠️ 파일 μ €μž₯ 였λ₯˜: {e}"

def file_read(filename: str) -> str:
    """Return the UTF-8 content of a file inside BASE_DIR.

    Args:
        filename: Name of the file (surrounding whitespace is stripped).

    Returns:
        The file content, or an error marker string if it does not exist.
    """
    # basename() confines reads to BASE_DIR — the name comes from untrusted
    # LLM output, so "../" traversal out of the sandbox must be blocked.
    path = os.path.join(BASE_DIR, os.path.basename(filename.strip()))
    if not os.path.exists(path):
        return "❌ 파일 μ—†μŒ"
    with open(path, "r", encoding="utf-8") as f:
        return f.read()

def file_list(_: str = "") -> str:
    """Return the names of all entries in BASE_DIR, one per line.

    The unused parameter exists only so the function matches the
    single-input signature LangChain tools expect.
    """
    entries = os.listdir(BASE_DIR)
    return "\n".join(entries)

def file_delete(filename: str) -> str:
    """Delete a file inside BASE_DIR.

    Args:
        filename: Name of the file (surrounding whitespace is stripped).

    Returns:
        A status message naming the deleted file, or an error marker if
        it does not exist.
    """
    # basename() confines deletion to BASE_DIR — the name comes from
    # untrusted LLM output, so "../" traversal must be blocked.
    name = os.path.basename(filename.strip())
    path = os.path.join(BASE_DIR, name)
    if os.path.exists(path):
        os.remove(path)
        # The success message previously contained the literal text
        # "(unknown)" (a broken template substitution); report the file.
        return f"πŸ—‘οΈ μ‚­μ œ μ™„λ£Œ: {name}"
    return "❌ 파일 μ—†μŒ"

# ──────────────────────────────
# βœ… μ‹œκ°„ 도ꡬ
# ──────────────────────────────
def time_now(_: str = "") -> str:
    """Return the current wall-clock time in Asia/Seoul (fixed UTC+9).

    The unused parameter exists only so the function matches the
    single-input signature LangChain tools expect.
    """
    kst = datetime.timezone(datetime.timedelta(hours=9))
    stamp = datetime.datetime.now(kst).strftime("%Y-%m-%d %H:%M:%S")
    return f"ν˜„μž¬ μ‹œκ°: {stamp} (Asia/Seoul)"

# ──────────────────────────────
# βœ… 도ꡬ 등둝
# ──────────────────────────────
# Register the custom file/time tools alongside the stock ones.
_CUSTOM_TOOLS = [
    ("file_write", file_write, "νŒŒμΌμ„ 생성/μˆ˜μ •ν•©λ‹ˆλ‹€."),
    ("file_read", file_read, "파일 λ‚΄μš©μ„ μ½μŠ΅λ‹ˆλ‹€."),
    ("file_list", file_list, "파일 λͺ©λ‘μ„ ν‘œμ‹œν•©λ‹ˆλ‹€."),
    ("file_delete", file_delete, "νŒŒμΌμ„ μ‚­μ œν•©λ‹ˆλ‹€."),
    ("time_now", time_now, "ν˜„μž¬ μ‹œκ°„μ„ λ°˜ν™˜ν•©λ‹ˆλ‹€."),
]
for _name, _func, _desc in _CUSTOM_TOOLS:
    tools.append(Tool(name=_name, func=_func, description=_desc))

# ──────────────────────────────
# βœ… Agent μ΄ˆκΈ°ν™”
# ──────────────────────────────
# initialize_agent takes the agent type via the `agent=` keyword;
# the previous `agent_type=` keyword is not a parameter of this function,
# so the requested ZERO_SHOT_REACT_DESCRIPTION type was never applied.
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)

# ──────────────────────────────
# βœ… Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Run the agent on `message` and append the exchange to `history`.

    Returns the updated history twice because the Gradio wiring feeds the
    same value to both declared outputs.
    """
    try:
        reply = agent.run(message)
    except Exception as err:
        # Surface any agent failure as the bot's reply instead of crashing.
        reply = f"⚠️ 였λ₯˜: {err}"
    updated = [*history, (message, reply)]
    return updated, updated

# Gradio UI: a single-page chat front-end over the agent.
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
    ## πŸ€– PIXAL Assistant  
    **LangChain 기반 λ©€ν‹°νˆ΄ μ—μ΄μ „νŠΈ**  
    🧰 DuckDuckGo · Wikipedia · YouTube · Math · Requests · Python REPL · File · Time
    """)
    # NOTE(review): `latex=True` — confirm the installed Gradio version
    # accepts this Chatbot kwarg (newer versions use `latex_delimiters`).
    chatbot = gr.Chatbot(label="PIXAL λŒ€ν™”", height=600, latex=True)
    msg = gr.Textbox(label="λ©”μ‹œμ§€", placeholder="λͺ…λ Ή λ˜λŠ” μ§ˆλ¬Έμ„ μž…λ ₯ν•˜μ„Έμš”...")
    clear = gr.Button("μ΄ˆκΈ°ν™”")

    # Enter in the textbox runs chat(); both outputs target the chatbot.
    msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
    # Clear button resets the chatbot by sending it None, bypassing the queue.
    clear.click(lambda: None, None, chatbot, queue=False)

# Bind to all interfaces on port 7860 when run as a script.
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)