peterpeter8585 committed on
Commit
c7188b5
·
verified ·
1 Parent(s): 1fb5a5a

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +148 -0
app.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # pixal_agent_full.py
2
+ import os
3
+ import datetime
4
+ import gradio as gr
5
+ import requests
6
+ from typing import Optional, List
7
+ from langchain.llms.base import LLM
8
+ from langchain.agents import initialize_agent, AgentType
9
+ from langchain.tools import Tool, load_tools
10
+ from langchain.tools.python.tool import PythonREPLTool
11
+
12
# ──────────────────────────────
# βœ… GitHub Models LLM
# ──────────────────────────────
class GitHubModelLLM(LLM):
    """LangChain LLM wrapper around the GitHub Models chat-completions API."""

    model: str = "openai/gpt-4.1"
    endpoint: str = "https://models.github.ai/inference"
    token: Optional[str] = None  # GitHub API token; required before calling

    @property
    def _llm_type(self) -> str:
        return "github_models_api"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* as a single user message and return the reply text.

        Raises ValueError when no token is configured or the API responds
        with a non-200 status.
        """
        # Fail fast: the endpoint rejects unauthenticated requests anyway.
        if not self.token:
            raise ValueError("GitHub API token이 ν•„μš”ν•©λ‹ˆλ‹€.")

        request_headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        }
        payload = {
            "model": self.model,
            "messages": [{"role": "user", "content": prompt}],
        }

        response = requests.post(
            f"{self.endpoint}/chat/completions",
            json=payload,
            headers=request_headers,
        )
        if response.status_code != 200:
            raise ValueError(f"API 였λ₯˜: {response.status_code} - {response.text}")
        return response.json()["choices"][0]["message"]["content"]
38
+
39
# ──────────────────────────────
# βœ… LLM configuration
# ──────────────────────────────
# Accept either the conventional GITHUB_TOKEN or a lowercase "token" variable.
token = os.environ.get("GITHUB_TOKEN") or os.environ.get("token")
if not token:
    # Warn but keep going; GitHubModelLLM._call raises if token stays unset.
    print("⚠️ GitHub Token이 ν•„μš”ν•©λ‹ˆλ‹€. 예: setx GITHUB_TOKEN your_token")

llm = GitHubModelLLM(model="openai/gpt-4.1", token=token)
47
+
48
# ──────────────────────────────
# βœ… Built-in LangChain tools
# ──────────────────────────────
_BUILTIN_TOOL_NAMES = [
    "duckduckgo-search",
    "requests-all",
    "llm-math",
    "wikipedia",
    "youtube-search",
]
tools = load_tools(_BUILTIN_TOOL_NAMES, llm=llm)
55
+
56
# ──────────────────────────────
# βœ… Python execution tool (LangChain built-in REPL)
# ──────────────────────────────
python_tool = PythonREPLTool()
tools.append(
    Tool(
        name="python_repl",
        func=python_tool.run,
        description="Python μ½”λ“œλ₯Ό μ‹€ν–‰ν•©λ‹ˆλ‹€.",
    )
)
61
+
62
# ──────────────────────────────
# βœ… File tools
# ──────────────────────────────
# All file tools below operate inside this sandbox directory under the CWD;
# it is created eagerly so the tools never hit a missing-directory error.
BASE_DIR = os.path.join(os.getcwd(), "pixal_files")
os.makedirs(BASE_DIR, exist_ok=True)
67
+
68
def file_write(data: str) -> str:
    """Create or overwrite a file under BASE_DIR.

    *data* packs the target name and content into one string: the first
    line is the filename, everything after the first newline is the body.
    Returns a status message (never raises).
    """
    # NOTE(review): the filename is not sanitized, so an agent-supplied
    # name like "../x" could escape BASE_DIR — confirm this is acceptable.
    try:
        name, content = data.split("\n", 1)
        target = os.path.join(BASE_DIR, name.strip())
        with open(target, "w", encoding="utf-8") as handle:
            handle.write(content)
    except Exception as err:
        return f"⚠️ 파일 μ €μž₯ 였λ₯˜: {err}"
    return f"βœ… 파일 μ €μž₯됨: {target}"
77
+
78
def file_read(filename: str) -> str:
    """Return the contents of *filename* in BASE_DIR, or an error marker."""
    target = os.path.join(BASE_DIR, filename.strip())
    if not os.path.exists(target):
        return "❌ 파일 μ—†μŒ"
    with open(target, encoding="utf-8") as handle:
        return handle.read()
84
+
85
def file_list(_: str = "") -> str:
    """List the files currently stored in BASE_DIR, one name per line."""
    entries = os.listdir(BASE_DIR)
    return "\n".join(entries)
87
+
88
def file_delete(filename: str) -> str:
    """Delete *filename* from BASE_DIR.

    Returns a human-readable status message; a missing file yields an
    error marker instead of raising.
    """
    path = os.path.join(BASE_DIR, filename.strip())
    if os.path.exists(path):
        os.remove(path)
        # Bug fix: the success message previously contained the literal
        # placeholder "(unknown)" instead of the deleted file's path.
        return f"πŸ—‘οΈ μ‚­μ œ μ™„λ£Œ: {path}"
    return "❌ 파일 μ—†μŒ"
94
+
95
# ──────────────────────────────
# βœ… Time tool
# ──────────────────────────────
def time_now(_=""):
    """Return the current wall-clock time in KST (UTC+9) as a labelled string."""
    kst = datetime.timezone(datetime.timedelta(hours=9))
    stamp = datetime.datetime.now(kst).strftime("%Y-%m-%d %H:%M:%S")
    return f"ν˜„μž¬ μ‹œκ°: {stamp} (Asia/Seoul)"
101
+
102
# ──────────────────────────────
# βœ… Tool registration
# ──────────────────────────────
_custom_tools = [
    Tool(name="file_write", func=file_write, description="νŒŒμΌμ„ 생성/μˆ˜μ •ν•©λ‹ˆλ‹€."),
    Tool(name="file_read", func=file_read, description="파일 λ‚΄μš©μ„ μ½μŠ΅λ‹ˆλ‹€."),
    Tool(name="file_list", func=file_list, description="파일 λͺ©λ‘μ„ ν‘œμ‹œν•©λ‹ˆλ‹€."),
    Tool(name="file_delete", func=file_delete, description="νŒŒμΌμ„ μ‚­μ œν•©λ‹ˆλ‹€."),
    Tool(name="time_now", func=time_now, description="ν˜„μž¬ μ‹œκ°„μ„ λ°˜ν™˜ν•©λ‹ˆλ‹€."),
]
tools.extend(_custom_tools)
112
+
113
# ──────────────────────────────
# βœ… Agent initialization
# ──────────────────────────────
# Bug fix: initialize_agent selects the agent kind via the ``agent`` keyword;
# ``agent_type`` is not a recognized parameter (it falls through **kwargs and
# is rejected by the underlying AgentExecutor).
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)
122
+
123
# ──────────────────────────────
# βœ… Gradio UI
# ──────────────────────────────
def chat(message, history):
    """Run the agent on *message* and append the exchange to *history*.

    Returns the updated history twice: once for the Chatbot display and
    once for the state input on the next submit.
    """
    try:
        reply = agent.run(message)
    except Exception as err:
        # Surface agent failures inside the chat window instead of crashing.
        reply = f"⚠️ 였λ₯˜: {err}"
    updated = history + [(message, reply)]
    return updated, updated
133
+
134
# Chat page: one Chatbot pane, a textbox that submits to ``chat`` and a
# button that clears the conversation.
with gr.Blocks(theme=gr.themes.Soft(), title="PIXAL Assistant (LangChain + GitHub LLM)") as demo:
    gr.Markdown("""
## πŸ€– PIXAL Assistant
**LangChain 기반 λ©€ν‹°νˆ΄ μ—μ΄μ „νŠΈ**
🧰 DuckDuckGo · Wikipedia · YouTube · Math · Requests · Python REPL · File · Time
""")
    # NOTE(review): ``latex`` is not a documented gr.Chatbot parameter (the
    # usual knob is ``latex_delimiters``) — confirm Gradio accepts it.
    chatbot = gr.Chatbot(label="PIXAL λŒ€ν™”", height=600, latex=True)
    msg = gr.Textbox(label="λ©”μ‹œμ§€", placeholder="λͺ…λ Ή λ˜λŠ” μ§ˆλ¬Έμ„ μž…λ ₯ν•˜μ„Έμš”...")
    clear = gr.Button("μ΄ˆκΈ°ν™”")

    # chat() returns (history, history): the Chatbot is used both as the
    # state input and as the display output.
    msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
    # Clearing resets the Chatbot component to empty.
    clear.click(lambda: None, None, chatbot, queue=False)
146
+
147
if __name__ == "__main__":
    # Bind on all interfaces so the app is reachable inside a container/Space.
    demo.launch(server_name="0.0.0.0", server_port=7860)