Eteims committed on
Commit
f3fa710
Β·
verified Β·
1 Parent(s): 26c6eb4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +72 -23
app.py CHANGED
@@ -3,8 +3,9 @@ from openai import OpenAI
3
  import os
4
  import json
5
  from novita_sandbox.code_interpreter import Sandbox
 
6
 
7
- # Create client
8
  client = OpenAI(
9
  base_url="https://api.novita.ai/openai",
10
  api_key=os.environ["NOVITA_API_KEY"],
@@ -12,10 +13,10 @@ client = OpenAI(
12
 
13
  model = "meta-llama/llama-3.3-70b-instruct"
14
 
15
- # Initialize sandbox with working directory
16
  sandbox = Sandbox.create(timeout=1200)
17
 
18
- # Define tool functions (no JSON serialization now)
19
  def read_file(path: str):
20
  print(f"[DEBUG] read_file called with path: {path}")
21
  try:
@@ -58,7 +59,7 @@ def run_commands(command: str):
58
  print(f"[DEBUG] run_commands error: {e}")
59
  return f"Error running command: {e}"
60
 
61
- # Register tools
62
  tools = [
63
  {
64
  "type": "function",
@@ -67,9 +68,7 @@ tools = [
67
  "description": "Read contents of a file inside the sandbox",
68
  "parameters": {
69
  "type": "object",
70
- "properties": {
71
- "path": {"type": "string", "description": "File path in the sandbox"}
72
- },
73
  "required": ["path"],
74
  },
75
  },
@@ -82,8 +81,8 @@ tools = [
82
  "parameters": {
83
  "type": "object",
84
  "properties": {
85
- "path": {"type": "string", "description": "File path in the sandbox"},
86
- "data": {"type": "string", "description": "Content to write"},
87
  },
88
  "required": ["path", "data"],
89
  },
@@ -121,22 +120,26 @@ tools = [
121
  "parameters": {
122
  "type": "object",
123
  "properties": {
124
- "command": {
125
- "type": "string",
126
- "description": "The shell command to run, e.g. 'ls' or 'python main.py'",
127
- }
128
  },
129
  "required": ["command"],
130
  },
131
  },
132
- }
133
  ]
134
 
135
- # Persistent messages
136
  messages = []
137
 
 
 
 
 
 
 
 
138
  def chat_fn(user_message, history):
139
- global messages
140
  messages.append({"role": "user", "content": user_message})
141
 
142
  # Send to model
@@ -169,7 +172,6 @@ def chat_fn(user_message, history):
169
  fn_result = run_commands(**fn_args)
170
  else:
171
  fn_result = f"Error: Unknown tool {fn_name}"
172
- print(f"[DEBUG] Unknown tool requested: {fn_name}")
173
 
174
  messages.append({
175
  "tool_call_id": tool_call.id,
@@ -189,12 +191,59 @@ def chat_fn(user_message, history):
189
 
190
  return output_text
191
 
192
- with gr.Blocks() as demo:
193
- gr.ChatInterface(chat_fn, title="Sandbox Chat Agent")
194
-
195
- # Graceful cleanup when the server stops
196
- import atexit
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
197
  atexit.register(lambda: (sandbox.kill(), print("[DEBUG] Sandbox terminated. πŸ‘‹")))
198
 
199
  if __name__ == "__main__":
200
- demo.launch()
 
 
3
  import os
4
  import json
5
  from novita_sandbox.code_interpreter import Sandbox
6
+ import atexit
7
 
8
+ # --- Initialization ---
9
  client = OpenAI(
10
  base_url="https://api.novita.ai/openai",
11
  api_key=os.environ["NOVITA_API_KEY"],
 
13
 
14
  model = "meta-llama/llama-3.3-70b-instruct"
15
 
16
+ # Create sandbox
17
  sandbox = Sandbox.create(timeout=1200)
18
 
19
+ # --- Tool functions ---
20
  def read_file(path: str):
21
  print(f"[DEBUG] read_file called with path: {path}")
22
  try:
 
59
  print(f"[DEBUG] run_commands error: {e}")
60
  return f"Error running command: {e}"
61
 
62
+ # --- Register tools ---
63
  tools = [
64
  {
65
  "type": "function",
 
68
  "description": "Read contents of a file inside the sandbox",
69
  "parameters": {
70
  "type": "object",
71
+ "properties": {"path": {"type": "string"}},
 
 
72
  "required": ["path"],
73
  },
74
  },
 
81
  "parameters": {
82
  "type": "object",
83
  "properties": {
84
+ "path": {"type": "string"},
85
+ "data": {"type": "string"},
86
  },
87
  "required": ["path", "data"],
88
  },
 
120
  "parameters": {
121
  "type": "object",
122
  "properties": {
123
+ "command": {"type": "string"},
 
 
 
124
  },
125
  "required": ["command"],
126
  },
127
  },
128
+ },
129
  ]
130
 
131
+ # --- Persistent chat messages ---
132
  messages = []
133
 
134
+ # --- Global model setter ---
135
def set_model(selected_model):
    """Rebind the module-level `model` (read by chat_fn) to *selected_model*.

    Returns a markdown confirmation string for display in the UI.
    """
    global model
    model = selected_model
    confirmation = f"βœ… Model switched to **{model}**"
    print(f"[DEBUG] Model switched to: {model}")
    return confirmation
140
+
141
  def chat_fn(user_message, history):
142
+ global messages, model
143
  messages.append({"role": "user", "content": user_message})
144
 
145
  # Send to model
 
172
  fn_result = run_commands(**fn_args)
173
  else:
174
  fn_result = f"Error: Unknown tool {fn_name}"
 
175
 
176
  messages.append({
177
  "tool_call_id": tool_call.id,
 
191
 
192
  return output_text
193
 
194
+ # --- Command Interface function ---
195
def execute_command(command):
    """Run a shell command typed into the Command Interface panel.

    Blank/whitespace-only input is rejected with a warning; otherwise the
    command is executed in the sandbox via run_commands and any output is
    wrapped in a ```bash code fence for markdown rendering.
    """
    if not command.strip():
        return "⚠️ Please enter a command."
    print(f"[DEBUG] Executing command from interface: {command}")
    output = run_commands(command)
    if not output:
        return "βœ… Command executed (no output)."
    return f"```bash\n{output}\n```"
201
+
202
# --- Gradio UI ---
# Two-panel layout: a chat agent on the left, a model selector plus a
# direct sandbox shell on the right.
with gr.Blocks(title="Novita Sandbox App") as demo:
    gr.Markdown("## 🧠 Novita Sandbox Agent")

    with gr.Row(equal_height=True):
        # Left: conversational agent backed by chat_fn.
        with gr.Column(scale=2):
            gr.Markdown("### πŸ’¬ Chat Interface")
            gr.ChatInterface(chat_fn)

        # Right: command interface and model controls.
        with gr.Column(scale=1):
            gr.Markdown("### πŸ’» Command Interface")

            # Dropdown of selectable chat models; defaults to the current
            # module-level `model`.
            model_selector = gr.Dropdown(
                label="Select Model",
                choices=[
                    "meta-llama/llama-3.3-70b-instruct",
                    "deepseek/deepseek-v3.2-exp",
                    "qwen/qwen3-coder-30b-a3b-instruct",
                    "openai/gpt-oss-120b",
                    "moonshotai/kimi-k2-instruct",
                ],
                value=model,
                interactive=True,
            )

            model_status = gr.Markdown(f"βœ… Current model: **{model}**")
            # Changing the dropdown rebinds the global model via set_model
            # and shows its confirmation string.
            model_selector.change(set_model, inputs=model_selector, outputs=model_status)

            # One-line command entry, executed in the sandbox on "Run".
            command_input = gr.Textbox(
                label="Command",
                placeholder="e.g., ls, python main.py",
                lines=1,
            )
            with gr.Row():
                run_btn = gr.Button("Run", variant="primary", scale=0)
                # NOTE(review): reconstructed from a diff page — command_output
                # may belong outside this Row; confirm against the original file.
                command_output = gr.Markdown("Command output will appear here...")

            run_btn.click(execute_command, inputs=command_input, outputs=command_output)
243
+
244
# --- Cleanup on exit ---
# Kill the sandbox when the Python process exits so it does not keep
# running (and billing) after the Gradio server stops.
atexit.register(lambda: (sandbox.kill(), print("[DEBUG] Sandbox terminated. πŸ‘‹")))

if __name__ == "__main__":
    demo.launch()
249
+