Boning c committed on
Commit
2884b27
·
verified ·
1 Parent(s): 995d061

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -94
app.py CHANGED
@@ -1,115 +1,57 @@
1
- import os
2
- import re
3
  import subprocess
4
- import gradio as gr
5
- from transformers import AutoTokenizer
6
- from optimum.onnxruntime import ORTModelForCausalLM
7
 
8
- # 1. Prepare a writable workspace
9
- WORKDIR = os.path.join(os.environ["HOME"], "workspace")
10
  os.makedirs(WORKDIR, exist_ok=True)
11
 
12
- # 2. Load DialoGPT‐medium (chat‐tuned) via ONNXRuntime
13
- MODEL_ID = "microsoft/DialoGPT-medium"
14
- tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
15
- model = ORTModelForCausalLM.from_pretrained(MODEL_ID, provider="CPUExecutionProvider")
16
-
17
- # 3. Virtual desktop iframe
18
  NO_VNC_IFRAME = """
19
  <iframe src="http://localhost:6080/vnc.html?autoconnect=true&resize=remote"
20
  style="width:100%; height:600px; border:none;">
21
  </iframe>
22
  """
23
 
24
- def start_desktop():
25
- return NO_VNC_IFRAME
26
 
27
- def run_shell(cmd: str):
28
- proc = subprocess.run(
29
- cmd, shell=True, capture_output=True, text=True,
30
- env={**os.environ, "DISPLAY": ":1"}
31
- )
32
- return proc.stdout + proc.stderr
33
 
34
- def upload_file(file):
35
- dest = os.path.join(WORKDIR, os.path.basename(file.name))
36
- with open(dest, "wb") as f:
37
- f.write(file.read())
38
- return f"✅ Uploaded to: {dest}"
39
 
40
- def agent_chat(user_msg, history):
41
- # System prompt and conversation framing
42
- system = (
43
- "You are a helpful shell assistant. "
44
- "When you need to run a shell command, wrap it in [CMD]...[/CMD]."
 
45
  )
46
- # Build the full conversation string
47
- conv = system + tokenizer.eos_token + "\n"
48
- for u, a in history:
49
- conv += f"User: {u}{tokenizer.eos_token}Assistant: {a}{tokenizer.eos_token}\n"
50
- conv += f"User: {user_msg}{tokenizer.eos_token}Assistant:"
51
 
52
- # Tokenize + generate
53
- inputs = tokenizer(conv, return_tensors="pt")
54
- outputs = model.generate(
55
- **inputs,
56
- max_new_tokens=150,
57
- do_sample=True,
58
- temperature=0.7,
59
- pad_token_id=tokenizer.eos_token_id
60
- )
61
- full_reply = tokenizer.decode(outputs[0], skip_special_tokens=True)
62
- reply = full_reply.split("Assistant:")[-1].strip()
63
 
64
- # Extract & execute shell commands
65
- cmds = re.findall(r"\[CMD\](.*?)\[/CMD\]", reply, re.DOTALL)
74
- if cmds:
75
- out = ""
76
- for cmd in cmds:
77
- out += f"$ {cmd}\n"
78
- p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
79
- out += p.stdout + p.stderr + "\n"
80
- reply += f"\n\n**Command Output:**\n```\n{out}```"
81
-
82
- history.append((user_msg, reply))
83
- return reply, history
84
-
85
- # 4. Gradio UI
86
- with gr.Blocks() as demo:
87
- gr.Markdown("# 🤖💬 ONNX Chat (DialoGPT) + Shell Agent")
88
 
89
- with gr.Tab("Desktop"):
90
- gr.Markdown("### 🖥️ Virtual Desktop (noVNC)")
91
- btn = gr.Button("Launch Desktop")
92
- desktop = gr.HTML()
93
- btn.click(start_desktop, outputs=desktop)
94
 
95
- with gr.Tab("Shell"):
96
- gr.Markdown("### 🛠️ Direct Shell")
97
- cmd_in = gr.Textbox(label="Command")
98
- cmd_out = gr.Textbox(label="Output", lines=6)
99
- gr.Button("Run").click(run_shell, inputs=cmd_in, outputs=cmd_out)
100
 
101
- with gr.Tab("Chat"):
102
- gr.Markdown("### 💬 Chat with AI (can run shell)")
103
- chat = gr.ChatInterface(
104
- fn=agent_chat,
105
- state=[],
106
- placeholder="Ask me to list files: [CMD]ls workspace[/CMD]"
107
- )
108
 
109
- with gr.Tab("Upload"):
110
- gr.Markdown("### 📂 Upload File to Sandbox")
111
- uploader = gr.File(label="Choose File")
112
- status = gr.Textbox(label="Status")
113
- uploader.change(upload_file, inputs=uploader, outputs=status)
114
 
115
- demo.launch(server_name="0.0.0.0", server_port=7860, share=False)
 
 
 
 
1
  import subprocess
2
+ import os
 
 
3
 
4
# Prepare a writable workspace inside the user's home directory.
# os.path.expanduser("~") falls back to a pwd-based lookup when $HOME is
# unset, unlike os.environ['HOME'] which raises KeyError in that case.
WORKDIR = os.path.join(os.path.expanduser("~"), "workspace")
os.makedirs(WORKDIR, exist_ok=True)
7
 
8
+ # HTML iframe to embed noVNC viewer
 
 
 
 
 
9
  NO_VNC_IFRAME = """
10
  <iframe src="http://localhost:6080/vnc.html?autoconnect=true&resize=remote"
11
  style="width:100%; height:600px; border:none;">
12
  </iframe>
13
  """
14
 
15
def start_desktop():
    """Return the noVNC iframe markup so Gradio can embed the virtual desktop."""
    return NO_VNC_IFRAME
 
 
 
 
 
18
 
19
def run_shell(cmd):
    """Execute *cmd* in a shell and return combined stdout + stderr.

    DISPLAY=:1 is injected so commands that open X11 windows target the
    virtual desktop.  NOTE(review): ``shell=True`` on user-supplied text is
    intentional here — this IS the direct-shell tab — but it must never be
    reachable from an untrusted network path.
    """
    shell_env = dict(os.environ, DISPLAY=":1")
    completed = subprocess.run(
        cmd,
        shell=True,
        capture_output=True,
        text=True,
        env=shell_env,
    )
    return completed.stdout + completed.stderr
 
 
 
 
29
 
30
def upload_file(file):
    """Copy an uploaded file into the sandbox workspace.

    Gradio's ``File`` component hands the callback a temp-file object; copying
    by its ``.name`` path is more robust than ``file.read()``, which breaks
    when the handle is already closed or its pointer sits at EOF.

    Returns a human-readable status string for the Status textbox.
    """
    import shutil  # local import: leaves the file's top-level imports untouched

    if file is None:  # the change event also fires when the selection is cleared
        return "⚠️ No file selected."
    dest_path = os.path.join(WORKDIR, os.path.basename(file.name))
    shutil.copyfile(file.name, dest_path)
    return f"✅ Uploaded to: {dest_path}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
 
37
# Build the three-tab Gradio UI (desktop viewer, direct shell, file upload).
# NOTE(review): the new version of this file imports only `subprocess` and
# `os` — `gr` is undefined unless `import gradio as gr` is restored at the top.
with gr.Blocks(css="@import url('https://cdn.simplecss.org/simple.min.css');") as demo:
    gr.Markdown("# 🖥️ Offline Visual Sandbox (no API, no cost)")

    with gr.Tab("Desktop"):
        desktop_btn = gr.Button("Launch Desktop")
        desktop_html = gr.HTML()
        desktop_btn.click(start_desktop, outputs=desktop_html)

    with gr.Tab("Shell"):
        shell_box = gr.Textbox(label="Shell Command", placeholder="Try 'ls /home/user/workspace'")
        shell_btn = gr.Button("Run")
        shell_out = gr.Textbox(label="Command Output", lines=10)
        shell_btn.click(run_shell, inputs=shell_box, outputs=shell_out)

    with gr.Tab("Upload"):
        picker = gr.File(label="Upload File")
        picker_status = gr.Textbox(label="Upload Status")
        picker.change(upload_file, inputs=picker, outputs=picker_status)
 
55
 
56
+ demo.launch(
57
+ server_name="0.0.0.0",