Percy3822 commited on
Commit
4dfde07
·
verified ·
1 Parent(s): ffd5dfa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +139 -18
app.py CHANGED
@@ -1,25 +1,146 @@
1
- ---
 
 
 
 
 
2
 
3
- # 3. 📄 Dummy Specialized AI (Space: ActualDummyAI)
 
 
 
4
 
5
- app.py:
6
- ```python
7
- from fastapi import FastAPI, WebSocket
8
- import asyncio, json
9
 
10
- app = FastAPI()
 
 
11
 
12
- @app.websocket("/ws/dummy")
13
- async def ws_dummy(ws: WebSocket):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  await ws.accept()
15
- await ws.send_json({"type":"ready","msg":"Dummy AI online"})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  try:
17
- async for msg in ws.iter_text():
18
- data = json.loads(msg)
19
- # pretend to process step by step
20
- for i in range(3):
21
- await asyncio.sleep(1)
22
- await ws.send_json({"type":"step","text":f"Step {i+1} for task: {data}"})
23
- await ws.send_json({"type":"done","result":f"Finished task: {data}"})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
24
  except Exception as e:
25
- await ws.send_json({"error":str(e)})
 
 
 
 
 
 
 
 
 
import asyncio
import contextlib
import os
import time

import orjson
from fastapi import FastAPI, WebSocket, WebSocketDisconnect
from fastapi.responses import JSONResponse
7
 
8
+ # ----------------------------
9
+ # App
10
+ # ----------------------------
11
+ app = FastAPI(title="Dummy Python AI", version="1.0.0")
12
 
13
+ START_TS = time.time()
 
 
 
14
 
15
def j(obj) -> str:
    """Serialize *obj* to a JSON string (orjson for speed; returns str, not bytes)."""
    raw = orjson.dumps(obj)
    return raw.decode("utf-8")
18
 
19
@app.get("/health")
def health():
    """Liveness probe: report service identity and seconds since process start."""
    uptime = round(time.time() - START_TS, 2)
    payload = {"ok": True, "service": "dummy-ai", "uptime_sec": uptime}
    return JSONResponse(payload)
26
+
27
+ # ----------------------------
28
+ # WebSocket Protocol
29
+ # Client -> Server:
30
+ # {"type":"task", "text":"..."} # start a task
31
+ # {"type":"telemetry", "cpu":.., "mem":..} # periodic telemetry
32
+ # {"type":"cancel"} # cancel current stream
33
+ #
34
+ # Server -> Client:
# {"type":"ready","msg":"..."}           # once on connect
36
+ # {"type":"log","msg":"..."} # log line
37
+ # {"type":"token","text":"..." } # streaming token
38
+ # {"type":"say","text":"..."} # client should speak this ASAP
39
+ # {"type":"done","result":"..."} # task completed
40
+ # {"type":"error","msg":"..."} # error
41
+ # ----------------------------
42
+
43
@app.websocket("/ws/ai")
async def ws_ai(ws: WebSocket):
    """Dummy AI websocket endpoint.

    Handles "task", "telemetry" and "cancel" messages from the client and
    streams back ready/log/token/say/done/error events (see the protocol
    comment above). Only one streaming task runs at a time; a new "task"
    message cancels the previous stream first.
    """
    await ws.accept()
    await ws.send_text(j({"type": "ready", "msg": "Dummy AI online"}))

    current_task = None           # asyncio.Task for the in-flight stream, if any
    current_cancel = asyncio.Event()

    async def stream_dummy_answer(prompt: str, cancel: asyncio.Event):
        """Stream a staged, convincing dummy answer with tokens and say-cues.

        *cancel* is passed explicitly (not read via closure) so a stream that
        was started before a reset keeps observing its own event — the outer
        loop rebinds ``current_cancel`` to a fresh Event for each new task.
        """
        try:
            # 1) acknowledge
            await ws.send_text(j({"type": "log", "msg": f"Received task: {prompt[:120]}"}))
            await asyncio.sleep(0.2)

            # 2) "thinking…" (simulate tool use / chain-of-thought without revealing it)
            phases = [
                "Analyzing your request",
                "Planning steps",
                "Executing subtask 1",
                "Executing subtask 2",
                "Compiling results",
            ]
            for ph in phases:
                if cancel.is_set():
                    return
                await ws.send_text(j({"type": "log", "msg": ph}))
                await asyncio.sleep(0.35)

            # 3) start streaming an answer token-by-token
            answer = (
                "Sure — here’s a dummy streamed response to verify your end-to-end pipeline. "
                "I’m emitting short tokens so your client UI can show them live, "
                "and your TTS can speak them as they arrive."
            )
            # also ask client to speak a "lead in" immediately
            await ws.send_text(j({"type": "say", "text": "Starting response."}))

            for token in answer.split(" "):
                if cancel.is_set():
                    return
                await ws.send_text(j({"type": "token", "text": token + " "}))
                await asyncio.sleep(0.06)  # controls stream cadence

            if cancel.is_set():
                return

            await asyncio.sleep(0.15)
            await ws.send_text(j({"type": "say", "text": "Response complete."}))
            await ws.send_text(j({"type": "done", "result": "OK"}))

        except Exception as e:
            # CancelledError is BaseException, so a cancelled task is NOT
            # swallowed here — it propagates and the awaiter suppresses it.
            await ws.send_text(j({"type": "error", "msg": str(e)}))

    try:
        while True:
            raw = await ws.receive_text()
            try:
                msg = orjson.loads(raw)
            except Exception:
                await ws.send_text(j({"type": "error", "msg": "Invalid JSON"}))
                continue

            mtype = msg.get("type")

            if mtype == "telemetry":
                # best-effort log
                await ws.send_text(j({
                    "type": "log",
                    "msg": f"Telemetry cpu={msg.get('cpu')} mem={msg.get('mem')} active={msg.get('active_window')}"
                }))
                continue

            if mtype == "cancel":
                current_cancel.set()
                await ws.send_text(j({"type": "log", "msg": "Cancel requested"}))
                continue

            if mtype == "task":
                # cancel any ongoing stream before starting a new one
                if current_task and not current_task.done():
                    current_cancel.set()
                    current_task.cancel()
                    # cancel() only *requests* cancellation; await the task so
                    # it has actually stopped before we start the next stream.
                    with contextlib.suppress(asyncio.CancelledError):
                        await current_task
                    current_task = None

                # fresh cancel flag for the new stream
                current_cancel = asyncio.Event()
                prompt = str(msg.get("text", "")).strip() or "(empty)"
                current_task = asyncio.create_task(
                    stream_dummy_answer(prompt, current_cancel)
                )
                continue

            await ws.send_text(j({"type": "error", "msg": f"Unknown message type '{mtype}'"}))

    except WebSocketDisconnect:
        # client left
        return
    except Exception as e:
        try:
            await ws.send_text(j({"type": "error", "msg": str(e)}))
        finally:
            return
    finally:
        # don't leak a still-running stream task after the socket closes
        if current_task and not current_task.done():
            current_task.cancel()
142
+
143
# ------------- Local run -------------
# BUG FIX: the original tested `_name_ == "_main_"`; `_name_` is an undefined
# name, so merely importing the module raised NameError. The dunders are
# `__name__` / `__main__`.
if __name__ == "__main__":
    import uvicorn

    # PORT is provided by the hosting platform (HF Spaces); default 7860.
    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", "7860")))