AEUPH committed on
Commit
8431283
·
verified ·
1 Parent(s): 94e1d62

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +544 -16
Dockerfile CHANGED
@@ -1,31 +1,559 @@
 
1
  FROM python:3.10-slim
2
 
3
  # Set working directory
4
  WORKDIR /app
5
 
6
- # Copy requirements and install
7
- COPY requirements.txt .
8
- RUN pip install --no-cache-dir -r requirements.txt
9
 
10
- # Create a user to avoid running as root (HF requirement)
11
- RUN useradd -m -u 1000 user
 
 
 
 
 
 
 
 
 
12
 
13
- # Switch to user
 
14
  USER user
15
  ENV HOME=/home/user \
16
  PATH=/home/user/.local/bin:$PATH
17
 
18
- # Copy the monolith to the container
19
- COPY --chown=user . .
 
 
 
 
 
20
 
21
- # --- DEBUGGING STEP ---
22
- # This will print the contents of /app to your build logs.
23
- # Check the logs to see where app.py actually is!
24
- RUN ls -R /app
25
- # ----------------------
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
 
27
- # Expose the standard Hugging Face port
28
- EXPOSE 7860
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
 
30
- # Launch the Monolith
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  CMD ["python", "app.py"]
 
1
# Use a lightweight Python base
FROM python:3.10-slim

# Set working directory
WORKDIR /app

# 1. Install System Dependencies
# 'git' is often needed by Diffusers/Transformers to load models.
# --no-install-recommends keeps the image small.
RUN apt-get update && apt-get install -y --no-install-recommends git && rm -rf /var/lib/apt/lists/*

# 2. Install Python Dependencies
# We include 'accelerate' and 'transformers' which are required by 'diffusers'.
# 'pillow' is listed explicitly because app.py imports PIL directly
# (don't rely on it arriving only as a transitive dependency).
RUN pip install --no-cache-dir \
    torch \
    flask \
    flask-sock \
    diffusers \
    transformers \
    accelerate \
    safetensors \
    scipy \
    pillow

# 3. Create a non-root user (Required for Hugging Face Spaces)
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# 4. Write the Monolith Application to disk
# We use 'COPY <<'EOF' app.py' to preserve all special characters, quotes, and backslashes exactly.
COPY --chown=user <<'EOF' app.py
32
+ import sys
33
+ import os
34
+ import subprocess
35
+ import time
36
 
37
+ # ============================================================================
38
+ # 1. PORTABLE ENVIRONMENT SELF-CORRECTION (Legacy Support)
39
+ # ============================================================================
40
def ensure_portable_env():
    """Re-exec this script under a bundled portable Python if one exists.

    Checks for a local 'py312/python.exe' next to this file. If found and we
    are not already running from it, the script is restarted with that
    interpreter and this process exits with the child's exit code.
    Returns None either way.
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    portable_python = os.path.join(base_dir, "py312", "python.exe")

    if os.path.exists(portable_python):
        # Case-insensitive, normalized comparison (Windows-style paths).
        current_exe = os.path.normpath(sys.executable).lower()
        target_exe = os.path.normpath(portable_python).lower()

        if current_exe != target_exe:
            print(f"[*] SWITCHING TO PORTABLE KERNEL: {portable_python}")
            args = [portable_python, __file__] + sys.argv[1:]
            # Fix: propagate the child's exit status instead of always
            # exiting 0 (previously `subprocess.call(args); sys.exit(0)`).
            sys.exit(subprocess.call(args))
    else:
        print(f"[*] Portable 'py312' not found. Using system Python: {sys.executable}")
59
 
60
+ ensure_portable_env()
61
+
62
+ # ============================================================================
63
+ # 2. DEPENDENCY CHECK & IMPORTS
64
+ # ============================================================================
65
+ print(f"[*] NeuralOS Monolith Bootloader v9.2 (Running in: {sys.executable})")
66
+
67
+ try:
68
+ import torch
69
+ from flask import Flask
70
+ from flask_sock import Sock
71
+ from diffusers import StableDiffusionPipeline, LCMScheduler, AutoencoderTiny
72
+ from PIL import Image, ImageDraw, ImageFont
73
+ except ImportError as e:
74
+ print(f"\n[!] CRITICAL: Missing dependency: {e.name}")
75
+ sys.exit(1)
76
+
77
+ # ============================================================================
78
+ # 3. EMBEDDED GUI
79
+ # ============================================================================
80
+
81
+ HTML_TEMPLATE = """
82
+ <!DOCTYPE html>
83
+ <html lang="en">
84
+ <head>
85
+ <meta charset="UTF-8">
86
+ <title>NeuralOS | Monolith</title>
87
+ <style>
88
+ @import url('https://fonts.googleapis.com/css2?family=VT323&display=swap');
89
+
90
+ body {
91
+ background: #050505;
92
+ margin: 0;
93
+ overflow: hidden;
94
+ display: flex;
95
+ justify-content: center;
96
+ align-items: center;
97
+ height: 100vh;
98
+ font-family: 'VT323', monospace;
99
+ }
100
+
101
+ .monitor-case {
102
+ background: #1a1a1a;
103
+ padding: 20px;
104
+ border-radius: 20px;
105
+ box-shadow: 0 0 0 5px #222, 0 0 50px rgba(0,0,0,0.8), inset 0 0 20px rgba(0,0,0,0.5);
106
+ position: relative;
107
+ }
108
+
109
+ .screen-container {
110
+ width: 1024px;
111
+ height: 1024px;
112
+ position: relative;
113
+ background: #000;
114
+ overflow: hidden;
115
+ border-radius: 4px;
116
+ }
117
+
118
+ .crt-overlay {
119
+ position: absolute;
120
+ inset: 0;
121
+ background: linear-gradient(rgba(18, 16, 16, 0) 50%, rgba(0, 0, 0, 0.25) 50%);
122
+ background-size: 100% 4px;
123
+ pointer-events: none;
124
+ z-index: 10;
125
+ }
126
+
127
+ .scanline {
128
+ width: 100%;
129
+ height: 100px;
130
+ z-index: 10;
131
+ background: linear-gradient(0deg, rgba(0,0,0,0) 0%, rgba(255, 255, 255, 0.04) 50%, rgba(0,0,0,0) 100%);
132
+ opacity: 0.1;
133
+ position: absolute;
134
+ bottom: 100%;
135
+ animation: scanline 10s linear infinite;
136
+ pointer-events: none;
137
+ }
138
+
139
+ @keyframes scanline { 0% { bottom: 100%; } 100% { bottom: -100px; } }
140
+
141
+ #display {
142
+ width: 100%;
143
+ height: 100%;
144
+ object-fit: contain;
145
+ image-rendering: pixelated;
146
+ }
147
+
148
+ .controls {
149
+ position: absolute;
150
+ bottom: -60px;
151
+ right: 20px;
152
+ display: flex;
153
+ gap: 10px;
154
+ }
155
+
156
+ .pwr-btn {
157
+ background: #333;
158
+ color: #0f0;
159
+ border: 2px solid #222;
160
+ padding: 10px 20px;
161
+ font-family: 'VT323';
162
+ cursor: pointer;
163
+ text-transform: uppercase;
164
+ box-shadow: 0 4px 0 #111;
165
+ }
166
+ .pwr-btn:active { transform: translateY(4px); box-shadow: none; }
167
+
168
+ .led {
169
+ width: 8px;
170
+ height: 8px;
171
+ border-radius: 50%;
172
+ background: #111;
173
+ margin-top: 15px;
174
+ box-shadow: 0 0 2px #000;
175
+ }
176
+ .led.on { background: #0f0; box-shadow: 0 0 5px #0f0; }
177
+ .led.busy { background: #f00; box-shadow: 0 0 5px #f00; animation: blink 0.1s infinite; }
178
+ @keyframes blink { 0% { opacity: 0.5; } 100% { opacity: 1; } }
179
+ </style>
180
+ </head>
181
+ <body>
182
+ <div class="monitor-case">
183
+ <div class="screen-container">
184
+ <div class="crt-overlay"></div>
185
+ <div class="scanline"></div>
186
+ <img id="display" src="" alt="AWAITING NEURAL SIGNAL..." />
187
+ </div>
188
+ <div class="controls">
189
+ <div class="led on"></div>
190
+ <div id="hdd-led" class="led"></div>
191
+ <button class="pwr-btn" onclick="location.reload()">HARD RESET</button>
192
+ </div>
193
+ </div>
194
+ <script>
195
+ const display = document.getElementById('display');
196
+ const hddLed = document.getElementById('hdd-led');
197
+ let ws = null;
198
+
199
+ function connect() {
200
+ const proto = window.location.protocol === 'https:' ? 'wss' : 'ws';
201
+ ws = new WebSocket(`${proto}://${window.location.host}/kernel`);
202
+
203
+ ws.onmessage = (e) => {
204
+ const msg = JSON.parse(e.data);
205
+ if (msg.type === 'frame_update') {
206
+ display.src = `data:image/jpeg;base64,${msg.data}`;
207
+ hddLed.classList.add('busy');
208
+ setTimeout(() => hddLed.classList.remove('busy'), 50);
209
+ }
210
+ };
211
+
212
+ ws.onclose = () => { setTimeout(connect, 3000); };
213
+ }
214
+
215
+ display.addEventListener('mousedown', (e) => {
216
+ if (!ws) return;
217
+ const rect = display.getBoundingClientRect();
218
+ const x = Math.floor(((e.clientX - rect.left) / rect.width) * 128);
219
+ const y = Math.floor(((e.clientY - rect.top) / rect.height) * 128);
220
+ ws.send(JSON.stringify({ type: 'click', x: x, y: y }));
221
+ });
222
+
223
+ document.addEventListener('keydown', (e) => {
224
+ if (!ws) return;
225
+ if (['Backspace', 'Tab', 'ArrowUp', 'ArrowDown', 'ArrowLeft', 'ArrowRight'].includes(e.key)) {
226
+ e.preventDefault();
227
+ }
228
+ ws.send(JSON.stringify({ type: 'keydown', key: e.key }));
229
+ });
230
+
231
+ connect();
232
+ </script>
233
+ </body>
234
+ </html>
235
+ """
236
+
237
+ # ============================================================================
238
+ # 4. KERNEL & HARDWARE
239
+ # ============================================================================
240
+
241
+ from dataclasses import dataclass, field
242
+ from typing import Dict, List
243
+ import io
244
+ import json
245
+ import base64
246
+
247
# Latent "texture" tensors for each UI element, all NCHW with 4 channels,
# stored as fp16. Height/width per element are table-driven.
_DRIVER_SHAPES = {
    "TITLE_BAR": (4, 32),
    "CLOSE_BTN": (4, 4),
    "TASKBAR": (6, 128),
    "START_BTN": (6, 24),
    "DESKTOP_BG": (128, 128),
    "ICON_NOTEPAD": (8, 8),
    "ICON_CMD": (8, 8),
    "ICON_FOLDER": (8, 8),
    "ICON_GAME": (8, 8),
}

DRIVERS = {
    name: torch.zeros((1, 4, h, w), dtype=torch.float16)
    for name, (h, w) in _DRIVER_SHAPES.items()
}


def init_drivers():
    """Paint the hand-tuned latent patterns into the DRIVERS tensors."""
    d = DRIVERS
    d["TITLE_BAR"][:, 0, 0:1, :] = 2.0
    d["TITLE_BAR"][:, 1, :, :] = -1.0
    d["CLOSE_BTN"][:, 2, :, :] = 2.5
    d["TASKBAR"][:, 0, 0, :] = 1.2
    d["START_BTN"][:, 1, 1:5, 2:22] = 1.8
    d["DESKTOP_BG"][:, 1, 0:80, :] = 1.2
    d["DESKTOP_BG"][:, 2, 0:80, :] = 1.5
    d["DESKTOP_BG"][:, 1, 80:128, :] = 0.8
    d["ICON_NOTEPAD"][:, 0, :, :] = 1.5
    d["ICON_CMD"][:, 1, :, :] = -1.0
    d["ICON_GAME"][:, 0, 2:6, 2:6] = 2.0
269
 
270
@dataclass
class Application:
    """Static descriptor for a launchable program (not a running window)."""
    name: str            # human-readable window title
    icon_dna: str        # key into DRIVERS for the desktop icon latent
    default_size: tuple  # (width, height) in latent pixels
    is_game: bool = False


# Registry of every installable program, keyed by the id used in spawn().
PROGRAMS = {
    "notepad": Application(name="Notepad", icon_dna="ICON_NOTEPAD", default_size=(48, 40)),
    "cmd": Application(name="Terminal", icon_dna="ICON_CMD", default_size=(56, 40)),
    "explorer": Application(name="My Computer", icon_dna="ICON_FOLDER", default_size=(72, 56)),
    "doom": Application(name="NeuroDoom", icon_dna="ICON_GAME", default_size=(64, 48), is_game=True),
}
283
+
284
@dataclass
class Process:
    # Runtime state of one open window, managed by OSKernel.
    pid: int                     # unique id, assigned monotonically from 1000
    name: str                    # window title drawn on the frame overlay
    app_type: str                # key into PROGRAMS ("notepad", "cmd", ...)
    position: tuple              # (x, y) top-left corner in 128x128 latent space
    size: tuple                  # (w, h) window size in latent pixels
    latent_state: torch.Tensor   # per-window latent patch, shape (1, 4, h, w)
    text_buffer: List[str] = field(default_factory=list)  # lines drawn as text overlay
    input_focus: bool = False    # True when this window receives keystrokes
    status: str = "running"
    z_order: int = 0             # higher values draw on top
    meta: Dict = field(default_factory=dict)  # app-specific scratch (e.g. doom key state)
297
+
298
class OSKernel:
    """In-memory "operating system": owns the window list (processes),
    keyboard focus, desktop icons, and composites everything into a single
    128x128 latent frame for the neural renderer."""

    def __init__(self):
        self.processes = {}          # pid -> Process
        self.next_pid = 1000
        self.focused_pid = None      # pid currently receiving keys, or None
        self.system_state = "BOOT"   # "BOOT" until init_desktop() flips it
        self.boot_log = []
        self.desktop_latent = None
        self.desktop_icons = []
        self.start_menu_open = False
        self.current_dir = "C:\\Users\\Admin"

    def init_desktop(self):
        """Leave BOOT state: install the wallpaper latent and icon layout."""
        self.desktop_latent = DRIVERS["DESKTOP_BG"].clone()
        self.desktop_icons = [
            {"app": "notepad", "x": 4, "y": 4},
            {"app": "cmd", "x": 4, "y": 16},
            {"app": "explorer", "x": 4, "y": 28},
            {"app": "doom", "x": 4, "y": 40},
        ]
        self.system_state = "DESKTOP"

    def spawn(self, app_type, x, y):
        """Create a window for `app_type` at (x, y), focus it, return its pid.

        Returns None for an unknown app_type."""
        if app_type not in PROGRAMS:
            return None
        app = PROGRAMS[app_type]
        pid = self.next_pid
        self.next_pid += 1

        # Seed the text overlay depending on the app.
        buf = []
        if app_type == "cmd":
            buf = ["NEURAL OS [Version 9.2]", "(C) Monolith Corp", "", f"{self.current_dir}>"]
        elif app_type == "notepad":
            buf = ["_"]
        elif app_type == "doom":
            buf = ["[INIT] LOADING TEXTURES...", "[INIT] CONNECTING NEURAL NET..."]

        self.processes[pid] = Process(
            pid, app.name, app_type, (x, y), app.default_size,
            torch.zeros((1, 4, app.default_size[1], app.default_size[0]), dtype=torch.float16),
            buf, False, "running", pid, {"state": "menu"} if app.is_game else {}
        )
        self.focus(pid)
        return pid

    def focus(self, pid):
        """Give `pid` keyboard focus and raise it above all other windows."""
        if pid in self.processes:
            for p in self.processes.values():
                p.input_focus = False
            self.processes[pid].input_focus = True
            self.processes[pid].z_order = max(
                [p.z_order for p in self.processes.values()], default=0) + 1
            self.focused_pid = pid

    def kill(self, pid):
        """Terminate `pid`.

        Fix: also clears focused_pid when the focused window is killed —
        previously a stale focused_pid made the next input() raise KeyError."""
        if pid in self.processes:
            del self.processes[pid]
            if self.focused_pid == pid:
                self.focused_pid = None

    def input(self, key):
        """Route one keystroke to the focused window; no-op without focus."""
        # .get() guards against no focus AND a stale pid (robustness fix).
        proc = self.processes.get(self.focused_pid)
        if proc is None:
            return

        if proc.app_type == "cmd":
            if key == "Enter":
                # Command text is whatever follows the last '>' prompt marker.
                cmd = proc.text_buffer[-1].split(">")[-1].strip()
                proc.text_buffer.append("")
                if cmd == "dir":
                    proc.text_buffer.append(" <DIR> Documents")
                elif cmd == "cls":
                    proc.text_buffer = []
                elif cmd == "exit":
                    # Window is gone — do not append a fresh prompt to it.
                    self.kill(proc.pid)
                    return
                elif cmd.startswith("start "):
                    target = cmd.split(" ")[1]
                    if target in PROGRAMS:
                        # Deferred: the server loop performs the actual spawn.
                        proc.meta["spawn_req"] = target
                        proc.text_buffer.append(f"Starting {target}...")
                else:
                    proc.text_buffer.append("Unknown command")
                proc.text_buffer.append(f"{self.current_dir}>")
            elif key == "Backspace":
                # Only erase characters typed after the prompt marker.
                if ">" in proc.text_buffer[-1] and len(proc.text_buffer[-1].split(">")[-1]) > 0:
                    proc.text_buffer[-1] = proc.text_buffer[-1][:-1]
            elif len(key) == 1:
                proc.text_buffer[-1] += key

        elif proc.app_type == "notepad":
            if key == "Enter":
                proc.text_buffer.append("")
            elif key == "Backspace":
                if len(proc.text_buffer[-1]) > 0:
                    proc.text_buffer[-1] = proc.text_buffer[-1][:-1]
                elif len(proc.text_buffer) > 1:
                    proc.text_buffer.pop()
            elif len(key) == 1:
                proc.text_buffer[-1] += key

        elif proc.app_type == "doom":
            # Doom just records the key; the server loop reacts to it.
            proc.meta["last_key"] = key
            proc.meta["needs_update"] = True

    def composite(self):
        """Assemble the full 128x128 latent frame: wallpaper, icons, windows
        (back-to-front by z_order), then the taskbar strip.

        Returns None while still in BOOT state."""
        if self.system_state == "BOOT":
            return None
        out = self.desktop_latent.clone()
        for i in self.desktop_icons:
            if PROGRAMS[i['app']].icon_dna in DRIVERS:
                dna = DRIVERS[PROGRAMS[i['app']].icon_dna]
                out[:, :, i['y']:i['y']+8, i['x']:i['x']+8] = dna
        for p in sorted(self.processes.values(), key=lambda x: x.z_order):
            x, y = p.position
            w, h = p.size
            # Windows hanging off-screen are silently skipped.
            if x + w <= 128 and y + h <= 128:
                out[:, :, y:y+h, x:x+w] = p.latent_state
        tb = DRIVERS["TASKBAR"].clone()
        tb[:, :, :, 0:24] = DRIVERS["START_BTN"]
        out[:, :, 122:128, :] = tb
        return out
402
+
403
+ # ============================================================================
404
+ # 5. SERVER
405
+ # ============================================================================
406
+
407
app = Flask(__name__)
sock = Sock(app)
pipe = None          # lazily-initialized diffusers pipeline (see init_ai)
kernel = OSKernel()


def init_ai():
    """Load the Stable Diffusion pipeline onto the best available device.

    Uses fp16 on CUDA and fp32 on CPU. On CUDA, additionally tries to enable
    the LCM LoRA + tiny VAE speedups, falling back to the standard scheduler.
    Leaves the global `pipe` as None if loading fails; callers must cope.
    """
    global pipe
    print("[*] Loading Neural Graphics Pipeline...")

    # AUTO-DETECT DEVICE
    if torch.cuda.is_available():
        device = "cuda"
        dtype = torch.float16
        print(f"[✓] CUDA Detected: {torch.cuda.get_device_name(0)}")
    else:
        device = "cpu"
        dtype = torch.float32
        print("[!] WARNING: CUDA Not Found. Running CPU Mode.")

    try:
        pipe = StableDiffusionPipeline.from_pretrained(
            "runwayml/stable-diffusion-v1-5",
            torch_dtype=dtype,
            variant="fp16" if device == "cuda" else None
        ).to(device)

        if device == "cuda":
            try:
                # Optional speedups; failure here is non-fatal.
                pipe.load_lora_weights("latent-consistency/lcm-lora-sdv1-5")
                pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
                pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taesd", torch_dtype=dtype).to(device)
                print("[✓] Optimizations (LCM+TAE) Enabled")
            except Exception:
                # Fix: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt.
                print("[!] Using Standard Scheduler")
    except Exception as e:
        print(f"[!] AI Init Error: {e}")
443
+
444
def render_frame(k_obj):
    """Render the kernel's composited latent into a base64-encoded JPEG.

    k_obj: an OSKernel. During BOOT, composite() returns None and a black
    128x128 canvas is used instead of VAE output. Text overlays (boot log,
    window titles, text buffers) are drawn on the upscaled 1024x1024 image.
    Requires the global `pipe` to be initialized when out of BOOT state.
    """
    latent_tensor = k_obj.composite()
    if latent_tensor is not None:
        with torch.no_grad():
            # Undo the SD VAE latent scaling (factor 0.18215) before decoding.
            latents = (1 / 0.18215) * latent_tensor.to(pipe.device)
            if pipe.device.type == 'cpu': latents = latents.float()

            img = pipe.vae.decode(latents).sample
            # Map [-1, 1] -> [0, 1]; nan_to_num guards against fp16 NaNs.
            img = (img / 2 + 0.5).clamp(0, 1).nan_to_num()
            img = img.cpu().permute(0, 2, 3, 1).numpy()
            pil_img = pipe.numpy_to_pil(img)[0]
    else:
        pil_img = Image.new('RGB', (128, 128), (0,0,0))

    # NEAREST upscale keeps the chunky "pixel" CRT look.
    pil_img = pil_img.resize((1024, 1024), resample=Image.NEAREST)
    draw = ImageDraw.Draw(pil_img)
    font = ImageFont.load_default()

    if k_obj.system_state == "BOOT":
        y = 50
        for log in k_obj.boot_log:
            draw.text((50, y), log, fill=(0, 255, 0), font=font)
            y += 20

    # 128-pixel latent space -> 1024-pixel canvas: factor 8.
    scale = 8
    for p in k_obj.processes.values():
        wx, wy = p.position
        cx = (wx * scale) + 10
        cy = (wy * scale) + 30
        draw.text(((wx*scale)+5, (wy*scale)+5), p.name, fill=(255,255,255), font=font)
        if p.text_buffer:
            # Only the last 20 lines fit inside a window.
            for i, line in enumerate(p.text_buffer[-20:]):
                col = (0, 255, 0) if p.app_type == "cmd" else (0,0,0)
                if p.app_type == "doom": col = (255, 50, 50)
                draw.text((cx, cy + (i*15)), line, fill=col, font=font)

    buf = io.BytesIO()
    pil_img.save(buf, format="JPEG", quality=85)
    return base64.b64encode(buf.getvalue()).decode()
483
+
484
@app.route('/')
def index():
    """Serve the embedded single-page CRT GUI."""
    return HTML_TEMPLATE
487
+
488
@sock.route('/kernel')
def websocket_kernel(ws):
    """Per-connection OS session: boots a fresh kernel, then streams frames.

    Protocol (JSON over WebSocket):
      in:  {"type": "click", "x": int, "y": int}  # coords in 128x128 space
           {"type": "keydown", "key": str}
      out: {"type": "frame_update", "data": <base64 JPEG>}
    """
    global kernel
    # One-time lazy AI init; every new connection resets the kernel state.
    if not pipe: init_ai()
    init_drivers()
    kernel = OSKernel()

    # Fake BIOS boot sequence: one frame per log line.
    boot_msgs = ["BIOS DATE 01/23/2026", "DETECTING NEURAL NET...", "LOADING VFS...", "BOOTING..."]
    for msg in boot_msgs:
        kernel.boot_log.append(msg)
        ws.send(json.dumps({"type": "frame_update", "data": render_frame(kernel)}))
        time.sleep(0.5)

    kernel.init_desktop()
    ws.send(json.dumps({"type": "frame_update", "data": render_frame(kernel)}))

    # Main event loop: one incoming message -> at most one frame out.
    while True:
        try:
            data = ws.receive()
            if not data: break
            msg = json.loads(data)
            needs_update = False

            if msg['type'] == 'click':
                x, y = msg['x'], msg['y']
                # Bottom-left strip (y>=122, x<24) is the Start button.
                if y >= 122 and x < 24:
                    kernel.start_menu_open = not kernel.start_menu_open
                    needs_update = True
                elif kernel.system_state == "DESKTOP":
                    # Desktop icons are 8x8 hit boxes.
                    for i in kernel.desktop_icons:
                        if i['x'] <= x < i['x']+8 and i['y'] <= y < i['y']+8:
                            kernel.spawn(i['app'], 30, 20)
                            needs_update = True
                    # Topmost window under the cursor wins (reverse z-order).
                    for p in sorted(kernel.processes.values(), key=lambda z: z.z_order, reverse=True):
                        px, py = p.position
                        pw, ph = p.size
                        if px <= x < px+pw and py <= y < py+ph:
                            kernel.focus(p.pid)
                            # Top-right 4x4 corner acts as the close button.
                            if py <= y < py+4 and px+pw-4 <= x < px+pw: kernel.kill(p.pid)
                            needs_update = True
                            break

            elif msg['type'] == 'keydown':
                kernel.input(msg['key'])
                if kernel.focused_pid:
                    p = kernel.processes[kernel.focused_pid]
                    # Terminal 'start <app>' requests are fulfilled here.
                    if "spawn_req" in p.meta:
                        kernel.spawn(p.meta.pop("spawn_req"), 40, 40)
                    if p.app_type == "doom" and p.meta.get("needs_update"):
                        p.text_buffer.append(f"ACTION: {p.meta.get('last_key')}")
                        p.meta["needs_update"] = False
                needs_update = True

            if needs_update:
                ws.send(json.dumps({"type": "frame_update", "data": render_frame(kernel)}))

        except Exception as e:
            # Any per-message failure tears down this session; the client
            # auto-reconnects (see the embedded JS).
            print(f"WS Error: {e}")
            break
547
+
548
if __name__ == "__main__":
    # Startup banner, then serve on the Hugging Face Spaces port.
    banner = "=" * 50
    print(banner)
    print(" NEURAL OS MONOLITH v9.2")
    print(" http://0.0.0.0:7860")
    print(banner)
    # Updated to port 7860 for Hugging Face
    app.run(host="0.0.0.0", port=7860, threaded=True)
555
EOF

# 5. Launch the Monolith
# 7860 is the port Hugging Face Spaces routes external HTTP traffic to;
# it must match the port app.run() binds inside app.py.
EXPOSE 7860
CMD ["python", "app.py"]