everydaytok committed on
Commit
20b49c8
·
verified ·
1 Parent(s): 6fd6d6a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +485 -102
app.py CHANGED
@@ -1,151 +1,534 @@
1
- import os
2
  import time
3
- import collections
4
- import threading
5
  import random
 
 
 
 
 
6
  from fastapi import FastAPI
7
  from fastapi.responses import HTMLResponse, FileResponse
8
  from fastapi.middleware.cors import CORSMiddleware
 
9
 
10
  app = FastAPI()
11
- app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12
 
13
  class SimEngine:
14
  def __init__(self):
15
- self.nodes = {}
 
 
16
  self.running = False
17
- self.mode = 'inference'
18
- self.architecture = 'additive' # How the mesh physically works
19
- self.dataset_type = 'housing' # What data we are forcing into it
20
- self.batch_queue = collections.deque()
21
- self.logs =[]
22
- self.iteration = 0
23
- self.current_error = 0.0
24
- self.reset()
25
-
26
- def reset(self):
27
- # A & B are inputs, C is output. K-factors are the elasticity of the connections.
28
- self.nodes = {
29
- 'A': {'x': 2.0, 'y': 2.0, 'anchored': True, 'k': random.uniform(0.1, 1.0)},
30
- 'B': {'x': 3.0, 'y': -2.0, 'anchored': True, 'k': random.uniform(0.1, 1.0)},
31
- 'C': {'x': 5.0, 'y': 0.0, 'anchored': False, 'k': 1.0}
32
- }
33
- self.batch_queue.clear()
34
- self.logs =[]
35
  self.iteration = 0
36
  self.current_error = 0.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
 
38
- def add_log(self, msg):
39
- self.logs.insert(0, f"[{self.iteration}]: {msg}")
40
- if len(self.logs) > 15: self.logs.pop()
 
 
 
41
 
42
- def set_problem(self, a, b, c_target=None):
43
- self.nodes['A']['x'] = float(a)
44
- self.nodes['B']['x'] = float(b)
45
- if self.mode == 'training':
46
- self.nodes['C']['x'] = float(c_target)
47
- self.nodes['C']['anchored'] = True
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
48
  else:
49
- self.nodes['C']['anchored'] = False
50
- # Start C at zero to watch it drift to the answer
51
- self.nodes['C']['x'] = 0.0
52
 
53
- def physics_step(self):
54
- na, nb, nc = self.nodes['A'], self.nodes['B'], self.nodes['C']
55
-
56
- # 1. CORE ARCHITECTURE LOGIC (Completely blind to the dataset)
57
- if self.architecture == 'additive':
58
- prediction = (na['x'] * na['k']) + (nb['x'] * nb['k'])
59
- else: # multiplicative
60
- prediction = (na['x'] * na['k']) * (nb['x'] * nb['k'])
61
-
62
- self.current_error = prediction - nc['x']
63
-
64
- # Check for convergence
65
- if abs(self.current_error) < 0.05:
66
- if self.batch_queue:
67
- p = self.batch_queue.popleft()
68
- self.set_problem(p['a'], p['b'], p['c'])
69
- return True
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  else:
71
- self.running = False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
72
  return False
73
 
74
- # 2. RESOLVING TENSION (Push / Pull)
75
- if self.mode == 'inference':
76
- # Shift the output node C until tension reaches zero
77
- nc['x'] += self.current_error * 0.1
78
-
79
- elif self.mode == 'training':
80
- # The structure is locked. Tension deforms the K (Stiffness) factors.
81
- lr = 0.01 # Learning elasticity rate
82
-
83
- if self.architecture == 'additive':
84
- # Force distributes proportionally based on node input value
85
- na['k'] -= self.current_error * na['x'] * lr
86
- nb['k'] -= self.current_error * nb['x'] * lr
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
  else:
88
- # Chain rule tension for multiplication
89
- na['k'] -= self.current_error * (nb['x'] * nb['k']) * na['x'] * (lr * 0.01)
90
- nb['k'] -= self.current_error * (na['x'] * na['k']) * nb['x'] * (lr * 0.01)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91
 
92
- self.iteration += 1
93
- return True
94
 
95
  engine = SimEngine()
96
 
 
97
  def run_loop():
98
  while True:
99
- if engine.running: engine.physics_step()
 
100
  time.sleep(0.04)
101
 
 
102
  threading.Thread(target=run_loop, daemon=True).start()
103
 
 
104
  @app.get("/", response_class=HTMLResponse)
105
- async def get_ui(): return FileResponse("index.html")
 
 
106
 
107
  @app.get("/state")
108
  async def get_state():
109
- return {'nodes': engine.nodes, 'error': engine.current_error, 'iter': engine.iteration, 'logs': engine.logs}
 
110
 
111
  @app.post("/config")
112
  async def config(data: dict):
113
- engine.mode = data['mode']
114
- engine.architecture = data['architecture']
115
- engine.dataset_type = data['dataset']
116
- engine.running = False
117
- engine.reset()
118
- engine.add_log(f"Arch: {engine.architecture} | Data: {engine.dataset_type}")
119
  return {"ok": True}
120
 
121
- @app.post("/generate")
122
- async def generate(data: dict):
123
- engine.batch_queue.clear()
124
- for _ in range(30):
125
- a = random.uniform(2, 10)
126
- b = random.uniform(2, 10)
127
-
128
- # This is the dataset environment. The mesh DOES NOT know these rules.
129
- # It must learn them by adjusting K_a and K_b.
130
- if engine.dataset_type == 'housing':
131
- c = (a * 2.5) + (b * 1.2) # Hidden weights: 2.5 and 1.2
132
- elif engine.dataset_type == 'subtraction':
133
- c = (a * 1.0) + (b * -1.0) # Hidden weights: 1.0 and -1.0
134
- elif engine.dataset_type == 'multiplication':
135
- c = a * b
136
-
137
- engine.batch_queue.append({'a': a, 'b': b, 'c': c})
138
-
139
- p = engine.batch_queue.popleft()
140
- engine.set_problem(p['a'], p['b'], p['c'])
141
- engine.running = True
 
142
  return {"ok": True}
143
 
 
144
  @app.post("/halt")
145
  async def halt():
146
- engine.running = False
147
  return {"ok": True}
148
 
 
149
  if __name__ == "__main__":
150
  import uvicorn
151
  uvicorn.run(app, host="0.0.0.0", port=7860)
 
 
1
  import time
2
+ import math
 
3
  import random
4
+ import threading
5
+ import collections
6
+ from dataclasses import dataclass, asdict
7
+ from typing import Optional, List, Dict, Any, Literal
8
+
9
  from fastapi import FastAPI
10
  from fastapi.responses import HTMLResponse, FileResponse
11
  from fastapi.middleware.cors import CORSMiddleware
12
+ from pydantic import BaseModel
13
 
14
  app = FastAPI()
15
+ app.add_middleware(
16
+ CORSMiddleware,
17
+ allow_origins=["*"],
18
+ allow_methods=["*"],
19
+ allow_headers=["*"],
20
+ )
21
+
22
+
23
@dataclass
class EngineConfig:
    """Tunable knobs for the simulation engine (set via the /config endpoint)."""

    architecture: str = "additive"  # additive | multiplicative | affine | bilinear | gated
    coeff_mode: str = "single_k"  # single_k | triple_k | per_edge_k
    topology: str = "single_cell"  # single_cell | chain | mesh
    dataset_family: str = "housing"  # housing | subtraction | multiplication | mixed | symbolic
    mode: str = "training"  # training | inference
    num_cells: int = 3  # cell count for chain/mesh topologies (min 2 enforced in _build_cells)
    learning_rate: float = 0.01  # gradient-descent step size used in training mode
    damping: float = 0.12  # blend factor pulling each updated coefficient back toward its old value
    coupling: float = 0.05  # weight of the neighbor-agreement term in loss / inference drift
    batch_size: int = 24  # default sample count for /generate_batch
    sample_noise: float = 0.0  # half-width of uniform noise added to generated targets
36
+
37
+
38
@dataclass
class CellState:
    """State of a single simulation cell: inputs, output, coefficients, diagnostics."""

    id: int
    a: float = 0.0  # input A of the current sample
    b: float = 0.0  # input B of the current sample
    c: float = 0.0  # output value (pinned to the target while anchored)
    target: Optional[float] = None  # desired output, when the sample provides one
    label: str = ""  # dataset label of the current sample
    k: float = 1.0  # shared coefficient (coeff_mode == "single_k")
    ka: float = 1.0  # coefficient on input A (per-edge / triple modes)
    kb: float = 1.0  # coefficient on input B (per-edge / triple modes)
    kc: float = 0.0  # bias / gate coefficient (triple_k mode)
    prediction: float = 0.0  # last architecture prediction for this cell
    error: float = 0.0  # last prediction-vs-target (or vs. c) error
    energy: float = 0.0  # squared error of the last step
    force: float = 0.0  # last inference drift applied to c
    anchored: bool = False  # True when c is pinned to the sample target

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict for the /state JSON snapshot."""
        return asdict(self)
58
+
59
 
60
class SimEngine:
    """Spring/mesh learning simulation stepped by a background thread."""

    def __init__(self):
        # Guards all mutable engine state: the run loop and the HTTP
        # handlers touch the engine from different threads.
        self.lock = threading.Lock()
        self.config = EngineConfig()

        self.running = False
        self.iteration = 0
        self.current_error = 0.0
        self.current_loss = 0.0
        self.logs = []

        self.cells: List[CellState] = []
        self.batch_queue = collections.deque()
        self.current_sample: Optional[Dict[str, Any]] = None
        self.last_sample: Optional[Dict[str, Any]] = None

        # Rolling windows served to the UI charts via /state.
        self.loss_history = collections.deque(maxlen=120)
        self.error_history = collections.deque(maxlen=120)

        self.reset_state()
80
+
81
    def reset_state(self):
        """Zero all runtime state and rebuild the cell array.

        NOTE(review): acquires self.lock — callers must NOT already hold the
        lock, since threading.Lock is non-reentrant.
        """
        with self.lock:
            self.iteration = 0
            self.current_error = 0.0
            self.current_loss = 0.0
            self.logs = []
            self.batch_queue.clear()
            self.current_sample = None
            self.last_sample = None
            self.loss_history.clear()
            self.error_history.clear()
            self._build_cells()
            self.add_log("Engine reset.")
94
+
95
    def _build_cells(self):
        """Recreate the cell array for the configured topology.

        single_cell -> exactly one cell; chain/mesh -> at least two cells.
        Coefficients start at small random values so training must move them.
        Called with self.lock already held (see reset_state).
        """
        count = 1 if self.config.topology == "single_cell" else max(2, int(self.config.num_cells))
        self.cells = []
        for i in range(count):
            self.cells.append(
                CellState(
                    id=i,
                    a=0.0,
                    b=0.0,
                    c=0.0,
                    k=random.uniform(0.35, 1.25),
                    ka=random.uniform(0.35, 1.25),
                    kb=random.uniform(0.35, 1.25),
                    kc=random.uniform(-0.25, 0.25),
                )
            )
111
+
112
+ def add_log(self, msg: str):
113
+ stamp = f"[{self.iteration}] {msg}"
114
+ self.logs.insert(0, stamp)
115
+ if len(self.logs) > 20:
116
+ self.logs.pop()
117
+
118
    def configure(self, payload: Dict[str, Any]):
        """Apply a /config payload, then halt and rebuild the engine.

        Missing keys keep their current value. Raises ValueError/TypeError if
        a numeric field cannot be coerced.
        """
        with self.lock:
            self.config.architecture = payload.get("architecture", self.config.architecture)
            self.config.coeff_mode = payload.get("coeff_mode", self.config.coeff_mode)
            self.config.topology = payload.get("topology", self.config.topology)
            self.config.dataset_family = payload.get("dataset_family", self.config.dataset_family)
            self.config.mode = payload.get("mode", self.config.mode)
            self.config.num_cells = int(payload.get("num_cells", self.config.num_cells))
            self.config.learning_rate = float(payload.get("learning_rate", self.config.learning_rate))
            self.config.damping = float(payload.get("damping", self.config.damping))
            self.config.coupling = float(payload.get("coupling", self.config.coupling))
            self.config.batch_size = int(payload.get("batch_size", self.config.batch_size))
            self.config.sample_noise = float(payload.get("sample_noise", self.config.sample_noise))

        # Must run OUTSIDE the locked section: reset_state() acquires
        # self.lock itself, and threading.Lock is non-reentrant.
        self.running = False
        self.reset_state()
        self.add_log(
            f"Config applied: {self.config.architecture} | {self.config.coeff_mode} | "
            f"{self.config.topology} | {self.config.dataset_family} | {self.config.mode}"
        )
138
 
139
    def _sample_housing(self):
        """One affine sample with hidden weights 2.5 and 1.2."""
        a = random.uniform(2, 10)
        b = random.uniform(2, 10)
        c = (2.5 * a) + (1.2 * b) + random.uniform(-self.config.sample_noise, self.config.sample_noise)
        return a, b, c, "housing_affine"

    def _sample_subtraction(self):
        """One sample of a - b (hidden weights 1.0 and -1.0)."""
        a = random.uniform(2, 10)
        b = random.uniform(2, 10)
        c = (1.0 * a) + (-1.0 * b) + random.uniform(-self.config.sample_noise, self.config.sample_noise)
        return a, b, c, "signed_subtraction"

    def _sample_multiplication(self):
        """One sample of the product a * b."""
        a = random.uniform(2, 10)
        b = random.uniform(2, 10)
        c = (a * b) + random.uniform(-self.config.sample_noise, self.config.sample_noise)
        return a, b, c, "multiplicative"

    def _sample_symbolic(self):
        """One sample drawn from a random symbolic branch (affine / signed / hybrid)."""
        a = random.uniform(1, 12)
        b = random.uniform(1, 12)
        branch = random.choice(["affine", "signed_affine", "hybrid"])
        if branch == "affine":
            c = (1.7 * a) + (0.9 * b)
        elif branch == "signed_affine":
            c = (0.8 * a) + (-1.4 * b) + 2.0
        else:
            # hybrid: mixes linear terms with a product interaction
            c = (a * 0.6) + (b * 0.4) + ((a * b) * 0.2)
        c += random.uniform(-self.config.sample_noise, self.config.sample_noise)
        return a, b, c, f"symbolic_{branch}"
169
 
170
    def generate_sample(self, family: Optional[str] = None) -> Dict[str, Any]:
        """Return one {a, b, c, label} sample for *family*.

        Falls back to the configured dataset_family when *family* is None;
        "mixed" recursively picks one of the four concrete families; any
        unknown family degrades to plain addition.
        """
        family = family or self.config.dataset_family
        if family == "housing":
            a, b, c, label = self._sample_housing()
        elif family == "subtraction":
            a, b, c, label = self._sample_subtraction()
        elif family == "multiplication":
            a, b, c, label = self._sample_multiplication()
        elif family == "symbolic":
            a, b, c, label = self._sample_symbolic()
        elif family == "mixed":
            pick = random.choice(["housing", "subtraction", "multiplication", "symbolic"])
            return self.generate_sample(pick)
        else:
            a, b = random.uniform(2, 10), random.uniform(2, 10)
            c, label = a + b, "default_add"
        return {"a": float(a), "b": float(b), "c": float(c), "label": label}
187
+
188
    def _apply_sample_to_cells(self, sample: Dict[str, Any], anchor_output: bool):
        """Broadcast one (a, b, c) sample into every cell and reset diagnostics.

        With anchor_output=True (training) each cell's output c is pinned to
        the sample target; otherwise c starts at 0.0 and must drift there.
        sample["c"] may be None (target-less custom samples).
        """
        self.current_sample = sample
        self.last_sample = sample

        for cell in self.cells:
            cell.a = float(sample["a"])
            cell.b = float(sample["b"])
            cell.target = float(sample["c"]) if sample.get("c") is not None else None
            cell.label = sample.get("label", "")
            cell.anchored = anchor_output

            if anchor_output:
                cell.c = float(sample["c"])
            else:
                cell.c = 0.0

            # Clear per-step diagnostics so the UI does not show stale values.
            cell.prediction = 0.0
            cell.error = 0.0
            cell.energy = 0.0
            cell.force = 0.0
208
+
209
+ def load_sample(self, sample: Dict[str, Any], anchor_output: Optional[bool] = None):
210
+ with self.lock:
211
+ if anchor_output is None:
212
+ anchor_output = self.config.mode == "training"
213
+ self._apply_sample_to_cells(sample, anchor_output=anchor_output)
214
+ self.add_log(
215
+ f"Sample loaded: a={sample['a']:.3f}, b={sample['b']:.3f}, "
216
+ f"c={sample['c']:.3f}, label={sample.get('label', '')}"
217
+ )
218
+
219
    def _coefficient_snapshot(self, cell: CellState):
        """Effective (ka, kb, kc) for *cell* under the current coeff_mode."""
        if self.config.coeff_mode == "single_k":
            return {"ka": cell.k, "kb": cell.k, "kc": cell.k}
        if self.config.coeff_mode == "per_edge_k":
            return {"ka": cell.ka, "kb": cell.kb, "kc": 0.0}
        return {"ka": cell.ka, "kb": cell.kb, "kc": cell.kc}

    def _set_trainable_param(self, cell: CellState, name: str, value: float):
        """Write one coefficient by name, clamped to [-20, 20].

        Unknown names are silently ignored.
        """
        value = max(-20.0, min(20.0, value))
        if name == "k":
            cell.k = value
        elif name == "ka":
            cell.ka = value
        elif name == "kb":
            cell.kb = value
        elif name == "kc":
            cell.kc = value

    def _get_trainable_params(self):
        """Names of the coefficients gradient descent may adjust."""
        if self.config.coeff_mode == "single_k":
            return ["k"]
        if self.config.coeff_mode == "per_edge_k":
            return ["ka", "kb"]
        return ["ka", "kb", "kc"]
243
+
244
    def _predict_cell(self, cell: CellState) -> float:
        """Evaluate the configured architecture on one cell's (a, b) inputs.

        single_k mode reuses the scalar k everywhere; the other modes use the
        per-edge coefficients from _coefficient_snapshot. Unknown
        architectures fall back to the additive/affine form.
        """
        coeffs = self._coefficient_snapshot(cell)
        a, b = cell.a, cell.b
        arch = self.config.architecture
        ka, kb, kc = coeffs["ka"], coeffs["kb"], coeffs["kc"]

        if self.config.coeff_mode == "single_k":
            k = cell.k
            if arch == "additive":
                return k * (a + b)
            if arch == "multiplicative":
                return k * (a * b)
            if arch == "affine":
                return (k * a) + (k * b) + k
            if arch == "bilinear":
                return k * (a + b + (a * b))
            if arch == "gated":
                # Sigmoid of k interpolates between sum and product.
                gate = 1.0 / (1.0 + math.exp(-k))
                return gate * (a + b) + (1.0 - gate) * (a * b)
            return k * (a + b)

        if arch == "additive":
            return (ka * a) + (kb * b) + kc
        if arch == "multiplicative":
            return (ka * a) * (kb * b) + kc
        if arch == "affine":
            return (ka * a) + (kb * b) + kc
        if arch == "bilinear":
            return (ka * a) + (kb * b) + (kc * a * b)
        if arch == "gated":
            # kc acts as the gate logit between the linear and product paths.
            gate = 1.0 / (1.0 + math.exp(-kc))
            return gate * ((ka * a) + (kb * b)) + (1.0 - gate) * (a * b)
        return (ka * a) + (kb * b) + kc
277
+
278
+ def _neighbors(self, idx: int):
279
+ if self.config.topology == "single_cell":
280
+ return []
281
+ if self.config.topology == "chain":
282
+ n = []
283
+ if idx - 1 >= 0:
284
+ n.append(idx - 1)
285
+ if idx + 1 < len(self.cells):
286
+ n.append(idx + 1)
287
+ return n
288
+ if self.config.topology == "mesh":
289
+ return [j for j in range(len(self.cells)) if j != idx]
290
+ return []
291
+
292
    def _cell_loss(self, idx: int, preds: List[float]) -> float:
        """Squared target error plus a coupling penalty toward the neighbor mean.

        *preds* must hold the already-computed prediction for every cell.
        """
        cell = self.cells[idx]
        pred = preds[idx]
        loss = 0.0
        if cell.target is not None:
            loss += (pred - cell.target) ** 2

        neighbors = self._neighbors(idx)
        if neighbors:
            neighbor_mean = sum(preds[j] for j in neighbors) / len(neighbors)
            loss += self.config.coupling * ((pred - neighbor_mean) ** 2)

        return loss
305
+
306
    def _numeric_gradient(self, idx: int, param_name: str, eps: float = 1e-4) -> float:
        """Central-difference d(loss)/d(param) for one cell's coefficient.

        Perturbs the parameter by ±eps via the (clamping) setter, re-evaluates
        the cell's local loss, then restores the original value.
        NOTE(review): the setter clamps to ±20, so right at the clamp the two
        probes can be asymmetric — confirm this is acceptable.
        """
        cell = self.cells[idx]
        old = getattr(cell, param_name)

        def local_loss() -> float:
            # Same objective as _cell_loss, but recomputes predictions so the
            # perturbed parameter is reflected in them.
            pred = self._predict_cell(cell)
            loss = 0.0
            if cell.target is not None:
                loss += (pred - cell.target) ** 2
            neighbors = self._neighbors(idx)
            if neighbors:
                neighbor_preds = [self._predict_cell(self.cells[j]) for j in neighbors]
                neighbor_mean = sum(neighbor_preds) / len(neighbor_preds)
                loss += self.config.coupling * ((pred - neighbor_mean) ** 2)
            return loss

        self._set_trainable_param(cell, param_name, old + eps)
        plus = local_loss()

        self._set_trainable_param(cell, param_name, old - eps)
        minus = local_loss()

        self._set_trainable_param(cell, param_name, old)
        return (plus - minus) / (2.0 * eps)
330
+
331
+ def _mean(self, xs: List[float]) -> float:
332
+ return sum(xs) / max(1, len(xs))
333
+
334
    def _load_next_sample_from_batch(self):
        """Pop the next queued sample into the cells; False when the queue is empty.

        Anchors the output only in training mode. Called with self.lock held.
        """
        if self.batch_queue:
            sample = self.batch_queue.popleft()
            self._apply_sample_to_cells(sample, anchor_output=(self.config.mode == "training"))
            self.add_log(f"Next batch sample: {sample.get('label', '')}")
            return True
        return False
341
+
342
+ def physics_step(self):
343
+ with self.lock:
344
+ if not self.running or not self.cells:
345
  return False
346
 
347
+ preds = []
348
+ for cell in self.cells:
349
+ pred = self._predict_cell(cell)
350
+ cell.prediction = pred
351
+ preds.append(pred)
352
+
353
+ global_pred = self._mean(preds)
354
+ target_available = self.current_sample is not None and self.current_sample.get("c") is not None
355
+ target = self._mean([c.target for c in self.cells if c.target is not None]) if target_available else None
356
+
357
+ if self.config.mode == "training":
358
+ total_loss = 0.0
359
+
360
+ for idx, cell in enumerate(self.cells):
361
+ cell.target = target if target is not None else cell.target
362
+ cell.error = (cell.prediction - cell.target) if cell.target is not None else 0.0
363
+ cell.energy = cell.error ** 2
364
+
365
+ for param_name in self._get_trainable_params():
366
+ grad = self._numeric_gradient(idx, param_name)
367
+ old = getattr(cell, param_name)
368
+ new_val = old - (self.config.learning_rate * grad)
369
+ new_val = (1.0 - self.config.damping) * new_val + (self.config.damping * old)
370
+ self._set_trainable_param(cell, param_name, new_val)
371
+
372
+ total_loss += self._cell_loss(idx, preds)
373
+
374
+ self.current_loss = total_loss / max(1, len(self.cells))
375
+ self.current_error = (global_pred - target) if target is not None else global_pred
376
+ self.loss_history.append(self.current_loss)
377
+ self.error_history.append(self.current_error)
378
+
379
+ if target_available and abs(self.current_error) < 0.05 and self.current_loss < 0.01:
380
+ self.add_log("Converged on current sample.")
381
+ if self._load_next_sample_from_batch():
382
+ self.iteration += 1
383
+ return True
384
+ self.running = False
385
+ self.add_log("Batch complete.")
386
+ self.iteration += 1
387
+ return False
388
+
389
  else:
390
+ # Inference mode: output node(s) drift toward the predicted state.
391
+ drift_values = []
392
+ for idx, cell in enumerate(self.cells):
393
+ neighbors = self._neighbors(idx)
394
+ neighbor_mean = self._mean([preds[j] for j in neighbors]) if neighbors else pred
395
+
396
+ drift = (pred - cell.c)
397
+ drift += self.config.coupling * (neighbor_mean - cell.c)
398
+
399
+ cell.force = drift
400
+ cell.c += 0.15 * drift
401
+ cell.error = pred - cell.c
402
+ cell.energy = cell.error ** 2
403
+ drift_values.append(abs(drift))
404
+
405
+ self.current_error = self._mean([cell.error for cell in self.cells])
406
+ self.current_loss = self._mean([cell.energy for cell in self.cells])
407
+ self.loss_history.append(self.current_loss)
408
+ self.error_history.append(self.current_error)
409
+
410
+ if self.current_sample and abs(self.current_error) < 0.05 and self.current_loss < 0.01:
411
+ self.add_log("Inference settled.")
412
+ if self._load_next_sample_from_batch():
413
+ self.iteration += 1
414
+ return True
415
+ self.running = False
416
+ self.add_log("Task complete.")
417
+ self.iteration += 1
418
+ return False
419
+
420
+ # If no target exists, stop when drift is tiny.
421
+ if not target_available and self._mean(drift_values) < 0.002:
422
+ self.running = False
423
+ self.add_log("Inference drift stabilized.")
424
+ self.iteration += 1
425
+ return False
426
+
427
+ self.iteration += 1
428
+ return True
429
+
430
    def start_batch(self, count: int):
        """Queue *count* fresh samples, load the first, and start the run loop.

        Returns True when at least one sample was loaded.
        """
        with self.lock:
            self.batch_queue.clear()
            for _ in range(count):
                self.batch_queue.append(self.generate_sample())
            first = self._load_next_sample_from_batch()
            self.running = first
            self.add_log(f"Batch started with {count} samples.")
            return first

    def set_custom_sample(self, a: float, b: float, c: Optional[float] = None):
        """Load a user-supplied sample and start stepping on it.

        The output is anchored only in training mode AND when a target c was
        supplied; a missing c runs the sample as pure inference.
        """
        with self.lock:
            sample = {"a": float(a), "b": float(b), "c": float(c) if c is not None else None, "label": "custom"}
            self._apply_sample_to_cells(sample, anchor_output=(self.config.mode == "training" and c is not None))
            self.current_sample = sample
            self.last_sample = sample
            self.running = True
            self.add_log(f"Custom sample loaded: a={a}, b={b}, c={c}")
            return sample

    def halt(self):
        """Stop the background stepping without clearing any state."""
        with self.lock:
            self.running = False
            self.add_log("Engine halted.")

    def snapshot(self) -> Dict[str, Any]:
        """Consistent JSON-serializable view of the engine for /state."""
        with self.lock:
            return {
                "config": asdict(self.config),
                "running": self.running,
                "iteration": self.iteration,
                "current_error": self.current_error,
                "current_loss": self.current_loss,
                "cells": [c.to_dict() for c in self.cells],
                "logs": self.logs,
                "last_sample": self.last_sample,
                "current_sample": self.current_sample,
                "batch_remaining": len(self.batch_queue),
                "loss_history": list(self.loss_history),
                "error_history": list(self.error_history),
            }
471
 
 
 
472
 
473
# Single shared engine instance used by every endpoint.
engine = SimEngine()


def run_loop():
    """Background driver: step the engine ~25x/sec while it is running."""
    while True:
        if engine.running:
            engine.physics_step()
        time.sleep(0.04)


# Daemon thread so the process can exit without joining the loop.
threading.Thread(target=run_loop, daemon=True).start()
484
 
485
+
486
@app.get("/", response_class=HTMLResponse)
async def get_ui():
    # Serve the single-page UI shipped next to this file.
    return FileResponse("index.html")


@app.get("/state")
async def get_state():
    # Full engine snapshot; polled by the UI.
    return engine.snapshot()


@app.post("/config")
async def config(data: dict):
    # Apply new settings; this halts and resets the engine.
    engine.configure(data)
    return {"ok": True}


@app.post("/example")
async def example(data: dict):
    # Return one generated sample without loading it into the engine.
    family = data.get("dataset_family", engine.config.dataset_family)
    sample = engine.generate_sample(family)
    return sample
507
+
508
+
509
@app.post("/generate_batch")
async def generate_batch(data: dict):
    # Queue `count` fresh samples and start the run loop on them.
    count = int(data.get("count", engine.config.batch_size))
    engine.start_batch(count)
    return {"ok": True, "count": count}


@app.post("/test_custom")
async def test_custom(data: dict):
    # Load a user-supplied (a, b[, c]) sample; c may be absent / empty / "null".
    a = float(data["a"])
    b = float(data["b"])
    c = data.get("c", None)
    c_val = float(c) if c not in [None, "", "null"] else None
    engine.set_custom_sample(a, b, c_val)
    return {"ok": True}


@app.post("/halt")
async def halt():
    # Stop the background stepping without resetting state.
    engine.halt()
    return {"ok": True}
530
 
531
+
532
if __name__ == "__main__":
    import uvicorn

    # Local development entry point; binds the same port the Space expects.
    uvicorn.run(app, host="0.0.0.0", port=7860)