everydaytok committed on
Commit
54b622e
·
verified ·
1 Parent(s): 70f0561

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +94 -45
app.py CHANGED
@@ -1,7 +1,9 @@
 
1
  import time
 
2
  import collections
3
  import threading
4
- from flask import Flask, jsonify, request
5
  from flask_cors import CORS
6
 
7
  app = Flask(__name__)
@@ -14,95 +16,120 @@ class SimEngine:
14
  self.buffer = collections.deque()
15
  self.running = False
16
 
17
- # Dial Dashboard State
18
- self.mode = 'inference' # 'inference' (free A&B) or 'training' (clamped A&B)
19
- self.distribution = 'uniform' # 'uniform' (50/50 split) or 'individual' (vertex k-values)
20
- self.problem_type = 'add' # 'add' or 'mult'
21
- self.asymmetric = False # dampen retroactive pushes to prevent exploding values
 
 
 
 
 
22
 
23
  self.reset()
24
 
25
  def reset(self):
26
- # Initialize semantic 'embeddings' in 3D latent space. X is the logic value for this simple PoC.
27
  self.nodes = {
28
  'A': {'x': 2.0, 'y': 1.0, 'z': 0.0, 'anchored': False, 'k': 1.0},
29
  'B': {'x': 3.0, 'y': -1.0, 'z': 0.0, 'anchored': False, 'k': 0.8},
30
  'C': {'x': 10.0, 'y': 0.0, 'z': 1.0, 'anchored': True, 'k': 1.0}
31
  }
32
- # One mesh cell connects all 3 constraints
33
  self.cells =[{'id': 'Cell_1', 'a': 'A', 'b': 'B', 'c': 'C'}]
34
  self.buffer.clear()
 
35
  self.logs =[]
36
  self.iteration = 0
37
 
38
  def add_log(self, msg):
39
- self.logs.insert(0, f"Iter {self.iteration}: {msg}")
40
- if len(self.logs) > 50: self.logs.pop() # Keep log buffer clean
41
 
42
  def set_problem(self, target_value):
43
- # Target objective is permanently clamped
44
  self.nodes['C']['x'] = float(target_value)
45
  self.nodes['C']['anchored'] = True
46
 
 
47
  if self.mode == 'training':
48
  self.nodes['A']['anchored'] = True
49
  self.nodes['B']['anchored'] = True
50
- self.add_log(f"Training initiated. C clamped to: {target_value}")
51
- else:
52
  self.nodes['A']['anchored'] = False
53
  self.nodes['B']['anchored'] = False
54
- self.add_log(f"Inference initiated. Free mesh calculating towards: {target_value}")
55
 
56
- self.trigger_cells() # Jumpstart event cascade
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
 
58
  def trigger_cells(self):
59
- """Cell mathematical constraint check. Pushes structural 'error' to buffer if not solved."""
60
  for cell in self.cells:
61
  na, nb, nc = self.nodes[cell['a']], self.nodes[cell['b']], self.nodes[cell['c']]
62
 
63
- # Using basic arithmetic logic in Dim X for visibility
64
  valA, valB, valC = na['x'], nb['x'], nc['x']
65
  predictedC = (valA + valB) if self.problem_type == 'add' else (valA * valB)
66
 
67
  error = predictedC - valC
68
 
69
- if abs(error) > 0.05: # Yield Limit threshold
70
- # Add retroactive tension to A and B
71
  self.buffer.append({'target': cell['a'], 'error_vector': error, 'cell': cell})
72
  self.buffer.append({'target': cell['b'], 'error_vector': error, 'cell': cell})
73
 
74
  def physics_step(self):
75
- if not self.buffer: return False # Queue is empty, logic equilibrium reached.
 
 
 
 
 
 
 
76
 
77
  event = self.buffer.popleft()
78
  t_id, err = event['target'], event['error_vector']
79
  t_node = self.nodes[t_id]
80
 
81
- # Ignore locked objects
82
- if t_node['anchored'] and self.mode != 'training':
83
- return True
84
 
85
- # Implement Asymmetry Rule (Stop resonance explosions)
86
- direction_damper = 0.3 if self.asymmetric else 1.0
87
 
88
  if self.mode == 'inference':
89
- # --- Adapt Topology Geometry ---
90
- # Moves to minimize structural stress based on dials
91
  base_force = (-err * 0.02 * direction_damper)
92
  if self.distribution == 'uniform':
93
  t_node['x'] += base_force
94
- t_node['y'] -= base_force * 0.1 # visual spatial twisting
95
  elif self.distribution == 'individual':
96
  t_node['x'] += base_force * t_node['k']
97
 
98
  elif self.mode == 'training':
99
- # --- Learn Structural Stiffness 'K' ---
100
- # Node geometry is locked. System adjusts how elastic the point is.
101
- base_grad = abs(err) * 0.005 * direction_damper
102
  if self.distribution == 'individual':
103
  t_node['k'] -= base_grad
 
104
 
105
- # Move cascaded, so tell network to re-measure stress.
106
  self.trigger_cells()
107
  self.iteration += 1
108
  return True
@@ -112,11 +139,15 @@ engine = SimEngine()
112
  def run_loop():
113
  while True:
114
  if engine.running: engine.physics_step()
115
- time.sleep(0.015) # Simulated latency/Buffer cycle speed
116
 
117
  threading.Thread(target=run_loop, daemon=True).start()
118
 
119
- # --------- INTERFACE API ---------
 
 
 
 
120
 
121
  @app.route('/state', methods=['GET'])
122
  def get_state():
@@ -125,26 +156,44 @@ def get_state():
125
  'buffer_size': len(engine.buffer),
126
  'iteration': engine.iteration,
127
  'logs': engine.logs,
128
- 'mode': engine.mode
129
  })
130
 
131
  @app.route('/apply_config', methods=['POST'])
132
  def config():
133
  data = request.json
134
- engine.running = False # Pause briefly for logic change
135
-
136
  engine.mode = data.get('mode', engine.mode)
137
  engine.distribution = data.get('distribution', engine.distribution)
138
  engine.problem_type = data.get('problem_type', engine.problem_type)
139
  engine.asymmetric = data.get('asymmetric', engine.asymmetric)
140
- target = data.get('target', 10.0)
141
-
142
- engine.set_problem(target)
143
- engine.add_log(f"[DIALS CHANGED]: phase={engine.mode}, split={engine.distribution}, type={engine.problem_type}, async={engine.asymmetric}")
144
-
145
- engine.running = data.get('is_running', False)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
146
  return jsonify(success=True)
147
 
148
  if __name__ == '__main__':
149
- print("Semantic Latent Topography Core started on :5000.")
150
- app.run(port=7860, debug=True, use_reloader=False)
 
1
+ import os
2
  import time
3
+ import math
4
  import collections
5
  import threading
6
+ from flask import Flask, jsonify, request, send_file
7
  from flask_cors import CORS
8
 
9
  app = Flask(__name__)
 
16
  self.buffer = collections.deque()
17
  self.running = False
18
 
19
+ # State & Toggles
20
+ self.mode = 'inference'
21
+ self.distribution = 'uniform'
22
+ self.problem_type = 'add'
23
+ self.asymmetric = False
24
+
25
+ # Dataset Batch Processor tracking
26
+ self.batch_queue = collections.deque()
27
+ self.batch_results =[]
28
+ self.current_target = None
29
 
30
  self.reset()
31
 
32
  def reset(self):
 
33
  self.nodes = {
34
  'A': {'x': 2.0, 'y': 1.0, 'z': 0.0, 'anchored': False, 'k': 1.0},
35
  'B': {'x': 3.0, 'y': -1.0, 'z': 0.0, 'anchored': False, 'k': 0.8},
36
  'C': {'x': 10.0, 'y': 0.0, 'z': 1.0, 'anchored': True, 'k': 1.0}
37
  }
 
38
  self.cells =[{'id': 'Cell_1', 'a': 'A', 'b': 'B', 'c': 'C'}]
39
  self.buffer.clear()
40
+ self.batch_queue.clear()
41
  self.logs =[]
42
  self.iteration = 0
43
 
44
  def add_log(self, msg):
45
+ self.logs.insert(0, f"[{self.iteration}]: {msg}")
46
+ if len(self.logs) > 60: self.logs.pop()
47
 
48
  def set_problem(self, target_value):
49
+ self.current_target = target_value
50
  self.nodes['C']['x'] = float(target_value)
51
  self.nodes['C']['anchored'] = True
52
 
53
+ # In Training: C is goal. A and B are anchor/hints forcing structural (K) changes.
54
  if self.mode == 'training':
55
  self.nodes['A']['anchored'] = True
56
  self.nodes['B']['anchored'] = True
57
+ else: # Inference: C is goal. A and B float structurally to 'find' the factors
 
58
  self.nodes['A']['anchored'] = False
59
  self.nodes['B']['anchored'] = False
 
60
 
61
+ self.add_log(f"Loading Problem C={target_value} ({self.mode.upper()})")
62
+ self.trigger_cells()
63
+
64
+ def generate_batch(self, size, start_range, end_range):
65
+ """Generates a dataset of Target C problems"""
66
+ self.batch_queue.clear()
67
+ self.batch_results.clear()
68
+ import random
69
+ for _ in range(size):
70
+ self.batch_queue.append(round(random.uniform(start_range, end_range), 2))
71
+
72
+ self.add_log(f"=== BATCH CREATED: {size} Problems ===")
73
+ self.load_next_batch_item()
74
+
75
+ def load_next_batch_item(self):
76
+ if len(self.batch_queue) > 0:
77
+ next_c = self.batch_queue.popleft()
78
+ # Slight random offset so system resets dynamically between batch steps
79
+ self.nodes['A']['x'] += 0.01
80
+ self.set_problem(next_c)
81
+ self.running = True
82
+ else:
83
+ self.running = False
84
+ self.add_log("=== BATCH RUN COMPLETE ===")
85
 
86
  def trigger_cells(self):
 
87
  for cell in self.cells:
88
  na, nb, nc = self.nodes[cell['a']], self.nodes[cell['b']], self.nodes[cell['c']]
89
 
 
90
  valA, valB, valC = na['x'], nb['x'], nc['x']
91
  predictedC = (valA + valB) if self.problem_type == 'add' else (valA * valB)
92
 
93
  error = predictedC - valC
94
 
95
+ if abs(error) > 0.05:
96
+ # Network splits pressure
97
  self.buffer.append({'target': cell['a'], 'error_vector': error, 'cell': cell})
98
  self.buffer.append({'target': cell['b'], 'error_vector': error, 'cell': cell})
99
 
100
    def physics_step(self):
        """Consume one tension event from the buffer and relax the mesh.

        Returns False when the buffer is empty (logic equilibrium reached),
        True otherwise. Called repeatedly from the background run_loop thread.
        """
        # Handle Batch Loading if Equilibrium Reached
        if len(self.buffer) == 0:
            if self.current_target is not None and self.running:
                self.add_log(f"Equilibrium found for C={self.current_target}.")
                self.current_target = None
                # NOTE(review): this blocks the single run_loop thread for
                # half a second — acceptable as visualization pacing, but no
                # other stepping can occur meanwhile.
                time.sleep(0.5) # small visualization pause between batch items
                # Chains to the next batch item; with an empty queue (e.g.
                # after a single run) it stops the engine and logs completion.
                self.load_next_batch_item()
            return False

        event = self.buffer.popleft()
        t_id, err = event['target'], event['error_vector']
        t_node = self.nodes[t_id]

        # Anchored nodes are immovable in inference; in training they fall
        # through so their stiffness 'k' can still be adjusted below.
        if t_node['anchored'] and self.mode != 'training': return True

        # Asymmetric Retroactive Force
        # Dampens retroactive pushes (to 40%) when the asymmetric dial is on.
        direction_damper = 0.4 if self.asymmetric else 1.0

        if self.mode == 'inference':
            # Geometry adapts: nudge x opposite the error to reduce stress.
            base_force = (-err * 0.02 * direction_damper)
            if self.distribution == 'uniform':
                t_node['x'] += base_force
                t_node['y'] -= base_force * 0.08  # cosmetic spatial twist only
            elif self.distribution == 'individual':
                t_node['x'] += base_force * t_node['k']

        elif self.mode == 'training':
            # Geometry is fixed; only the per-node stiffness 'k' learns.
            # NOTE(review): with distribution == 'uniform' nothing is updated
            # here, yet trigger_cells below re-enqueues the same error —
            # presumably intentional churn; confirm against the UI's dials.
            base_grad = abs(err) * 0.01 * direction_damper
            if self.distribution == 'individual':
                t_node['k'] -= base_grad
                t_node['k'] = max(0.001, min(t_node['k'], 5.0)) # bounding constraints for coefficients

        # The move may have changed cell stress — re-measure and re-enqueue.
        self.trigger_cells()
        self.iteration += 1
        return True
 
139
def run_loop():
    """Daemon worker: advance the physics engine whenever it is flagged running."""
    while True:
        if engine.running:
            engine.physics_step()
        # Fixed cadence between steps, whether or not a step was taken.
        time.sleep(0.02)

threading.Thread(target=run_loop, daemon=True).start()
145
 
146
# --- HOSTING ENDPOINTS ---
@app.route('/')
def home():
    """Serve the bundled single-page UI from the app root."""
    return send_file("index.html")
151
 
152
  @app.route('/state', methods=['GET'])
153
  def get_state():
 
156
  'buffer_size': len(engine.buffer),
157
  'iteration': engine.iteration,
158
  'logs': engine.logs,
159
+ 'batch_size': len(engine.batch_queue)
160
  })
161
 
162
  @app.route('/apply_config', methods=['POST'])
163
  def config():
164
  data = request.json
 
 
165
  engine.mode = data.get('mode', engine.mode)
166
  engine.distribution = data.get('distribution', engine.distribution)
167
  engine.problem_type = data.get('problem_type', engine.problem_type)
168
  engine.asymmetric = data.get('asymmetric', engine.asymmetric)
169
+ engine.add_log(f"CONFIG UPDATED: mode={engine.mode}")
170
+ return jsonify(success=True)
171
+
172
@app.route('/single_run', methods=['POST'])
def single_run():
    """Load one target problem from the request body and start the solver."""
    payload = request.json
    target = float(payload.get('target', 10.0))
    engine.set_problem(target)
    engine.running = True
    return jsonify(success=True)
178
+
179
@app.route('/batch_run', methods=['POST'])
def batch_run():
    """Pause the engine, then generate and start a batch of random problems."""
    payload = request.json
    engine.running = False  # halt stepping while the dataset is rebuilt
    engine.generate_batch(
        int(payload.get('batch_size', 10)),
        float(payload.get('val_min', -50.0)),
        float(payload.get('val_max', 50.0)),
    )
    return jsonify(success=True)
188
+
189
@app.route('/halt', methods=['POST'])
def halt_sys():
    """Emergency stop: freeze the loop and drop all pending work."""
    engine.running = False
    for queue in (engine.buffer, engine.batch_queue):
        queue.clear()
    engine.add_log("=== HARD HALT EXEC === Queues Cleared.")
    return jsonify(success=True)
196
 
197
if __name__ == '__main__':
    # Hugging Face Spaces route public traffic to port 7860; bind to all
    # interfaces so the container proxy can reach the server.
    app.run(host='0.0.0.0', port=7860)