everydaytok committed on
Commit
10b4721
·
verified ·
1 Parent(s): 4d4378a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +139 -224
app.py CHANGED
@@ -1,235 +1,150 @@
1
- import numpy as np
2
- import matplotlib.pyplot as plt
3
- import io, base64
4
- from fastapi import FastAPI
5
- from fastapi.responses import HTMLResponse
6
- from sklearn.decomposition import PCA
7
- from sklearn.cluster import AgglomerativeClustering
8
- from sklearn.metrics.pairwise import euclidean_distances
9
  import time
10
-
11
- app = FastAPI()
12
-
13
- class AdaptiveVectorSystem:
14
- def _calculate_score(self, target, constituents):
15
- """
16
- Generates a score (-1.0 to 10.0) representing convergence 'effort'.
17
- """
18
- if len(constituents) == 0:
19
- return -1.0
20
-
21
- # Calculate distances from the calculated center to the points used
22
- dists = np.linalg.norm(constituents - target, axis=1)
23
-
24
- # Mean distance (how far usually)
25
- mean_dist = np.mean(dists)
26
- # Standard Deviation (how chaotic/scattered)
27
- std_dev = np.std(dists)
28
-
29
- # Heuristic: We want a high score for Low Mean and Low Std Dev.
30
- # We normalize based on the mean_dist itself to make it scale-invariant.
31
- # If std_dev is high relative to mean_dist, score drops.
32
 
33
- variation_coefficient = (std_dev / (mean_dist + 1e-9))
 
 
 
 
34
 
35
- # Base score starts at 10
36
- # We penalize for high variation (chaos) and raw distance
37
- penalty = (variation_coefficient * 6.0) + (mean_dist * 0.1)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
 
39
- score = 10.0 - penalty
40
- return max(-1.0, min(10.0, score))
41
-
42
- def predict_point(self, vectors, mode='global'):
43
- data = np.array(vectors)
44
-
45
- # --- GLOBAL MODE ---
46
- # "Force a fit for everyone."
47
- # Great for converged data, terrible for split data.
48
- if mode == 'global':
49
- center = np.mean(data, axis=0)
50
- score = self._calculate_score(center, data)
51
- return center, score, data
52
-
53
- # --- CLUSTER MODE ---
54
- # "Find the strongest gravity well."
55
- elif mode == 'cluster':
56
- # 1. Compute Pairwise Distances to understand the "Scale" of the data
57
- dist_matrix = euclidean_distances(data, data)
58
- # Flatten matrix and remove zeros (self-distance) to get average spacing
59
- all_dists = dist_matrix[np.triu_indices(len(data), k=1)]
60
- avg_global_dist = np.mean(all_dists)
61
-
62
- # 2. DYNAMIC THRESHOLDING
63
- # We say: To belong to a group, points must be significantly closer
64
- # than the global average. (e.g., 0.6 * average)
65
- dynamic_thresh = avg_global_dist * 0.65
66
-
67
- # 3. Cluster with this dynamic threshold
68
- clusterer = AgglomerativeClustering(
69
- n_clusters=None,
70
- metric='euclidean',
71
- linkage='ward',
72
- distance_threshold=dynamic_thresh
73
- )
74
- labels = clusterer.fit_predict(data)
75
-
76
- # 4. Find the "Best" Cluster
77
- # We look for the Largest cluster, but we ignore "Noise" (clusters of size 1 or 2)
78
- unique_labels, counts = np.unique(labels, return_counts=True)
79
 
80
- # Filter out tiny clusters (noise)
81
- valid_clusters = [l for l, c in zip(unique_labels, counts) if c > 2]
 
82
 
83
- if not valid_clusters:
84
- # Fallback if everything is noise: treat everything as one group
85
- return self.predict_point(data, mode='global')
86
-
87
- # Pick largest of the valid clusters
88
- # (You could also pick the 'densest' here, but largest is usually safest)
89
- best_label = max(valid_clusters, key=lambda l: counts[np.where(unique_labels == l)][0])
90
 
91
- # 5. Extract Data
92
- cluster_vectors = data[labels == best_label]
93
- center = np.mean(cluster_vectors, axis=0)
94
- score = self._calculate_score(center, cluster_vectors)
95
-
96
- return center, score, cluster_vectors
97
 
98
- # --- VISUALIZATION LOGIC ---
99
- def generate_plot(mode='global', scenario='split'):
100
- # Generate 128-dimension vectors
101
- np.random.seed(int(time.time())
102
- ) # Consistent seed for demo
103
-
104
- if scenario == 'split':
105
- # Create two dense islands far apart
106
- # Island 1: centered at 0
107
- c1 = np.random.normal(0, 0.5, (100, 128))
108
- # Island 2: centered at 10 (In 128D, distance approx sqrt(128*100) = ~113 units away)
109
- c2 = np.random.normal(8, 0.5, (100, 128))
110
- # Noise: Random scatter
111
- noise = np.random.uniform(-5, 15, (10, 128))
112
- data = np.vstack([c1, c2, noise])
113
- else:
114
- # One Tight Cluster
115
- data = np.random.normal(0, 1.0, (50, 128))
116
-
117
- # Run System
118
- sys = AdaptiveVectorSystem()
119
- center_vec, score, used_vectors = sys.predict_point(data, mode)
120
-
121
- # PCA for 2D View
122
- # Important: Fit PCA on Input + Center so they share the same coordinate space
123
- pca = PCA(n_components=2)
124
- all_points = np.vstack([data, center_vec])
125
- projected = pca.fit_transform(all_points)
126
-
127
- pts_2d = projected[:-1]
128
- center_2d = projected[-1]
129
-
130
- # --- Plotting ---
131
- plt.figure(figsize=(7, 5), facecolor='#202020')
132
- ax = plt.gca()
133
- ax.set_facecolor('#303030')
134
-
135
- # Logic to identify which points were used (for coloring)
136
- # We compare the rows of 'used_vectors' to 'data' to find indices
137
- # Note: In production, pass indices around. For demo, we do a quick check.
138
- is_used = np.zeros(len(data), dtype=bool)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
139
 
140
- # A quick way to mask used vectors using broadcasting approximation
141
- # (Since floats are tricky, we assume exact match from the split)
142
- if mode == 'global':
143
- is_used[:] = True
144
- else:
145
- # Brute force match for visualization accuracy
146
- for uv in used_vectors:
147
- for i, dv in enumerate(data):
148
- if np.array_equal(uv, dv):
149
- is_used[i] = True
150
- break
151
-
152
- # 1. Plot IGNORED points (Grey, transparent)
153
- if not np.all(is_used):
154
- plt.scatter(pts_2d[~is_used, 0], pts_2d[~is_used, 1],
155
- c='#555555', alpha=0.3, s=30, label='Ignored (Noise/Other)')
156
-
157
- # 2. Plot USED points (Bright Cyan)
158
- plt.scatter(pts_2d[is_used, 0], pts_2d[is_used, 1],
159
- c='#00e5ff', alpha=0.8, s=40, edgecolors='none', label='Constituent Inputs')
160
-
161
- # 3. Draw "Gravity Lines" (faint lines from used points to center)
162
- # Only draw lines if there aren't too many points, to keep it clean
163
- if np.sum(is_used) < 100:
164
- for pt in pts_2d[is_used]:
165
- plt.plot([pt[0], center_2d[0]], [pt[1], center_2d[1]],
166
- c='#00e5ff', alpha=0.15, linewidth=1)
167
-
168
- # 4. Plot The PREDICTED POINT (Red X)
169
- plt.scatter(center_2d[0], center_2d[1],
170
- c='#ff3366', s=200, marker='X', edgecolors='white', linewidth=1.5,
171
- label='Generated Vector', zorder=10)
172
-
173
- # Styling
174
- plt.title(f"Mode: {mode.upper()} | Score: {score:.2f}/10", color='white', fontsize=12, pad=10)
175
- plt.grid(True, color='#444444', linestyle='--', alpha=0.5)
176
 
177
- # Legend formatting
178
- leg = plt.legend(facecolor='#303030', edgecolor='#555555', fontsize=8, loc='best')
179
- for text in leg.get_texts():
180
- text.set_color("white")
181
-
182
- # Axis colors
183
- ax.tick_params(axis='x', colors='white')
184
- ax.tick_params(axis='y', colors='white')
185
- for spine in ax.spines.values():
186
- spine.set_edgecolor('#555555')
187
-
188
- buf = io.BytesIO()
189
- plt.savefig(buf, format='png', bbox_inches='tight')
190
- plt.close()
191
- return base64.b64encode(buf.getvalue()).decode('utf-8')
192
-
193
- @app.get("/", response_class=HTMLResponse)
194
- async def root():
195
- img_global = generate_plot('global', 'split')
196
- img_cluster = generate_plot('cluster', 'split')
197
- img_tight = generate_plot('global', 'tight')
198
 
199
- return f"""
200
- <html>
201
- <body style="font-family: 'Segoe UI', sans-serif; background:#121212; color:#e0e0e0; text-align:center; padding:20px;">
202
- <h1 style="margin-bottom:10px;">Vector Convergence System</h1>
203
- <p style="color:#888; margin-bottom:40px;">Dynamic Thresholding Algorithm</p>
204
-
205
- <div style="display:flex; flex-wrap:wrap; justify-content:center; gap:20px;">
206
- <!-- SCENARIO A -->
207
- <div style="background:#1e1e1e; padding:20px; border-radius:12px; border:1px solid #333;">
208
- <h2 style="color:#aaa; border-bottom:1px solid #333; padding-bottom:10px;">Scenario: Split Data</h2>
209
- <div style="display:flex; gap:20px;">
210
- <div>
211
- <h3 style="color:#00e5ff;">Global Mode</h3>
212
- <div style="font-size:0.8em; color:#888; margin-bottom:5px;">Averages everything (Score -1 to 2)</div>
213
- <img src="data:image/png;base64,{img_global}" width="400" style="border-radius:8px;"/>
214
- </div>
215
- <div>
216
- <h3 style="color:#ff3366;">Cluster Mode (Revised)</h3>
217
- <div style="font-size:0.8em; color:#888; margin-bottom:5px;">Identifies largest mass (Score 8 to 10)</div>
218
- <img src="data:image/png;base64,{img_cluster}" width="400" style="border-radius:8px;"/>
219
- </div>
220
- </div>
221
- </div>
222
-
223
- <!-- SCENARIO B -->
224
- <div style="background:#1e1e1e; padding:20px; border-radius:12px; border:1px solid #333;">
225
- <h2 style="color:#aaa; border-bottom:1px solid #333; padding-bottom:10px;">Scenario: Converged Data</h2>
226
- <div>
227
- <h3 style="color:#00e5ff;">Global Mode</h3>
228
- <div style="font-size:0.8em; color:#888; margin-bottom:5px;">Efficient calculation (Score ~10)</div>
229
- <img src="data:image/png;base64,{img_tight}" width="400" style="border-radius:8px;"/>
230
- </div>
231
- </div>
232
- </div>
233
- </body>
234
- </html>
235
- """
 
 
 
 
 
 
 
 
 
1
  import time
2
+ import collections
3
+ import threading
4
+ from flask import Flask, jsonify, request
5
+ from flask_cors import CORS
6
+
7
+ app = Flask(__name__)
8
+ CORS(app)
9
+
10
+ class SimEngine:
11
+ def __init__(self):
12
+ self.nodes = {}
13
+ self.cells =[]
14
+ self.buffer = collections.deque()
15
+ self.running = False
 
 
 
 
 
 
 
 
16
 
17
+ # Dial Dashboard State
18
+ self.mode = 'inference' # 'inference' (free A&B) or 'training' (clamped A&B)
19
+ self.distribution = 'uniform' # 'uniform' (50/50 split) or 'individual' (vertex k-values)
20
+ self.problem_type = 'add' # 'add' or 'mult'
21
+ self.asymmetric = False # dampen retroactive pushes to prevent exploding values
22
 
23
+ self.reset()
24
+
25
+ def reset(self):
26
+ # Initialize semantic 'embeddings' in 3D latent space. X is the logic value for this simple PoC.
27
+ self.nodes = {
28
+ 'A': {'x': 2.0, 'y': 1.0, 'z': 0.0, 'anchored': False, 'k': 1.0},
29
+ 'B': {'x': 3.0, 'y': -1.0, 'z': 0.0, 'anchored': False, 'k': 0.8},
30
+ 'C': {'x': 10.0, 'y': 0.0, 'z': 1.0, 'anchored': True, 'k': 1.0}
31
+ }
32
+ # One mesh cell connects all 3 constraints
33
+ self.cells =[{'id': 'Cell_1', 'a': 'A', 'b': 'B', 'c': 'C'}]
34
+ self.buffer.clear()
35
+ self.logs =[]
36
+ self.iteration = 0
37
+
38
+ def add_log(self, msg):
39
+ self.logs.insert(0, f"Iter {self.iteration}: {msg}")
40
+ if len(self.logs) > 50: self.logs.pop() # Keep log buffer clean
41
+
42
+ def set_problem(self, target_value):
43
+ # Target objective is permanently clamped
44
+ self.nodes['C']['x'] = float(target_value)
45
+ self.nodes['C']['anchored'] = True
46
 
47
+ if self.mode == 'training':
48
+ self.nodes['A']['anchored'] = True
49
+ self.nodes['B']['anchored'] = True
50
+ self.add_log(f"Training initiated. C clamped to: {target_value}")
51
+ else:
52
+ self.nodes['A']['anchored'] = False
53
+ self.nodes['B']['anchored'] = False
54
+ self.add_log(f"Inference initiated. Free mesh calculating towards: {target_value}")
55
+
56
+ self.trigger_cells() # Jumpstart event cascade
57
+
58
+ def trigger_cells(self):
59
+ """Cell mathematical constraint check. Pushes structural 'error' to buffer if not solved."""
60
+ for cell in self.cells:
61
+ na, nb, nc = self.nodes[cell['a']], self.nodes[cell['b']], self.nodes[cell['c']]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
62
 
63
+ # Using basic arithmetic logic in Dim X for visibility
64
+ valA, valB, valC = na['x'], nb['x'], nc['x']
65
+ predictedC = (valA + valB) if self.problem_type == 'add' else (valA * valB)
66
 
67
+ error = predictedC - valC
 
 
 
 
 
 
68
 
69
+ if abs(error) > 0.05: # Yield Limit threshold
70
+ # Add retroactive tension to A and B
71
+ self.buffer.append({'target': cell['a'], 'error_vector': error, 'cell': cell})
72
+ self.buffer.append({'target': cell['b'], 'error_vector': error, 'cell': cell})
 
 
73
 
74
+ def physics_step(self):
75
+ if not self.buffer: return False # Queue is empty, logic equilibrium reached.
76
+
77
+ event = self.buffer.popleft()
78
+ t_id, err = event['target'], event['error_vector']
79
+ t_node = self.nodes[t_id]
80
+
81
+ # Ignore locked objects
82
+ if t_node['anchored'] and self.mode != 'training':
83
+ return True
84
+
85
+ # Implement Asymmetry Rule (Stop resonance explosions)
86
+ direction_damper = 0.3 if self.asymmetric else 1.0
87
+
88
+ if self.mode == 'inference':
89
+ # --- Adapt Topology Geometry ---
90
+ # Moves to minimize structural stress based on dials
91
+ base_force = (-err * 0.02 * direction_damper)
92
+ if self.distribution == 'uniform':
93
+ t_node['x'] += base_force
94
+ t_node['y'] -= base_force * 0.1 # visual spatial twisting
95
+ elif self.distribution == 'individual':
96
+ t_node['x'] += base_force * t_node['k']
97
+
98
+ elif self.mode == 'training':
99
+ # --- Learn Structural Stiffness 'K' ---
100
+ # Node geometry is locked. System adjusts how elastic the point is.
101
+ base_grad = abs(err) * 0.005 * direction_damper
102
+ if self.distribution == 'individual':
103
+ t_node['k'] -= base_grad
104
+
105
+ # Move cascaded, so tell network to re-measure stress.
106
+ self.trigger_cells()
107
+ self.iteration += 1
108
+ return True
109
+
110
+ engine = SimEngine()
111
+
112
+ def run_loop():
113
+ while True:
114
+ if engine.running: engine.physics_step()
115
+ time.sleep(0.015) # Simulated latency/Buffer cycle speed
116
+
117
+ threading.Thread(target=run_loop, daemon=True).start()
118
+
119
+ # --------- INTERFACE API ---------
120
+
121
+ @app.route('/state', methods=['GET'])
122
+ def get_state():
123
+ return jsonify({
124
+ 'nodes': engine.nodes,
125
+ 'buffer_size': len(engine.buffer),
126
+ 'iteration': engine.iteration,
127
+ 'logs': engine.logs,
128
+ 'mode': engine.mode
129
+ })
130
+
131
+ @app.route('/apply_config', methods=['POST'])
132
+ def config():
133
+ data = request.json
134
+ engine.running = False # Pause briefly for logic change
135
 
136
+ engine.mode = data.get('mode', engine.mode)
137
+ engine.distribution = data.get('distribution', engine.distribution)
138
+ engine.problem_type = data.get('problem_type', engine.problem_type)
139
+ engine.asymmetric = data.get('asymmetric', engine.asymmetric)
140
+ target = data.get('target', 10.0)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
 
142
+ engine.set_problem(target)
143
+ engine.add_log(f"[DIALS CHANGED]: phase={engine.mode}, split={engine.distribution}, type={engine.problem_type}, async={engine.asymmetric}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
144
 
145
+ engine.running = data.get('is_running', False)
146
+ return jsonify(success=True)
147
+
148
+ if __name__ == '__main__':
149
+ print("Semantic Latent Topography Core started on :5000.")
150
+ app.run(port=7860, debug=True, use_reloader=False)