Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,176 +1,324 @@
|
|
| 1 |
# app.py
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
Minimal autonomous simulation of the sealed governing equation
|
| 5 |
-
dΞ/dt = -Ξ β(Ξ(Ξ) + Ξ¦(Ξ) - Ξ¨(t) + Ξ©(t))
|
| 6 |
-
|
| 7 |
-
Hugging Face Spaces demo - January 11, 2026
|
| 8 |
-
"""
|
| 9 |
|
| 10 |
import gradio as gr
|
| 11 |
-
import
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
import numpy as np
|
| 13 |
-
import
|
| 14 |
-
from
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
#
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
|
|
|
|
|
|
| 26 |
}
|
| 27 |
|
| 28 |
-
|
|
|
|
|
|
|
| 29 |
|
| 30 |
-
#
|
| 31 |
-
def run_ire_simulation(steps, dt, noise_mag, lam, omega_len, intent):
|
| 32 |
-
device = torch.device("cpu")
|
| 33 |
-
|
| 34 |
-
intent_target = torch.tensor([intent[0], intent[1]], dtype=torch.float32, device=device)
|
| 35 |
-
|
| 36 |
-
Xi = torch.zeros(2, device=device, requires_grad=True)
|
| 37 |
-
Omega = []
|
| 38 |
-
trajectory = [Xi.detach().cpu().numpy().copy()]
|
| 39 |
|
| 40 |
-
|
| 41 |
-
|
|
|
|
| 42 |
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
|
| 49 |
-
|
| 50 |
|
| 51 |
-
|
| 52 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 53 |
|
| 54 |
-
|
| 55 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 56 |
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
|
| 61 |
-
|
|
|
|
| 62 |
|
| 63 |
-
|
|
|
|
|
|
|
|
|
|
| 64 |
|
|
|
|
|
|
|
| 65 |
|
| 66 |
-
def
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
|
|
|
|
|
|
|
|
|
| 72 |
|
| 73 |
-
|
| 74 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 75 |
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
coh_text = f"Coherence: {coh:.3f}"
|
| 80 |
-
if coh < COHERENCE_WARN:
|
| 81 |
-
coh_text += " β drifting"
|
| 82 |
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
| 89 |
-
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
| 93 |
-
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
|
| 102 |
-
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
""
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
|
| 114 |
-
|
| 115 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 116 |
)
|
| 117 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 118 |
with gr.Row():
|
| 119 |
-
with gr.Column(scale=
|
| 120 |
-
gr.
|
| 121 |
-
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
step=0.02, label="External noise strength (Ξ¨)")
|
| 127 |
-
|
| 128 |
-
lam = gr.Slider(0.5, 10.0, value=DEFAULT_PARAMS['lambda_'],
|
| 129 |
-
step=0.1, label="Constraint strength (Ξ)")
|
| 130 |
-
|
| 131 |
-
intent_x = gr.Number(value=DEFAULT_PARAMS['intent_x'],
|
| 132 |
-
label="Intent target X")
|
| 133 |
-
intent_y = gr.Number(value=DEFAULT_PARAMS['intent_y'],
|
| 134 |
-
label="Intent target Y")
|
| 135 |
-
|
| 136 |
-
run_btn = gr.Button("Run Autonomous Evolution", variant="primary")
|
| 137 |
-
|
| 138 |
-
with gr.Column(scale=3):
|
| 139 |
-
output_image = gr.Image(label="Manifold Trajectory", type="filepath")
|
| 140 |
-
gr.Markdown(
|
| 141 |
-
"""
|
| 142 |
-
**Legend**
|
| 143 |
-
β’ Green circle β starting point
|
| 144 |
-
β’ Red circle β current position
|
| 145 |
-
β’ Gold star β semantic intent attractor
|
| 146 |
-
β’ Blue path β autonomous trajectory under the sealed equation
|
| 147 |
-
|
| 148 |
-
Higher Ξ β stronger law & refusal of drift
|
| 149 |
-
Higher noise β more chaotic external pressure
|
| 150 |
-
"""
|
| 151 |
-
)
|
| 152 |
-
|
| 153 |
-
# Footer explanation
|
| 154 |
-
gr.Markdown(
|
| 155 |
-
"""
|
| 156 |
-
---
|
| 157 |
-
**Current status**: 2D toy prototype β’ pure mathematical core
|
| 158 |
-
β’ constraint-dominant flow β’ causal memory smoothing β’ stochastic reality pressure
|
| 159 |
-
β’ No language, no high dimensions, no external data β only the law breathing
|
| 160 |
-
"""
|
| 161 |
-
)
|
| 162 |
|
| 163 |
-
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
img = create_plot(traj, (ix, iy), lam, noise, steps)
|
| 167 |
-
return img
|
| 168 |
|
| 169 |
-
|
| 170 |
-
|
| 171 |
-
|
| 172 |
-
|
| 173 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 174 |
|
| 175 |
if __name__ == "__main__":
|
| 176 |
-
|
|
|
|
| 1 |
# app.py
|
| 2 |
+
# CodexFlow / GVBDMS v3 β’ January 11, 2026
|
| 3 |
+
# Persistent provenance ledger + first gentle IRE influence (coherence check + Ξ© smoothing)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
import gradio as gr
|
| 6 |
+
import requests
|
| 7 |
+
import time
|
| 8 |
+
import json
|
| 9 |
+
import hashlib
|
| 10 |
+
import sqlite3
|
| 11 |
import numpy as np
|
| 12 |
+
from typing import Dict, Any, List, Optional, Tuple
|
| 13 |
+
from collections import deque
|
| 14 |
+
|
| 15 |
+
# ──────────────────────────────────────────────────────────────
# CONFIGURATION
# ──────────────────────────────────────────────────────────────

# SQLite file holding the append-only provenance ledger.
DB_PATH = "codexflow_gvbdms_v3.db"
# World Bank REST API root queried by fetch_macro().
WORLD_BANK_BASE = "https://api.worldbank.org/v2"
# Default indicator year pre-filled in the UI.
DEFAULT_YEAR = "2023"

# Human-readable name -> World Bank indicator code.
INDICATORS = {
    "GDP": "NY.GDP.MKTP.CD",
    "INFLATION": "FP.CPI.TOTL.ZG",
    "POPULATION": "SP.POP.TOTL",
}

# Very simple toy intent anchor for first coherence check:
# toy_coherence_score() compares incoming "stability"/"transparency"
# readings against these target values.
INTENT_ANCHOR = {"stability": 0.92, "transparency": 0.88}
COHERENCE_THRESHOLD = 0.65  # records below this are refused

OMEGA_MEMORY = deque(maxlen=8)  # very light causal smoothing buffer (Ω term)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
|
| 35 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 36 |
+
# DATABASE LAYER
|
| 37 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 38 |
|
| 39 |
+
def init_db():
    """Create the provenance ledger table and its lookup indexes if absent.

    Runs at import time (see the call below) so the Space always has a
    usable database before the UI takes its first tick.
    """
    ddl = """
            CREATE TABLE IF NOT EXISTS records (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                ts REAL NOT NULL,
                hash TEXT UNIQUE NOT NULL,
                prev_hash TEXT NOT NULL,
                schema TEXT NOT NULL,
                country TEXT NOT NULL,
                source TEXT NOT NULL,
                reliability REAL NOT NULL,
                latency_s REAL NOT NULL,
                payload_json TEXT NOT NULL,
                metadata_json TEXT NOT NULL,
                coherence_score REAL,
                bytes INTEGER NOT NULL,
                entropy_proxy REAL NOT NULL
            )
        """
    with sqlite3.connect(DB_PATH) as con:
        cursor = con.cursor()
        cursor.execute(ddl)
        # Indexes back the filtered query path (query_records) and time scans.
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_schema_country ON records(schema, country)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_ts ON records(ts)")
        print("Database initialized.")

init_db()
|
| 65 |
|
| 66 |
+
def get_tip_hash() -> str:
    """Return the hash of the newest ledger record, or "GENESIS" when empty."""
    with sqlite3.connect(DB_PATH) as con:
        row = con.execute(
            "SELECT hash FROM records ORDER BY id DESC LIMIT 1"
        ).fetchone()
        if row is None:
            return "GENESIS"
        return row[0]
|
| 72 |
|
| 73 |
+
def insert_record(rec: Dict) -> bool:
    """Append one provenance record to the ledger.

    Returns True on success, False when the hash already exists (the UNIQUE
    constraint fires) — callers treat a duplicate as "already recorded".
    """
    sql = """
                INSERT INTO records (
                    ts, hash, prev_hash, schema, country, source, reliability, latency_s,
                    payload_json, metadata_json, coherence_score, bytes, entropy_proxy
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """
    row = (
        rec["ts"], rec["hash"], rec["prev_hash"], rec["schema"], rec["country"],
        rec["source"], rec["reliability"], rec["latency_s"],
        rec["payload_json"], rec["metadata_json"], rec.get("coherence_score"),
        rec["bytes"], rec["entropy_proxy"],
    )
    try:
        with sqlite3.connect(DB_PATH) as con:
            con.cursor().execute(sql, row)
        return True
    except sqlite3.IntegrityError:
        return False  # duplicate hash
|
| 91 |
|
| 92 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 93 |
+
# UTILITIES
|
| 94 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 95 |
|
| 96 |
+
def canonical_bytes(obj: Any) -> bytes:
    """Deterministic UTF-8 serialization: sorted keys, no whitespace."""
    encoded = json.dumps(obj, sort_keys=True, separators=(",", ":"))
    return encoded.encode('utf-8')

def compute_bit_stats(payload: Dict) -> Tuple[int, float]:
    """Return (byte length, unique-byte ratio) of the canonical payload.

    The ratio is a crude entropy proxy; max(n, 1) guards the empty payload.
    """
    blob = canonical_bytes(payload)
    size = len(blob)
    distinct_ratio = len(set(blob)) / max(size, 1)
    return size, round(distinct_ratio, 6)

def hash_chain(payload: Dict, prev: str) -> str:
    """SHA-256 over the canonical {payload, prev} pair — one ledger chain link."""
    link = {"payload": payload, "prev": prev}
    return hashlib.sha256(canonical_bytes(link)).hexdigest()
|
| 106 |
|
| 107 |
+
def toy_coherence_score(values: Dict[str, float]) -> float:
    """Score how closely *values* tracks INTENT_ANCHOR (0 = far, 1 = exact).

    Anchor keys absent from *values* are skipped; when nothing overlaps the
    score is a neutral 0.5.  This is only a first-pass toy proxy.
    """
    per_key = []
    for name, target in INTENT_ANCHOR.items():
        observed = values.get(name)
        if observed is None:
            continue
        # Relative gap, clamped into [0, 1]; 0.01 floor avoids divide-by-zero.
        rel_gap = abs(observed - target) / max(abs(target), 0.01)
        per_key.append(max(0.0, 1.0 - min(1.0, rel_gap)))
    if not per_key:
        return round(0.5, 4)
    return round(np.mean(per_key), 4)
|
| 116 |
|
| 117 |
+
def omega_smooth(key: str, value: float) -> float:
    """Ω causal smoothing: blend *value* with the last remembered value for *key*.

    Each call appends a {key: smoothed} entry to OMEGA_MEMORY.  The previous
    value is found by scanning the buffer backwards for the most recent entry
    that actually holds *key* — entries for other keys are skipped.  (The old
    code only looked at OMEGA_MEMORY[-1], so the interleaved per-tick calls
    for "price_index" and "confidence" always missed their own history and
    the smoothing silently degenerated to a pass-through.)

    Returns the rounded smoothed value, or the raw value on first sight.
    """
    prev = next(
        (entry[key] for entry in reversed(OMEGA_MEMORY) if key in entry),
        None,
    )
    if prev is None:
        # First observation of this key: remember it unsmoothed.
        OMEGA_MEMORY.append({key: value})
        return value
    # Heavy inertia: 75% history, 25% new observation.
    smoothed = 0.25 * value + 0.75 * prev
    OMEGA_MEMORY.append({key: smoothed})
    return round(smoothed, 6)
|
| 125 |
|
| 126 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 127 |
+
# DATA INGEST & SIGNAL GENERATION
|
| 128 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
|
|
|
|
|
|
|
|
|
| 129 |
|
| 130 |
+
def fetch_macro(country: str, year: str) -> Tuple[Dict, float]:
    """Fetch the configured World Bank indicators for *country*/*year*.

    Returns (payload, latency_seconds).  The payload carries the lower-cased
    indicator names ("gdp", "inflation", "population"); an indicator that
    cannot be fetched or parsed maps to None instead of aborting the tick.

    Fixes vs. the previous revision: the return annotation now matches the
    actual (dict, float) tuple, and the bare ``except:`` is narrowed to
    ``except Exception`` so KeyboardInterrupt/SystemExit are not swallowed.
    """
    result: Dict[str, Any] = {"type": "macro", "country": country, "year": year}
    t0 = time.time()
    for name, code in INDICATORS.items():
        url = f"{WORLD_BANK_BASE}/country/{country}/indicator/{code}?format=json&date={year}&per_page=1"
        try:
            body = requests.get(url, timeout=7).json()
            # World Bank returns [metadata, rows]; rows may be missing/empty.
            result[name.lower()] = body[1][0]["value"] if len(body) > 1 and body[1] else None
        except Exception:
            # Network errors, bad JSON, unexpected shapes: best-effort None.
            result[name.lower()] = None
    latency = time.time() - t0
    return result, latency
|
| 144 |
+
|
| 145 |
+
def generate_signals(commodity: str, anchor: float, macro: Dict, lag_days: int) -> List[Tuple[Dict, str]]:
    """Derive the five toy signal payloads (with schema tags) from one macro read."""
    gdp = macro.get("gdp")
    gdp_scale = gdp / 1e14 if gdp else 1.0
    supply = anchor * gdp_scale
    price = omega_smooth("price_index", round((supply / 11.0) * gdp_scale, 6))

    econ = {
        "type": "commodity",
        "commodity": commodity,
        "supply": round(supply, 4),
        "demand": round(supply * 0.95, 4),
        "price_index": price,
        "flow": round(supply * 0.95 * price, 4),
    }

    # Friction is computed from the rounded supply/demand figures on purpose,
    # so it matches what the ledger actually records.
    friction = abs(econ["supply"] - econ["demand"]) / max(econ["supply"], 1e-9)
    logi = {"type": "logistics", "friction": round(friction, 6)}

    is_fuel = commodity.lower() in ["oil", "gas"]
    ener = {
        "type": "energy",
        "cost_index": round(price * 0.42, 4),
        "dependency": "high" if is_fuel else "moderate",
    }

    sent = {
        "type": "sentiment",
        "confidence": omega_smooth("confidence", np.random.uniform(0.62, 0.91)),
    }

    feat = {
        "type": "features",
        "lag_days": lag_days,
        "projected_price": round(price * (1 + (1 - sent["confidence"]) * 0.07), 6),
        "volatility": round(0.012 * lag_days, 6),
    }

    return [
        (econ, "commodity.v1"),
        (logi, "logistics.v1"),
        (ener, "energy.v1"),
        (sent, "sentiment.v1"),
        (feat, "features.v1"),
    ]
|
| 176 |
+
|
| 177 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 178 |
+
# CORE TICK FUNCTION (with coherence refusal)
|
| 179 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 180 |
+
|
| 181 |
+
def run_tick(commodity: str, anchor: float, country: str, lag_days: int, use_live: bool, year: str):
    """One ingest tick: fetch macro data, gate on coherence, append to ledger.

    Returns (result_dict, tip_hash_or_None) — the shape the Gradio outputs
    [res, tip] expect.  The whole tick is refused when the macro coherence
    proxy falls below COHERENCE_THRESHOLD; derived signals below the
    threshold are skipped individually.
    """
    if use_live:
        macro, latency = fetch_macro(country, year)
    else:
        macro = {"type": "macro", "country": country, "year": year,
                 "gdp": None, "inflation": None, "population": None}
        latency = 0.0

    # Inflation near 0 → stability near 1; a None inflation counts as stable.
    macro_coh = toy_coherence_score({
        "stability": 1.0 - abs(macro.get("inflation", 0) or 0) / 10,
    })

    if macro_coh < COHERENCE_THRESHOLD:
        return {"status": "refused", "reason": f"Macro coherence too low: {macro_coh:.3f}", "tip": get_tip_hash()}, None

    # Read the tip ONCE per record: hash and prev_hash must chain from the
    # same tip (the previous revision queried the DB twice per record).
    prev = get_tip_hash()
    macro_rec = {
        "ts": time.time(),
        "hash": hash_chain(macro, prev),
        "prev_hash": prev,
        "schema": "macro.v1",
        "country": country,
        "source": "world_bank" if use_live else "synthetic",
        "reliability": 0.88 if use_live else 0.65,
        "latency_s": round(latency, 4),
        "payload_json": json.dumps(macro, sort_keys=True),
        "metadata_json": json.dumps({"note": "anchor ingest"}, sort_keys=True),
        "coherence_score": macro_coh,
        "bytes": len(canonical_bytes(macro)),
        "entropy_proxy": compute_bit_stats(macro)[1],
    }

    if not insert_record(macro_rec):
        return {"status": "error", "reason": "duplicate hash"}, None

    added = 0
    for payload, schema in generate_signals(commodity, anchor, macro, lag_days):
        coh = toy_coherence_score({"stability": 1.0 - payload.get("friction", 0)})
        if coh < COHERENCE_THRESHOLD:
            continue  # refuse low-coherence derived signal

        prev = get_tip_hash()  # re-read: the tip advances with each insert
        rec = {
            "ts": time.time(),
            "hash": hash_chain(payload, prev),
            "prev_hash": prev,
            "schema": schema,
            "country": country,
            "source": "derived",
            "reliability": 0.92,
            "latency_s": 0.0,
            "payload_json": json.dumps(payload, sort_keys=True),
            "metadata_json": json.dumps({"linked_macro": macro_rec["hash"]}, sort_keys=True),
            "coherence_score": coh,
            "bytes": len(canonical_bytes(payload)),
            "entropy_proxy": compute_bit_stats(payload)[1],
        }

        if insert_record(rec):
            added += 1

    tip = get_tip_hash()
    return {
        "status": "ok",
        "added": added,
        "tip_hash": tip,
        "macro_coherence": macro_coh,
    }, tip
|
| 246 |
+
|
| 247 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 248 |
+
# QUERY & CHAT
|
| 249 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 250 |
+
|
| 251 |
+
def query_records(limit: int = 50, schema: str = "ANY", country: str = "ANY") -> List[Dict]:
    """Return up to *limit* newest ledger rows as dicts, optionally filtered.

    "ANY" disables the corresponding filter; *limit* is clamped to 1..300.
    """
    limit = max(1, min(int(limit), 300))

    filters: List[str] = []
    params: List[Any] = []
    if schema != "ANY":
        filters.append("schema = ?")
        params.append(schema)
    if country != "ANY":
        filters.append("country = ?")
        params.append(country)

    sql = "SELECT ts, hash, prev_hash, schema, country, coherence_score FROM records"
    if filters:
        sql += " WHERE " + " AND ".join(filters)
    sql += " ORDER BY id DESC LIMIT ?"
    params.append(limit)

    with sqlite3.connect(DB_PATH) as con:
        rows = con.execute(sql, params).fetchall()

    keys = ("ts", "hash", "prev", "schema", "country", "coherence")
    return [dict(zip(keys, row)) for row in rows]
|
| 274 |
+
|
| 275 |
+
def jarvis(message: str, history):
    """Tiny keyword-driven chat handler; *history* is unused but required
    by the gr.ChatInterface callback signature."""
    text = message.lower().strip()

    if "latest" in text or "tip" in text:
        recs = query_records(1)
        newest = recs[0] if recs else {"status": "empty"}
        return json.dumps(newest, indent=2)

    if "coherence" in text:
        recs = query_records(20)
        coh_values = [r["coherence"] for r in recs if r["coherence"] is not None]
        if not coh_values:
            return "No coherence data yet"
        return f"Recent coherence (last {len(coh_values)}): mean = {np.mean(coh_values):.3f}"

    return "Commands: latest, tip, coherence"
|
| 285 |
+
|
| 286 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 287 |
+
# GRADIO INTERFACE
|
| 288 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 289 |
+
|
| 290 |
+
# Top-level UI: built at import time so Spaces can serve `app` directly.
with gr.Blocks(title="CodexFlow v3 • IRE Influence") as app:
    gr.Markdown("# CodexFlow v3 • Provenance Ledger + First IRE Touch")
    gr.Markdown("SQLite • Hash chain • Bit stats • Simple coherence refusal & smoothing")

    with gr.Row():
        with gr.Column(scale=2):
            # NOTE(review): the positional second arguments below (Dropdown
            # choices + value, Textbox value, Checkbox value) assume a Gradio
            # version where these are accepted positionally — confirm against
            # the pinned gradio release in requirements.
            comm = gr.Dropdown(["Gold","Oil","Gas","Wheat","Copper"], "Gold", label="Commodity")
            anch = gr.Number(950, label="Anchor")
            cntry = gr.Textbox("WLD", label="Country")
            lag_d = gr.Slider(1, 365, 7, label="Lag days")
            yr = gr.Textbox(DEFAULT_YEAR, label="Year")
            live = gr.Checkbox(True, label="Live World Bank")

            btn = gr.Button("Run Tick", variant="primary")
            res = gr.JSON(label="Result")
            tip = gr.Textbox(label="Tip Hash", interactive=False)

            # Input order matches run_tick(commodity, anchor, country,
            # lag_days, use_live, year); outputs are its (dict, tip) pair.
            btn.click(run_tick, [comm, anch, cntry, lag_d, live, yr], [res, tip])

            gr.Markdown("### Query")
            lim = gr.Slider(5, 200, 30, label="Limit")
            sch = gr.Dropdown(["ANY", "macro.v1", "commodity.v1", "features.v1"], "ANY", label="Schema")
            qry_btn = gr.Button("Query")
            out = gr.JSON(label="Records")

            # Reuses the tick's country textbox; type "ANY" there to disable
            # the country filter in query_records.
            qry_btn.click(query_records, [lim, sch, cntry], out)

        with gr.Column(scale=1):
            gr.Markdown("### Jarvis X")
            # jarvis(message, history) -> str; history is ignored.
            gr.ChatInterface(jarvis, chatbot=gr.Chatbot(height=400))

    gr.Markdown("**v3** • First coherence check & Ω smoothing • Still toy-level IRE influence")

if __name__ == "__main__":
    app.launch()
|