rairo committed on
Commit
9f73dec
·
verified ·
1 Parent(s): 124ae0b

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +107 -59
main.py CHANGED
@@ -62,13 +62,13 @@ db_ref = db.reference()
62
 
63
  # --- Google GenAI Client Initialization (Gemini 3.0) ---
64
  try:
65
- logger.info("Initializing Google GenAI Client...")
66
  api_key = os.environ.get("Gemini")
67
  if not api_key:
68
  raise ValueError("The 'Gemini' API key is not set.")
69
 
70
  client = genai.Client(api_key=api_key)
71
- logger.info("Google GenAI (Gemini 3.0) Client initialized successfully.")
72
  except Exception as e:
73
  logger.error(f"FATAL: Error initializing GenAI Client: {e}")
74
  exit(1)
@@ -79,6 +79,7 @@ ATHENA_PRO = "gemini-3-pro-image-preview"
79
 
80
  # Grounding / External API
81
  WOLFRAM_APP_ID = os.environ.get("WOLFRAM_APP_ID")
 
82
 
83
  # -----------------------------------------------------------------------------
84
  # 2. HELPER FUNCTIONS & GROUNDING
@@ -114,57 +115,55 @@ def upload_to_storage(data_bytes, destination_blob_name, content_type):
114
  return None
115
 
116
  def query_wolfram_alpha(query):
117
- if not WOLFRAM_APP_ID: return "Grounded in first principles."
118
  try:
119
  url = f"http://api.wolframalpha.com/v1/result?appid={WOLFRAM_APP_ID}&i={query}"
120
  response = requests.get(url, timeout=5)
121
- return response.text if response.status_code == 200 else "Fact-check pending..."
122
- except: return "Grounding in progress."
123
 
124
  # -----------------------------------------------------------------------------
125
- # 3. TITANESS MEDIA ENGINE (CONSOLIDATED MASTER BLUEPRINT)
126
  # -----------------------------------------------------------------------------
127
 
128
  def generate_narration_task(text, uid, epiphany_id, layer_name):
 
129
  if not text: return layer_name, None
130
  try:
131
  api_key = os.environ.get("DEEPGRAM_API_KEY")
132
  if not api_key: return layer_name, None
 
133
  DEEPGRAM_URL = "https://api.deepgram.com/v1/speak?model=aura-luna-en"
134
  headers = {"Authorization": f"Token {api_key}", "Content-Type": "text/plain"}
135
  response = requests.post(DEEPGRAM_URL, headers=headers, data=text.encode('utf-8'))
136
  response.raise_for_status()
137
- path = f"users/{uid}/epiphanies/{epiphany_id}/audio/{layer_name}.mp3"
 
138
  return layer_name, upload_to_storage(response.content, path, 'audio/mpeg')
139
  except Exception as e:
140
  logger.error(f"TTS Task Error [{layer_name}]: {e}")
141
  return layer_name, None
142
 
143
- def generate_master_blueprint_task(subject, full_data, uid, epiphany_id):
144
- """Generates ONE consolidated 4K Technical Blueprint for all 4 layers."""
145
  try:
146
  logger.info(f"Generating Master Blueprint for: {subject}")
147
- # Combine the 4 layer texts into a single prompt for context
148
- summary = (
149
- f"Genesis: {full_data['genesis'][:100]}... "
150
- f"Core: {full_data['scientific_core'][:100]}... "
151
- f"Edge: {full_data['engineering_edge'][:100]}... "
152
- f"Future: {full_data['cross_pollination'][:100]}..."
153
- )
154
 
155
  prompt = (
156
- f"Create a single 4K Master Technical Blueprint for '{subject}'. "
157
- f"The image MUST be divided into 4 clear quadrants or a visual sequence: "
158
- f"1. The Origin (Genesis), 2. The Physics (Core), 3. The Engineering (Edge), 4. The Future (Cross-Pollination). "
159
- f"Context: {summary}. Style: Leonardo Da Vinci sketch meets modern CAD schematic. "
160
- f"Midnight navy background, white-line technical art. Professional engineering labels."
 
161
  )
162
 
163
  response = client.models.generate_content(
164
  model=ATHENA_PRO,
165
  contents=prompt,
166
  config=types.GenerateContentConfig(
167
- tools=[{"google_search": {}}],
168
  image_config=types.ImageConfig(aspect_ratio="16:9", image_size="4K")
169
  )
170
  )
@@ -175,11 +174,11 @@ def generate_master_blueprint_task(subject, full_data, uid, epiphany_id):
175
  return upload_to_storage(image_bytes, path, 'image/png')
176
  return None
177
  except Exception as e:
178
- logger.error(f"Master Blueprint Error: {e}")
179
  return None
180
 
181
  # -----------------------------------------------------------------------------
182
- # 4. CORE ENDPOINTS (EPIPHANY, THEIA, PROXY)
183
  # -----------------------------------------------------------------------------
184
 
185
  @app.route('/api/image-proxy', methods=['GET'])
@@ -193,33 +192,52 @@ def image_proxy():
193
 
194
  @app.route('/api/epiphany/generate', methods=['POST'])
195
  def generate_epiphany():
196
- logger.info(">>> START Titaness generate_epiphany")
197
  uid = verify_token(request.headers.get('Authorization'))
198
  if not uid: return jsonify({'error': 'Unauthorized'}), 401
199
 
200
  user_ref = db_ref.child(f'users/{uid}')
201
  user_data = user_ref.get()
202
- if not user_data or user_data.get('credits', 0) < 4:
203
- return jsonify({'error': 'Need 4 Sparks for Synthesis.'}), 402
 
 
204
 
 
 
 
205
  image_file = request.files['image']
206
  image_bytes = image_file.read()
207
  pil_image = Image.open(io.BytesIO(image_bytes)).convert('RGB')
208
 
209
  try:
210
- # Step 1: Rapid ID
211
- id_prompt = "Identify this precisely. Reply with ONLY the name."
212
  subject = client.models.generate_content(model=ATHENA_FLASH, contents=[id_prompt, pil_image]).text.strip()
 
213
 
214
- # Step 2: Synthesis + Universal Search
215
  physics_fact = query_wolfram_alpha(f"physics laws of {subject}")
216
  synthesis_prompt = f"""
217
- Act as Athena. Analyze '{subject}' grounded in: {physics_fact}.
218
- Style: Richard Feynman.
219
- Tasks:
220
- 1. Search web for 3 sources (URL, Title, 2-sentence Feynman Summary).
 
221
  2. Create 4 Discovery Layers (genesis, scientific_core, engineering_edge, cross_pollination).
222
- Return JSON Object ONLY.
 
 
 
 
 
 
 
 
 
 
 
 
223
  """
224
 
225
  res = client.models.generate_content(
@@ -231,45 +249,58 @@ def generate_epiphany():
231
  )
232
  )
233
 
234
- # Schema Sentinel
235
  raw_json = res.text.strip()
236
  if "```json" in raw_json: raw_json = re.search(r'```json\n(.*?)\n```', raw_json, re.DOTALL).group(1)
237
  data = json.loads(raw_json)
238
  if isinstance(data, list): data = data[0]
239
 
 
 
 
 
 
 
 
 
 
240
  epiphany_id = str(uuid.uuid4())
241
- layers = ['genesis', 'scientific_core', 'engineering_edge', 'cross_pollination']
242
 
243
- # Step 3: Parallel Media (4 Audios + 1 Master Blueprint)
244
  audios = {}
245
  master_blueprint_url = None
 
246
  with ThreadPoolExecutor(max_workers=5) as executor:
247
- aud_futures = [executor.submit(generate_narration_task, data.get(l), uid, epiphany_id, l) for l in layers]
 
 
248
  blu_future = executor.submit(generate_master_blueprint_task, subject, data, uid, epiphany_id)
249
 
250
- for f in aud_futures:
251
  k, v = f.result()
252
  audios[k] = v
253
  master_blueprint_url = blu_future.result()
254
 
255
- # Step 4: Storage
256
- orig_url = upload_to_storage(image_bytes, f"users/{uid}/epiphanies/{epiphany_id}/vision.jpg", 'image/jpeg')
257
  epiphany_record = {
258
  "epiphanyId": epiphany_id,
259
  "uid": uid,
260
- "title": data.get('title', 'System Epiphany'),
261
  "subject": subject,
262
- "imageURL": orig_url,
263
  "masterBlueprint": master_blueprint_url,
264
  "layers": {
265
- l: {"text": data.get(l, ""), "audio": audios.get(l)} for l in layers
266
  },
267
  "scholar": data.get('scholar', []),
268
  "createdAt": datetime.utcnow().isoformat()
269
  }
270
 
271
  db_ref.child(f'epiphanies/{epiphany_id}').set(epiphany_record)
272
- user_ref.update({'credits': user_data.get('credits', 0) - 4})
 
 
273
  return jsonify(epiphany_record), 201
274
 
275
  except Exception as e:
@@ -278,19 +309,26 @@ def generate_epiphany():
278
 
279
  @app.route('/api/epiphany/theia', methods=['POST'])
280
  def theia_sweep():
 
281
  uid = verify_token(request.headers.get('Authorization'))
282
  if not uid: return jsonify({'error': 'Unauthorized'}), 401
283
-
284
  user_ref = db_ref.child(f'users/{uid}')
285
  if user_ref.get().get('credits', 0) < 4:
286
  return jsonify({'error': 'Need 4 Sparks for Theia Sweep.'}), 402
287
 
288
  epiphany_id = request.form.get('epiphanyId')
289
  image_file = request.files['image']
 
290
  context = db_ref.child(f'epiphanies/{epiphany_id}').get() or {}
291
  subject = context.get('subject', 'Complex System')
292
 
293
- sweep_prompt = f"Theia Mode Activation: {subject}. Identify components via Python Code Execution. Return JSON list of annotations: label, coordinates [ymin, xmin, ymax, xmax], and 20-word Feynman Micro-Epiphany."
 
 
 
 
 
294
 
295
  try:
296
  pil_image = Image.open(io.BytesIO(image_file.read())).convert('RGB')
@@ -305,6 +343,7 @@ def theia_sweep():
305
  raw_json = res.text.strip()
306
  if "```json" in raw_json: raw_json = re.search(r'```json\n(.*?)\n```', raw_json, re.DOTALL).group(1)
307
  annotations = json.loads(raw_json)
 
308
  user_ref.update({'credits': user_ref.get().get('credits', 0) - 4})
309
  return jsonify({"annotations": annotations}), 200
310
  except Exception as e:
@@ -318,14 +357,16 @@ def deep_dive():
318
  image_file = request.files['image']
319
  try:
320
  pil_image = Image.open(io.BytesIO(image_file.read())).convert('RGB')
321
- res = client.models.generate_content(model=ATHENA_FLASH, contents=["In 50 words Feynman style, explain this detail.", pil_image])
322
- user_ref = db_ref.child(f'users/{uid}')
323
- user_ref.update({'credits': max(0, (user_ref.get().get('credits', 0) or 0) - 1)})
 
 
324
  return jsonify({"analysis": res.text.strip()}), 200
325
  except Exception as e: return jsonify({'error': str(e)}), 500
326
 
327
  # -----------------------------------------------------------------------------
328
- # 5. CHIRON & SYSTEM UTILS
329
  # -----------------------------------------------------------------------------
330
 
331
  @app.route('/api/user/call-briefing', methods=['GET'])
@@ -334,11 +375,11 @@ def get_chiron_briefing():
334
  if not uid: return jsonify({'error': 'Unauthorized'}), 401
335
  try:
336
  last = db_ref.child('epiphanies').order_by_child('uid').equal_to(uid).limit_to_last(1).get() or {}
337
- ctx = "New seeker."
338
  if last:
339
  e = list(last.values())[0]
340
- ctx = f"Subject: {e['subject']}. Recent Research: {e.get('scholar', [])[:1]}"
341
- prompt = f"Prep Chiron (Socratic Mentor). Context: {ctx}. 4-sentence brief."
342
  res = client.models.generate_content(model=ATHENA_FLASH, contents=[prompt])
343
  return jsonify({"memory_summary": res.text.strip()}), 200
344
  except Exception as e: return jsonify({'error': str(e)}), 500
@@ -360,10 +401,13 @@ def log_usage():
360
  def admin_dashboard():
361
  try:
362
  verify_admin(request.headers.get('Authorization'))
 
 
 
363
  return jsonify({
364
- "users": len(db_ref.child('users').get() or {}),
365
- "epiphanies": len(db_ref.child('epiphanies').get() or {}),
366
- "requests": len([r for r in (db_ref.child('credit_requests').get() or {}).values() if r.get('status') == 'pending'])
367
  })
368
  except Exception as e: return jsonify({'error': str(e)}), 403
369
 
@@ -418,6 +462,10 @@ def list_epiphanies():
418
  res = db_ref.child('epiphanies').order_by_child('uid').equal_to(uid).get() or {}
419
  return jsonify(list(res.values()))
420
 
 
 
 
 
421
  if __name__ == '__main__':
422
- logger.info("Titaness Master Schematic Server Active on 7860...")
423
  app.run(debug=False, host="0.0.0.0", port=7860)
 
62
 
63
  # --- Google GenAI Client Initialization (Gemini 3.0) ---
64
  try:
65
+ logger.info("Initializing Google GenAI Client (Gemini 3.0 Ecosystem)...")
66
  api_key = os.environ.get("Gemini")
67
  if not api_key:
68
  raise ValueError("The 'Gemini' API key is not set.")
69
 
70
  client = genai.Client(api_key=api_key)
71
+ logger.info("Google GenAI Client initialized successfully.")
72
  except Exception as e:
73
  logger.error(f"FATAL: Error initializing GenAI Client: {e}")
74
  exit(1)
 
79
 
80
  # Grounding / External API
81
  WOLFRAM_APP_ID = os.environ.get("WOLFRAM_APP_ID")
82
+ OPENALEX_MAILTO = os.environ.get("OPENALEX_MAILTO", "rairo@sozofix.tech")
83
 
84
  # -----------------------------------------------------------------------------
85
  # 2. HELPER FUNCTIONS & GROUNDING
 
115
  return None
116
 
117
def query_wolfram_alpha(query):
    """Ground a query in the Wolfram|Alpha Short Answers API.

    Args:
        query: Natural-language question to send to Wolfram|Alpha.

    Returns:
        The plain-text answer on HTTP 200, or a graceful fallback string when
        the app ID is unset, the API declines to answer, or the request fails.
        Callers always receive usable prompt context; this never raises.
    """
    if not WOLFRAM_APP_ID: return "Grounded in physical first principles."
    try:
        # Pass the query via `params` so requests URL-encodes it; interpolating
        # it raw into the URL broke on spaces, '&', '+' and other reserved chars.
        response = requests.get(
            "http://api.wolframalpha.com/v1/result",
            params={"appid": WOLFRAM_APP_ID, "i": query},
            timeout=5,
        )
        return response.text if response.status_code == 200 else "Constants verifying..."
    except Exception as e:
        # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt
        # and SystemExit); still best-effort, but now logged for diagnosis.
        logger.warning(f"Wolfram grounding failed: {e}")
        return "Grounding context pending."
124
 
125
  # -----------------------------------------------------------------------------
126
+ # 3. TITANESS MEDIA ENGINE (CONSOLIDATED MASTER BLUEPRINT + ASYNC AUDIO)
127
  # -----------------------------------------------------------------------------
128
 
129
def generate_narration_task(text, uid, epiphany_id, layer_name):
    """Deepgram Aura-Luna task for Athena's voice.

    Synthesizes `text` to MP3 via Deepgram TTS and uploads it to storage.

    Args:
        text: Narration text for this layer; falsy text short-circuits.
        uid: Owning user's ID (used in the storage path).
        epiphany_id: Parent epiphany ID (used in the storage path).
        layer_name: Layer key (e.g. 'genesis'); returned so callers can map
            results when tasks run in parallel.

    Returns:
        (layer_name, public_url) on success, (layer_name, None) on any failure
        — never raises, so a single bad layer can't sink the ThreadPoolExecutor
        fan-out in generate_epiphany.
    """
    if not text: return layer_name, None
    try:
        api_key = os.environ.get("DEEPGRAM_API_KEY")
        if not api_key: return layer_name, None

        DEEPGRAM_URL = "https://api.deepgram.com/v1/speak?model=aura-luna-en"
        headers = {"Authorization": f"Token {api_key}", "Content-Type": "text/plain"}
        # timeout added: an unbounded POST could hang this worker thread (and
        # the executor's result loop) indefinitely if Deepgram stalls.
        response = requests.post(DEEPGRAM_URL, headers=headers, data=text.encode('utf-8'), timeout=30)
        response.raise_for_status()

        path = f"users/{uid}/epiphanies/{epiphany_id}/narrations/{layer_name}.mp3"
        return layer_name, upload_to_storage(response.content, path, 'audio/mpeg')
    except Exception as e:
        logger.error(f"TTS Task Error [{layer_name}]: {e}")
        return layer_name, None
146
 
147
+ def generate_master_blueprint_task(subject, flattened_data, uid, epiphany_id):
148
+ """Nano Banana Pro: Generates ONE consolidated 4K Technical Blueprint schematic."""
149
  try:
150
  logger.info(f"Generating Master Blueprint for: {subject}")
151
+ # Build context for the visual
152
+ context_str = f"Genesis: {flattened_data.get('genesis', '')[:100]}... Core: {flattened_data.get('scientific_core', '')[:100]}..."
 
 
 
 
 
153
 
154
  prompt = (
155
+ f"Generate a single high-fidelity 4K Master Technical Blueprint for '{subject}'. "
156
+ f"Layout: A four-quadrant schematic. Zone 1: Genesis (Origins). Zone 2: Scientific Core (The Physics). "
157
+ f"Zone 3: Engineering Edge (Design Limits). Zone 4: Cross-Pollination (Future Tech-Tree). "
158
+ f"Aesthetic: White-line architectural blueprint on midnight navy background. "
159
+ f"Style: Leonardo Da Vinci meets modern 4K engineering software. High scientific accuracy. "
160
+ f"Context details: {context_str}"
161
  )
162
 
163
  response = client.models.generate_content(
164
  model=ATHENA_PRO,
165
  contents=prompt,
166
  config=types.GenerateContentConfig(
 
167
  image_config=types.ImageConfig(aspect_ratio="16:9", image_size="4K")
168
  )
169
  )
 
174
  return upload_to_storage(image_bytes, path, 'image/png')
175
  return None
176
  except Exception as e:
177
+ logger.error(f"Master Blueprint Engine Error: {e}")
178
  return None
179
 
180
  # -----------------------------------------------------------------------------
181
+ # 4. CORE ENDPOINTS (EPIPHANY GENERATION & THEIA)
182
  # -----------------------------------------------------------------------------
183
 
184
  @app.route('/api/image-proxy', methods=['GET'])
 
192
 
193
  @app.route('/api/epiphany/generate', methods=['POST'])
194
  def generate_epiphany():
195
+ logger.info(">>> TITANESS GENERATION INITIATED")
196
  uid = verify_token(request.headers.get('Authorization'))
197
  if not uid: return jsonify({'error': 'Unauthorized'}), 401
198
 
199
  user_ref = db_ref.child(f'users/{uid}')
200
  user_data = user_ref.get()
201
+
202
+ # 8 Sparks for Synthesis + Feynman Scholar + Consolidated Master Blueprint
203
+ if not user_data or user_data.get('credits', 0) < 8:
204
+ return jsonify({'error': 'Need 8 Sparks for Full Master Synthesis.'}), 402
205
 
206
+ if 'image' not in request.files:
207
+ return jsonify({'error': 'Visual input required.'}), 400
208
+
209
  image_file = request.files['image']
210
  image_bytes = image_file.read()
211
  pil_image = Image.open(io.BytesIO(image_bytes)).convert('RGB')
212
 
213
  try:
214
+ # Step 1: Precise Identification
215
+ id_prompt = "Identify this object or system. Reply with ONLY the name (max 5 words)."
216
  subject = client.models.generate_content(model=ATHENA_FLASH, contents=[id_prompt, pil_image]).text.strip()
217
+ logger.info(f"Synthesis Subject: {subject}")
218
 
219
+ # Step 2: Synthesis + Universal Scholar Search
220
  physics_fact = query_wolfram_alpha(f"physics laws of {subject}")
221
  synthesis_prompt = f"""
222
+ Act as Athena. Reveal the first principles of '{subject}' grounded in: {physics_fact}.
223
+ Style: Richard Feynman. Simple analogies, profound engineering.
224
+
225
+ Tasks:
226
+ 1. Search the web (ArXiv, Patents, Journals) for 3 diverse sources about {subject} via Google Search.
227
  2. Create 4 Discovery Layers (genesis, scientific_core, engineering_edge, cross_pollination).
228
+ 3. For each research source, provide URL, Title, and a 2-sentence Feynman Summary.
229
+
230
+ MANDATORY JSON SCHEMA (FLAT):
231
+ {{
232
+ "title": "string",
233
+ "genesis": "string",
234
+ "scientific_core": "string",
235
+ "engineering_edge": "string",
236
+ "cross_pollination": "string",
237
+ "scholar": [
238
+ {{"title": "string", "url": "string", "feynman_summary": "string"}}
239
+ ]
240
+ }}
241
  """
242
 
243
  res = client.models.generate_content(
 
249
  )
250
  )
251
 
252
+ # --- DATA FLATTENING SENTINEL ---
253
  raw_json = res.text.strip()
254
  if "```json" in raw_json: raw_json = re.search(r'```json\n(.*?)\n```', raw_json, re.DOTALL).group(1)
255
  data = json.loads(raw_json)
256
  if isinstance(data, list): data = data[0]
257
 
258
+ # Handle nesting variations to prevent KeyErrors
259
+ if "epiphany" in data: data = data["epiphany"]
260
+ elif "discovery_layers" in data: data = data["discovery_layers"]
261
+
262
+ # Ensure all required keys exist (Prevent 500 errors in threads)
263
+ required_keys = ['genesis', 'scientific_core', 'engineering_edge', 'cross_pollination']
264
+ for k in required_keys:
265
+ if k not in data: data[k] = f"First principles of the {k.replace('_', ' ')} are unfolding."
266
+
267
  epiphany_id = str(uuid.uuid4())
 
268
 
269
+ # Step 3: Parallel Media (Titaness Execution)
270
  audios = {}
271
  master_blueprint_url = None
272
+
273
  with ThreadPoolExecutor(max_workers=5) as executor:
274
+ # 4 Audio Threads
275
+ aud_futures = [executor.submit(generate_narration_task, data[l], uid, epiphany_id, l) for l in required_keys]
276
+ # 1 Master Blueprint Thread
277
  blu_future = executor.submit(generate_master_blueprint_task, subject, data, uid, epiphany_id)
278
 
279
+ for f in aud_futures:
280
  k, v = f.result()
281
  audios[k] = v
282
  master_blueprint_url = blu_future.result()
283
 
284
+ # Step 4: Storage & Record Persistence
285
+ image_url = upload_to_storage(image_bytes, f"users/{uid}/epiphanies/{epiphany_id}/original.jpg", 'image/jpeg')
286
  epiphany_record = {
287
  "epiphanyId": epiphany_id,
288
  "uid": uid,
289
+ "title": data.get('title', 'Universal Revelation'),
290
  "subject": subject,
291
+ "imageURL": image_url,
292
  "masterBlueprint": master_blueprint_url,
293
  "layers": {
294
+ l: {"text": data[l], "audio": audios.get(l)} for l in required_keys
295
  },
296
  "scholar": data.get('scholar', []),
297
  "createdAt": datetime.utcnow().isoformat()
298
  }
299
 
300
  db_ref.child(f'epiphanies/{epiphany_id}').set(epiphany_record)
301
+ user_ref.update({'credits': user_data.get('credits', 0) - 8})
302
+ logger.info(f"TITANESS SUCCESS: {epiphany_id}")
303
+
304
  return jsonify(epiphany_record), 201
305
 
306
  except Exception as e:
 
309
 
310
  @app.route('/api/epiphany/theia', methods=['POST'])
311
  def theia_sweep():
312
+ """Independent Theia Mode: Bounding Boxes + Micro-Epiphanies via Code Execution."""
313
  uid = verify_token(request.headers.get('Authorization'))
314
  if not uid: return jsonify({'error': 'Unauthorized'}), 401
315
+
316
  user_ref = db_ref.child(f'users/{uid}')
317
  if user_ref.get().get('credits', 0) < 4:
318
  return jsonify({'error': 'Need 4 Sparks for Theia Sweep.'}), 402
319
 
320
  epiphany_id = request.form.get('epiphanyId')
321
  image_file = request.files['image']
322
+
323
  context = db_ref.child(f'epiphanies/{epiphany_id}').get() or {}
324
  subject = context.get('subject', 'Complex System')
325
 
326
+ sweep_prompt = f"""
327
+ Theia Mode Activation: {subject}.
328
+ Use Python to spatially deconstruct this image.
329
+ Identify every functional component. Return JSON list:
330
+ [ {{ "label": "string", "coordinates": [ymin, xmin, ymax, xmax], "micro_epiphany": "20-word Feynman summary" }} ]
331
+ """
332
 
333
  try:
334
  pil_image = Image.open(io.BytesIO(image_file.read())).convert('RGB')
 
343
  raw_json = res.text.strip()
344
  if "```json" in raw_json: raw_json = re.search(r'```json\n(.*?)\n```', raw_json, re.DOTALL).group(1)
345
  annotations = json.loads(raw_json)
346
+
347
  user_ref.update({'credits': user_ref.get().get('credits', 0) - 4})
348
  return jsonify({"annotations": annotations}), 200
349
  except Exception as e:
 
357
  image_file = request.files['image']
358
  try:
359
  pil_image = Image.open(io.BytesIO(image_file.read())).convert('RGB')
360
+ res = client.models.generate_content(
361
+ model=ATHENA_FLASH,
362
+ contents=["Feynman style: Explain this zoomed-in detail in 50 words.", pil_image]
363
+ )
364
+ db_ref.child(f'users/{uid}/credits').set(max(0, (db_ref.child(f'users/{uid}/credits').get() or 0) - 1))
365
  return jsonify({"analysis": res.text.strip()}), 200
366
  except Exception as e: return jsonify({'error': str(e)}), 500
367
 
368
  # -----------------------------------------------------------------------------
369
+ # 5. CHIRON & SYSTEM TOOLS
370
  # -----------------------------------------------------------------------------
371
 
372
  @app.route('/api/user/call-briefing', methods=['GET'])
 
375
  if not uid: return jsonify({'error': 'Unauthorized'}), 401
376
  try:
377
  last = db_ref.child('epiphanies').order_by_child('uid').equal_to(uid).limit_to_last(1).get() or {}
378
+ ctx = "Exploring new frontiers."
379
  if last:
380
  e = list(last.values())[0]
381
+ ctx = f"Subject: {e['subject']}. Recent papers: {e.get('scholar', [])[:1]}"
382
+ prompt = f"Prep Chiron (Mentor). Context: {ctx}. 4-sentence brief for Socratic tutoring."
383
  res = client.models.generate_content(model=ATHENA_FLASH, contents=[prompt])
384
  return jsonify({"memory_summary": res.text.strip()}), 200
385
  except Exception as e: return jsonify({'error': str(e)}), 500
 
401
def admin_dashboard():
    """Admin overview: total users, total epiphanies, and pending Spark requests.

    Returns:
        200 with counts JSON on success; 403 with the error string when
        `verify_admin` rejects the token or any DB read fails.
    """
    try:
        verify_admin(request.headers.get('Authorization'))
        users = db_ref.child('users').get() or {}
        epiphanies = db_ref.child('epiphanies').get() or {}
        # Renamed from `requests`: the old local shadowed the imported
        # `requests` HTTP module inside this function.
        credit_requests = db_ref.child('credit_requests').get() or {}
        return jsonify({
            "users": len(users),
            "epiphanies": len(epiphanies),
            "pending_spark_requests": len(
                [r for r in credit_requests.values() if r.get('status') == 'pending']
            )
        })
    except Exception as e: return jsonify({'error': str(e)}), 403
413
 
 
462
  res = db_ref.child('epiphanies').order_by_child('uid').equal_to(uid).get() or {}
463
  return jsonify(list(res.values()))
464
 
465
+ # -----------------------------------------------------------------------------
466
+ # 7. MAIN EXECUTION
467
+ # -----------------------------------------------------------------------------
468
+
469
  if __name__ == '__main__':
470
+ logger.info("Titaness Paradigm Paradigm Backbone Active on 7860...")
471
  app.run(debug=False, host="0.0.0.0", port=7860)