MuhammadMahmoud commited on
Commit
20f9c30
·
1 Parent(s): 55e6c98

Deploy Full Engine

Browse files
README.md CHANGED
@@ -8,6 +8,15 @@ app_port: 7860
8
  pinned: false
9
  ---
10
 
 
 
 
 
 
 
 
 
 
11
  # Awn AI Service
12
 
13
  AI-powered backend service for the Awn platform — connecting families in need with donors and charitable organizations.
 
8
  pinned: false
9
  ---
10
 
11
+ ---
12
+ title: Awn AI Service
13
+ emoji: 🚀
14
+ colorFrom: blue
15
+ colorTo: purple
16
+ sdk: docker
17
+ app_port: 7860
18
+ ---
19
+
20
  # Awn AI Service
21
 
22
  AI-powered backend service for the Awn platform — connecting families in need with donors and charitable organizations.
app/api/admin_ops.py CHANGED
@@ -6,6 +6,10 @@ from app.core.auth import verify_api_key
6
  from app.services.chat.api.llm_router import llm_router, circuit_registry
7
  import app.services.chat.api.llm_router as router_module
8
  from app.core.redis_client import redis_client
 
 
 
 
9
 
10
  logger = logging.getLogger(__name__)
11
 
@@ -28,8 +32,80 @@ class ModelBanReq(BaseModel):
28
  class KillSwitchReq(BaseModel):
29
  active: bool
30
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
 
32
- # ─── Provider Controls ────────────────────────────────────────────────────────
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
 
34
  @router.post("/provider/{name}/disable")
35
  async def disable_provider(name: str):
 
6
  from app.services.chat.api.llm_router import llm_router, circuit_registry
7
  import app.services.chat.api.llm_router as router_module
8
  from app.core.redis_client import redis_client
9
+ from app.core.config import settings, BASE_DIR
10
+ from app.services.chat.api.model_registry import model_registry
11
+ import os
12
+ import re
13
 
14
  logger = logging.getLogger(__name__)
15
 
 
32
  class KillSwitchReq(BaseModel):
33
  active: bool
34
 
35
class EnvUpdateReq(BaseModel):
    """Request body for POST /env: update one provider's default model list."""

    # Provider identifier; the endpoint accepts "groq", "gemini",
    # "openai" or "openrouter" (case-insensitive, surrounding whitespace ignored).
    provider: str
    # Comma-separated model names persisted to the matching LLM_MODELS_* key.
    models_string: str
39
+
40
+ # ─── Permanent Configuration (.env) ───────────────────────────────────────────
41
+
42
def _update_env_file(key: str, new_value: str) -> None:
    """Safely updates a key inside the physical .env file. Fails gracefully if Read-Only.

    Rewrites the existing ``KEY=value`` line in place when the key is already
    present, appends it otherwise, and creates the file from scratch when it
    does not exist. On restricted file systems (e.g. Hugging Face Spaces) the
    error is logged and swallowed so callers can still rely on the in-memory
    hot-reload path.

    Args:
        key: Environment variable name, e.g. "LLM_MODELS_GROQ".
        new_value: Value to persist. NOTE(review): assumed to be a single
            line — a value containing a newline would inject extra entries
            into the file; callers should validate before calling.
    """
    env_path = BASE_DIR / ".env"
    entry = f"{key}={new_value}\n"

    try:
        if not env_path.exists():
            # No .env yet — create it containing just this entry.
            env_path.write_text(entry, encoding="utf-8")
            return

        lines = env_path.read_text(encoding="utf-8").splitlines(keepends=True)

        updated = False
        with open(env_path, "w", encoding="utf-8") as f:
            for line in lines:
                if line.startswith(f"{key}="):
                    # Replace the existing definition.
                    f.write(entry)
                    updated = True
                else:
                    f.write(line)
            if not updated:
                # Key wasn't found — append it, starting on a fresh line.
                if lines and not lines[-1].endswith("\n"):
                    f.write("\n")
                f.write(entry)
    except OSError as e:
        # OSError is the canonical name (IOError is an alias) and also
        # covers PermissionError raised by read-only mounts.
        logger.warning(f"File system is restricted (e.g. Hugging Face Space). Could not save {key} to disk. Falling back to hot RAM reload only. Error: {e}")
72
+
73
@router.get("/env")
async def get_env_models():
    """Report the permanent default model strings currently held in settings."""
    attr_by_provider = {
        "groq": "LLM_MODELS_GROQ",
        "gemini": "LLM_MODELS_GEMINI",
        "openai": "LLM_MODELS_OPENAI",
        "openrouter": "LLM_MODELS_OPENROUTER",
    }
    # Resolve each settings attribute dynamically instead of spelling the
    # four lookups out one by one.
    return {provider: getattr(settings, attr) for provider, attr in attr_by_provider.items()}
82
 
83
@router.post("/env")
async def update_env_models(payload: EnvUpdateReq):
    """Securely writes default fallback models to physical .env file and hot-reloads.

    Validates the provider id and the models string, persists the new value
    to .env (best-effort; no-op on read-only file systems), updates the
    in-memory settings object, and rebuilds the model registry so the change
    is live immediately.

    Raises:
        HTTPException: 400 for an unknown provider, or for a models string
            containing newline characters (.env line-injection guard).
    """
    provider = payload.provider.strip().lower()
    mapping = {
        "groq": "LLM_MODELS_GROQ",
        "gemini": "LLM_MODELS_GEMINI",
        "openai": "LLM_MODELS_OPENAI",
        "openrouter": "LLM_MODELS_OPENROUTER",
    }

    if provider not in mapping:
        raise HTTPException(status_code=400, detail="Invalid provider ID")

    models_string = payload.models_string.strip()
    # A newline in the value would let a caller append arbitrary KEY=VALUE
    # lines to the physical .env file — reject it outright.
    if "\n" in models_string or "\r" in models_string:
        raise HTTPException(status_code=400, detail="models_string must be a single line")

    key = mapping[provider]

    # Write to physical file securely
    _update_env_file(key, models_string)

    # Write to local settings memory so it's instantly available everywhere
    setattr(settings, key, models_string)

    # Reload model_registry defaults instantly to trigger Hot-Reload
    model_registry._load_defaults()

    return {"status": "success", "msg": f"Updated {key} to {models_string} safely."}
109
 
110
  @router.post("/provider/{name}/disable")
111
  async def disable_provider(name: str):
app/services/chat/api/model_registry.py CHANGED
@@ -79,9 +79,9 @@ class ModelRegistry:
79
  def _load_defaults(self):
80
  """
81
  Default model catalogue.
82
- In the future, this can be loaded from a config file or env variable
83
- so models can be updated without touching source code.
84
  """
 
85
  defaults = {
86
  "groq": [(m.strip(), i) for i, m in enumerate(settings.LLM_MODELS_GROQ.split(",")) if m.strip()],
87
  "gemini": [(m.strip(), i) for i, m in enumerate(settings.LLM_MODELS_GEMINI.split(",")) if m.strip()],
 
79
  def _load_defaults(self):
80
  """
81
  Default model catalogue.
82
+ Loads from config strings and hot-reloads dynamically.
 
83
  """
84
+ self._models.clear()
85
  defaults = {
86
  "groq": [(m.strip(), i) for i, m in enumerate(settings.LLM_MODELS_GROQ.split(",")) if m.strip()],
87
  "gemini": [(m.strip(), i) for i, m in enumerate(settings.LLM_MODELS_GEMINI.split(",")) if m.strip()],
app/services/rag/vector_store.py CHANGED
@@ -54,8 +54,18 @@ class VectorStore:
54
  return False
55
 
56
  if not settings.QDRANT_URL:
57
- logger.info("QDRANT_URL not configured vector store disabled.")
58
- return False
 
 
 
 
 
 
 
 
 
 
59
 
60
  try:
61
  self.client = QdrantClient(
 
54
  return False
55
 
56
  if not settings.QDRANT_URL:
57
+ logger.info("QDRANT_URL not configured. Attempting :memory: fallback for RAG...")
58
+ try:
59
+ self.client = QdrantClient(":memory:")
60
+ self._connected = True
61
+ self._ensure_collection()
62
+ logger.warning("🟢 RAG running in Ephemeral :memory: mode. Vectors will be wiped on restart.")
63
+ return True
64
+ except Exception as exc:
65
+ logger.warning("Failed to fallback to memory Qdrant: %s — vector store disabled.", exc)
66
+ self.client = None
67
+ self._connected = False
68
+ return False
69
 
70
  try:
71
  self.client = QdrantClient(
app/static/dashboard.html CHANGED
@@ -286,6 +286,47 @@
286
  <div class="panel"><div class="kpi-lbl">Success Rate Discrepancy (%)</div><div class="chart-wrap"><canvas id="chartSr"></canvas></div></div>
287
  </div>
288
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
289
  </div>
290
  </main>
291
 
@@ -301,7 +342,7 @@
301
  if(API_KEY) { document.getElementById('authOverlay').classList.remove('active'); setTimeout(() => document.getElementById('authOverlay').style.display='none', 200); }
302
  function saveKey() {
303
  const v = document.getElementById('apiKeyIn').value.trim();
304
- if(v) { API_KEY = v; localStorage.setItem('awn_api_key', v); document.getElementById('authOverlay').classList.remove('active'); setTimeout(() => document.getElementById('authOverlay').style.display='none', 200); fetchLoop(); }
305
  }
306
  function logout() {
307
  localStorage.removeItem('awn_api_key'); API_KEY = null;
@@ -524,7 +565,29 @@
524
  loopTimer = setTimeout(fetchLoop, 5000);
525
  }
526
 
527
- document.addEventListener('DOMContentLoaded', () => { initCharts(); if(API_KEY) fetchLoop(); });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
528
  </script>
529
  </body>
530
  </html>
 
286
  <div class="panel"><div class="kpi-lbl">Success Rate Discrepancy (%)</div><div class="chart-wrap"><canvas id="chartSr"></canvas></div></div>
287
  </div>
288
 
289
+ <!-- .env Configuration Core -->
290
+ <div class="section-title" style="margin-top:40px; color:var(--accent)">
291
+ <span>⚙️ System Configuration Core (.env IDE)</span>
292
+ <span class="tag" style="font-size:11px; background:rgba(59,130,246,0.1); color:var(--accent)">Live Hot-Reloading Enabled</span>
293
+ </div>
294
+ <div class="panel" style="border-color: rgba(59,130,246,0.3); background: rgba(19,22,32,0.95)">
295
+ <div style="color:var(--text-dim); font-size:13px; margin-bottom:20px;">
296
+ These text areas write directly to the <b>.env</b> core file. Changes are hot-reloaded into memory instantly. Separate models by commas.
297
+ </div>
298
+ <div class="grid-2">
299
+ <div style="background:rgba(0,0,0,0.3); padding:16px; border-radius:8px; border:1px solid var(--border);">
300
+ <div class="flex justify-between items-center" style="margin-bottom:12px;">
301
+ <strong style="font-family:var(--font-mono); font-size:13px; color:#a78bfa">LLM_MODELS_OPENROUTER</strong>
302
+ <button class="btn btn-ok" style="padding:4px 8px; font-size:11px" onclick="saveEnvConfig('openrouter')">💾 Save & Reload</button>
303
+ </div>
304
+ <textarea id="env_openrouter" class="form-input" style="height:60px; resize:vertical; font-size:12px;"></textarea>
305
+ </div>
306
+ <div style="background:rgba(0,0,0,0.3); padding:16px; border-radius:8px; border:1px solid var(--border);">
307
+ <div class="flex justify-between items-center" style="margin-bottom:12px;">
308
+ <strong style="font-family:var(--font-mono); font-size:13px; color:#f472b6">LLM_MODELS_GROQ</strong>
309
+ <button class="btn btn-ok" style="padding:4px 8px; font-size:11px" onclick="saveEnvConfig('groq')">💾 Save & Reload</button>
310
+ </div>
311
+ <textarea id="env_groq" class="form-input" style="height:60px; resize:vertical; font-size:12px;"></textarea>
312
+ </div>
313
+ <div style="background:rgba(0,0,0,0.3); padding:16px; border-radius:8px; border:1px solid var(--border);">
314
+ <div class="flex justify-between items-center" style="margin-bottom:12px;">
315
+ <strong style="font-family:var(--font-mono); font-size:13px; color:#34d399">LLM_MODELS_GEMINI</strong>
316
+ <button class="btn btn-ok" style="padding:4px 8px; font-size:11px" onclick="saveEnvConfig('gemini')">💾 Save & Reload</button>
317
+ </div>
318
+ <textarea id="env_gemini" class="form-input" style="height:60px; resize:vertical; font-size:12px;"></textarea>
319
+ </div>
320
+ <div style="background:rgba(0,0,0,0.3); padding:16px; border-radius:8px; border:1px solid var(--border);">
321
+ <div class="flex justify-between items-center" style="margin-bottom:12px;">
322
+ <strong style="font-family:var(--font-mono); font-size:13px; color:#fbbf24">LLM_MODELS_OPENAI</strong>
323
+ <button class="btn btn-ok" style="padding:4px 8px; font-size:11px" onclick="saveEnvConfig('openai')">💾 Save & Reload</button>
324
+ </div>
325
+ <textarea id="env_openai" class="form-input" style="height:60px; resize:vertical; font-size:12px;"></textarea>
326
+ </div>
327
+ </div>
328
+ </div>
329
+
330
  </div>
331
  </main>
332
 
 
342
  if(API_KEY) { document.getElementById('authOverlay').classList.remove('active'); setTimeout(() => document.getElementById('authOverlay').style.display='none', 200); }
343
  function saveKey() {
344
  const v = document.getElementById('apiKeyIn').value.trim();
345
+ if(v) { API_KEY = v; localStorage.setItem('awn_api_key', v); document.getElementById('authOverlay').classList.remove('active'); setTimeout(() => document.getElementById('authOverlay').style.display='none', 200); fetchLoop(); fetchEnvConfig(); }
346
  }
347
  function logout() {
348
  localStorage.removeItem('awn_api_key'); API_KEY = null;
 
565
  loopTimer = setTimeout(fetchLoop, 5000);
566
  }
567
 
568
// .env Editor Functions
// Pull the current provider model lists from the admin API and mirror them
// into the editor textareas — unless the user is currently typing in one.
async function fetchEnvConfig() {
    if (!API_KEY) return;
    try {
        const res = await fetch('/api/ai/admin/env', { headers: { 'X-API-Key': API_KEY } });
        if (!res.ok) return;
        const data = await res.json();
        // Skip the refresh while a textarea has focus so we never clobber an
        // edit in progress; activeElement can be null in edge cases, so guard it.
        const active = document.activeElement;
        if (active && active.tagName === "TEXTAREA") return;
        document.getElementById('env_groq').value = data.groq || '';
        document.getElementById('env_gemini').value = data.gemini || '';
        document.getElementById('env_openai').value = data.openai || '';
        document.getElementById('env_openrouter').value = data.openrouter || '';
    } catch (e) {
        // Network/parse failures are non-fatal for the dashboard, but log
        // them instead of swallowing silently.
        console.warn('fetchEnvConfig failed:', e);
    }
}
584
+
585
// Persist one provider's textarea contents via the shared admin command helper.
async function saveEnvConfig(provider) {
    const field = document.getElementById(`env_${provider}`);
    adminCmd('POST', '/api/ai/admin/env', { provider, models_string: field.value });
}
589
+
590
// Boot sequence: initialize charts first, then start the polling loops
// only once an API key is available.
document.addEventListener('DOMContentLoaded', () => {
    initCharts();
    if (API_KEY) {
        fetchLoop();
        fetchEnvConfig();
    }
});
591
  </script>
592
  </body>
593
  </html>