Spaces:
Sleeping
Sleeping
Update code
Browse files- dashboard.py +29 -0
- templates/ai_search.html +22 -0
dashboard.py
CHANGED
|
@@ -2011,6 +2011,35 @@ def api_gemini_status():
|
|
| 2011 |
})
|
| 2012 |
|
| 2013 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2014 |
def fallback_ai_search(query: str):
|
| 2015 |
"""Fallback search when AI is not available."""
|
| 2016 |
conn = get_db()
|
|
|
|
| 2011 |
})
|
| 2012 |
|
| 2013 |
|
| 2014 |
+
@app.route('/api/hybrid/status')
def api_hybrid_status():
    """Report readiness of the hybrid search cache as JSON.

    The payload contains one boolean per cached component
    (``chunk_embeddings_loaded``, ``bm25_loaded``, ``model_loaded``),
    the number of cached chunks (``chunk_count``), and an overall
    ``ready`` flag.  On failure the same keys are present (all
    False / 0) plus an ``error`` message, so the frontend can always
    read a uniform schema.
    """
    try:
        # Imported lazily: loading the hybrid-search module (and its
        # model/embeddings) at app start would slow cold boot.
        from hybrid_search import get_hybrid_search
        hs = get_hybrid_search()

        # Check which components have been loaded into memory.
        chunk_loaded = hs.chunk_embeddings is not None
        bm25_loaded = hs.bm25 is not None
        model_loaded = hs.model is not None

        # Chunk count is 0 until the embeddings cache is populated.
        chunk_count = len(hs.chunk_embeddings) if chunk_loaded else 0

        return jsonify({
            'chunk_embeddings_loaded': chunk_loaded,
            'bm25_loaded': bm25_loaded,
            'model_loaded': model_loaded,
            'chunk_count': chunk_count,
            # Either retrieval path (dense or BM25) is enough to serve queries.
            'ready': chunk_loaded or bm25_loaded
        })
    except Exception as e:
        # Import/initialization failures are expected while the cache is
        # warming up; mirror the success schema so callers never hit a
        # missing key, and surface the reason in 'error'.
        return jsonify({
            'chunk_embeddings_loaded': False,
            'bm25_loaded': False,
            'model_loaded': False,
            'chunk_count': 0,
            'ready': False,
            'error': str(e)
        })
|
| 2041 |
+
|
| 2042 |
+
|
| 2043 |
def fallback_ai_search(query: str):
|
| 2044 |
"""Fallback search when AI is not available."""
|
| 2045 |
conn = get_db()
|
templates/ai_search.html
CHANGED
|
@@ -240,6 +240,7 @@
|
|
| 240 |
<header class="header">
|
| 241 |
<h1>🤖 AI Search</h1>
|
| 242 |
<div class="header-controls">
|
|
|
|
| 243 |
<span id="gemini-status" class="status-badge unavailable">Checking...</span>
|
| 244 |
</div>
|
| 245 |
</header>
|
|
@@ -297,6 +298,26 @@
|
|
| 297 |
</main>
|
| 298 |
|
| 299 |
<script>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 300 |
// Check Gemini status on load
|
| 301 |
async function checkGeminiStatus() {
|
| 302 |
try {
|
|
@@ -441,6 +462,7 @@
|
|
| 441 |
|
| 442 |
// Initialize
|
| 443 |
document.addEventListener('DOMContentLoaded', () => {
|
|
|
|
| 444 |
checkGeminiStatus();
|
| 445 |
document.getElementById('ai-query').focus();
|
| 446 |
});
|
|
|
|
| 240 |
<header class="header">
|
| 241 |
<h1>🤖 AI Search</h1>
|
| 242 |
<div class="header-controls">
|
| 243 |
+
<span id="cache-status" class="status-badge unavailable">Loading...</span>
|
| 244 |
<span id="gemini-status" class="status-badge unavailable">Checking...</span>
|
| 245 |
</div>
|
| 246 |
</header>
|
|
|
|
| 298 |
</main>
|
| 299 |
|
| 300 |
<script>
|
| 301 |
+
// Check cache status on load
|
| 302 |
+
// Query the backend for hybrid-search cache readiness and reflect
// the result in the header badge.
async function checkCacheStatus() {
    const badge = document.getElementById('cache-status');
    try {
        const res = await fetch('/api/hybrid/status');
        const info = await res.json();

        // Not ready yet: the server is still warming its caches.
        if (!info.ready) {
            badge.className = 'status-badge unavailable';
            badge.innerHTML = '⌛ Loading embeddings...';
            return;
        }

        badge.className = 'status-badge available';
        badge.innerHTML = `✓ ${info.chunk_count.toLocaleString()} chunks cached`;
    } catch (err) {
        // Network failure or non-JSON response: show a generic error state.
        badge.className = 'status-badge unavailable';
        badge.innerHTML = '✗ Cache Error';
    }
}
|
| 320 |
+
|
| 321 |
// Check Gemini status on load
|
| 322 |
async function checkGeminiStatus() {
|
| 323 |
try {
|
|
|
|
| 462 |
|
| 463 |
// Initialize
|
| 464 |
// Page initialization: kick off both status checks and focus the query box.
document.addEventListener('DOMContentLoaded', function () {
    checkCacheStatus();
    checkGeminiStatus();
    document.getElementById('ai-query').focus();
});
|