tekrajchhetri commited on
Commit
26a8e29
·
verified ·
1 Parent(s): f996b54

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -1348
app.py DELETED
@@ -1,1348 +0,0 @@
1
- #!/usr/bin/env python3
2
- """
3
- Ontology Semantic Search — Production Gradio App
4
- =================================================
5
- • UI loads immediately with empty state
6
- • Assets are downloaded only when the user clicks "Download & Load"
7
- • Warm-loads model in background once assets are present
8
- • Semantic search via ChromaDB + SentenceTransformers
9
- • Raw SQL console (SELECT-only by default)
10
- • Rebuild index from SQLite
11
-
12
- Dataset : https://huggingface.co/datasets/sensein/ontology-sqlite-vectorstore
13
- Run : python app_gradio.py
14
- """
15
-
16
- from __future__ import annotations
17
-
18
- import inspect
19
- import os
20
- import re
21
- import shutil
22
- import sqlite3
23
- import threading
24
- import time
25
- from dataclasses import dataclass
26
- from pathlib import Path
27
- from typing import Any, Dict, List, Optional, Tuple
28
-
29
- import pandas as pd
30
- import gradio as gr
31
- import chromadb
32
- from chromadb.config import Settings
33
- from chromadb.errors import InternalError
34
- from sentence_transformers import SentenceTransformer
35
- import torch
36
-
37
# ─────────────────────────────────────────────────────────────────────────────
# Paths & defaults
# ─────────────────────────────────────────────────────────────────────────────

# All locations are overridable via environment variables so the app can run
# locally or inside a Space with a mounted cache volume.
CACHE_DIR = Path(os.getenv("ONTOLOGY_CACHE_DIR", "."))
DEFAULT_DB_PATH = str(CACHE_DIR / os.getenv("ONTOLOGY_DB_NAME", "bioportal.db"))
DEFAULT_CHROMA = str(CACHE_DIR / os.getenv("ONTOLOGY_CHROMA_NAME", "bioportal_chroma"))
DEFAULT_COLLECTION = os.getenv("ONTOLOGY_COLLECTION", "ontology_classes")
DEFAULT_MODEL = os.getenv("ONTOLOGY_EMBED_MODEL", "BAAI/bge-base-en-v1.5")

# All supported models shown in the UI dropdowns
MODEL_CHOICES = [
    "BAAI/bge-base-en-v1.5",
    "BAAI/bge-large-en-v1.5",
    "BAAI/bge-small-en-v1.5",
    "sentence-transformers/all-MiniLM-L6-v2",
]

# HuggingFace dataset hosting the pre-built SQLite DB and Chroma archive.
HF_DATASET_REPO = "sensein/ontology-sqlite-vectorstore"
HF_DB_FILENAME = "bioportal.db"
HF_CHROMA_ARCHIVE = "bioportal_chroma.tar.gz"

LOGO_URL = "https://avatars.githubusercontent.com/u/47326880"
# BGE-style models expect this instruction prefix on queries (not documents).
QUERY_PREFIX = "Represent this sentence for searching relevant passages: "

# Ensure the cache directory exists before any download is attempted.
CACHE_DIR.mkdir(parents=True, exist_ok=True)
63
-
64
# ─────────────────────────────────────────────────────────────────────────────
# CSS
# ─────────────────────────────────────────────────────────────────────────────

# Custom styles keyed to the elem_id values used when building the Blocks UI
# (header, status bar, sidebar, query box, result table, SQL editor, log).
CUSTOM_CSS = """
body, .gradio-container { font-family: 'Inter', system-ui, sans-serif !important; }

#app-header {
  display: flex;
  align-items: center;
  gap: 20px;
  background: linear-gradient(135deg, #1e3a5f 0%, #0f2540 60%, #162d4a 100%);
  border-radius: 12px;
  padding: 20px 28px;
  margin-bottom: 16px;
  box-shadow: 0 4px 20px rgba(0,0,0,.25);
}
#app-header img {
  width: 56px; height: 56px;
  border-radius: 10px;
  border: 2px solid rgba(255,255,255,.2);
  flex-shrink: 0;
}
#app-header-text h1 {
  margin: 0;
  font-size: 1.6rem; font-weight: 700;
  color: #fff; letter-spacing: -.4px;
}
#app-header-text p {
  margin: 4px 0 0;
  font-size: .875rem;
  color: rgba(255,255,255,.65);
}

#status-bar textarea {
  background: #f0fdf4 !important;
  border: 1.5px solid #86efac !important;
  border-radius: 8px !important;
  color: #166534 !important;
  font-size: .82rem !important;
  font-family: 'JetBrains Mono', monospace !important;
  padding: 6px 12px !important;
  min-height: 36px !important;
}

#sidebar {
  background: #f8fafc;
  border: 1px solid #e2e8f0;
  border-radius: 12px;
  padding: 16px;
}

#query-box textarea {
  font-size: 1rem !important;
  border: 2px solid #cbd5e1 !important;
  border-radius: 10px !important;
  padding: 12px !important;
  transition: border-color .15s;
}
#query-box textarea:focus { border-color: #3b82f6 !important; }

#result-table table { font-size: .83rem !important; }
#result-table th {
  background: #1e3a5f !important;
  color: #fff !important;
  font-weight: 600 !important;
  position: sticky; top: 0;
}
#result-table tr:hover td { background: #eff6ff !important; }

#sql-editor textarea {
  font-family: 'JetBrains Mono', 'Fira Code', monospace !important;
  font-size: .88rem !important;
  background: #0f172a !important;
  color: #e2e8f0 !important;
  border-radius: 8px !important;
  border: 1.5px solid #334155 !important;
}

#dl-log textarea {
  background: #fffbeb !important;
  border: 1.5px solid #fcd34d !important;
  border-radius: 8px !important;
  color: #78350f !important;
  font-size: .82rem !important;
  font-family: 'JetBrains Mono', monospace !important;
}
"""
152
-
153
- # ─────────────────────────────────────────────────────────────────────────────
154
- # Device helpers
155
- # ─────────────────────────────────────────────────────────────────────────────
156
-
157
def pick_device(user_device: Optional[str] = None) -> str:
    """Resolve the compute device string.

    An explicit, non-"auto" choice wins; otherwise prefer CUDA, then Apple
    MPS when the backend exists and is available, falling back to CPU.
    """
    if user_device not in (None, "", "auto"):
        return user_device
    if torch.cuda.is_available():
        return "cuda"
    mps_backend = getattr(torch.backends, "mps", None)
    if mps_backend is not None and mps_backend.is_available():
        return "mps"
    return "cpu"
165
-
166
-
167
def make_embedder(model_name: str, device: str) -> SentenceTransformer:
    """Instantiate a SentenceTransformer on *device*.

    On CUDA we opportunistically switch the model to fp16; if the cast
    fails for any reason the full-precision model is kept.
    """
    model = SentenceTransformer(model_name, device=device)
    if device != "cuda":
        return model
    try:
        return model.half()
    except Exception:
        return model
175
-
176
-
177
- # ─────────────────────────────────────────────────────────────────────────────
178
- # Safe encode — handles sentence-transformers API differences across versions
179
- # ─────────────────────────────────────────────────────────────────────────────
180
-
181
- _encode_sig_cache: Dict[int, set] = {}
182
-
183
- def _safe_encode(emb: SentenceTransformer, sentences: List[str], *,
184
- normalize_embeddings: bool = True, batch_size: int = 32):
185
- key = id(emb)
186
- if key not in _encode_sig_cache:
187
- _encode_sig_cache[key] = set(inspect.signature(emb.encode).parameters)
188
- accepted = _encode_sig_cache[key]
189
- kw: Dict[str, Any] = {}
190
- if "normalize_embeddings" in accepted: kw["normalize_embeddings"] = normalize_embeddings
191
- if "show_progress_bar" in accepted: kw["show_progress_bar"] = False
192
- if "batch_size" in accepted: kw["batch_size"] = batch_size
193
- if "num_workers" in accepted: kw["num_workers"] = 0
194
- return emb.encode(sentences, **kw)
195
-
196
-
197
- # ─────────────────────────────────────────────────────────────────────────────
198
- # HuggingFace download (called only when user clicks Download & Load)
199
- # ─────────────────────────────────────────────────────────────────────────────
200
-
201
def _hf_url(filename: str) -> str:
    """Return the direct-download URL for *filename* in the HF dataset repo.

    Bug fix: the original returned the literal path ``/resolve/main/(unknown)``
    — the *filename* argument was never interpolated, so every asset download
    hit a non-existent URL.
    """
    return (
        f"https://huggingface.co/datasets/{HF_DATASET_REPO}"
        f"/resolve/main/{filename}"
    )
206
-
207
-
208
def download_assets(force: bool = False,
                    progress_cb=None) -> Tuple[bool, str]:
    """Download the SQLite DB and (optionally) the pre-built Chroma archive.

    Args:
        force: re-download even when the files already exist locally.
        progress_cb: optional callable(str) receiving each progress line.

    Returns:
        (ok, message) — ok is False only when the SQLite DB download fails;
        a missing Chroma archive is tolerated because the index can be
        rebuilt from the DB via the Rebuild Index tab.
    """
    import urllib.request

    def _log(msg: str):
        # Mirror every progress line to the callback (if any) and stdout.
        if progress_cb:
            progress_cb(msg)
        print(msg)

    db_path = Path(DEFAULT_DB_PATH)
    chroma_path = Path(DEFAULT_CHROMA)

    # ── SQLite DB ──────────────────────────────────────────────────────────
    if db_path.exists() and not force:
        _log(f"SQLite DB already present: {db_path}")
    else:
        url = _hf_url(HF_DB_FILENAME)
        _log(f"Downloading SQLite DB...\n {url}")
        db_path.parent.mkdir(parents=True, exist_ok=True)
        # Download to a temp file first so a partial download never
        # masquerades as a valid database.
        tmp = db_path.with_suffix(".tmp")
        try:
            def _hook(count, block, total):
                if total > 0:
                    pct = min(count * block / total * 100, 100)
                    _log(f" SQLite: {pct:.1f}% "
                         f"({count*block/1e6:.1f} MB / {total/1e6:.1f} MB)")
            urllib.request.urlretrieve(url, tmp, reporthook=_hook)
            tmp.rename(db_path)
            _log(f"SQLite DB saved -> {db_path}")
        except Exception as e:
            if tmp.exists():
                tmp.unlink()
            return False, f"Failed to download SQLite DB: {e}"

    # ── Pre-built Chroma index (optional) ─────────────────────────────────
    if chroma_path.exists() and not force:
        _log(f"Chroma index already present: {chroma_path}")
    else:
        url = _hf_url(HF_CHROMA_ARCHIVE)
        archive = CACHE_DIR / HF_CHROMA_ARCHIVE
        _log(f"Attempting to download pre-built Chroma index...\n {url}")
        try:
            def _hook2(count, block, total):
                if total > 0:
                    pct = min(count * block / total * 100, 100)
                    _log(f" Chroma: {pct:.1f}% "
                         f"({count*block/1e6:.1f} MB / {total/1e6:.1f} MB)")
            urllib.request.urlretrieve(url, archive, reporthook=_hook2)
            _log("Extracting Chroma index...")
            import tarfile
            # Remove any stale index before extracting the fresh one.
            if chroma_path.exists():
                shutil.rmtree(chroma_path)
            with tarfile.open(archive) as tf:
                tf.extractall(CACHE_DIR)
            archive.unlink(missing_ok=True)
            _log(f"Chroma index extracted -> {chroma_path}")
        except Exception as e:
            # Best-effort: the archive may simply not exist in the repo.
            _log(
                f"Pre-built index not available ({e}).\n"
                "Use the Rebuild Index tab to build it from the DB."
            )
            if archive.exists():
                archive.unlink(missing_ok=True)

    return True, "Assets ready."
273
-
274
-
275
- # ─────────────────────────────────────────────────────────────────────────────
276
- # Runtime singleton
277
- # ─────────────────────────────────────────────────────────────────────────────
278
-
279
@dataclass
class Runtime:
    """Everything needed to serve one search request."""
    embedder: SentenceTransformer  # loaded sentence-transformers model
    col: Any                       # Chroma collection handle
    db_con: sqlite3.Connection     # shared SQLite connection (guarded by _db_lock)
    device: str                    # "cuda" / "mps" / "cpu"
    model: str                     # model name actually loaded
    collection: str                # Chroma collection name
287
-
288
-
289
# Module-level singleton state, published by the background loader.
_runtime: Optional[Runtime] = None      # the loaded Runtime, once ready
_runtime_err: Optional[str] = None      # last load error, user-facing text
_loading: bool = False  # False until explicitly triggered
_load_log: List[str] = []               # progress lines shown by the status UI
_runtime_lock = threading.Lock()        # protects _runtime / _runtime_err
_db_lock = threading.Lock()             # serializes access to the SQLite connection
295
-
296
-
297
def _log_startup(msg: str):
    """Record *msg* in the in-memory startup log and echo it to stdout."""
    _load_log.append(msg)
    print(msg)
300
-
301
-
302
- def _open_sqlite(path: str) -> sqlite3.Connection:
303
- con = sqlite3.connect(path, check_same_thread=False)
304
- for p in (
305
- "PRAGMA journal_mode=WAL",
306
- "PRAGMA synchronous=NORMAL",
307
- "PRAGMA temp_store=MEMORY",
308
- "PRAGMA cache_size=-65536",
309
- ):
310
- con.execute(p)
311
- return con
312
-
313
-
314
def _open_chroma(chroma_path: str, collection_name: str):
    """Open (or create) the persistent Chroma collection, cosine-distance HNSW.

    Returns the (client, collection) pair so callers can keep either handle.
    """
    client = chromadb.PersistentClient(
        path=chroma_path,
        settings=Settings(anonymized_telemetry=False),
    )
    collection = client.get_or_create_collection(
        name=collection_name,
        metadata={"hnsw:space": "cosine"},
    )
    return client, collection
324
-
325
-
326
def _do_load(db_path: str, chroma_path: str, collection: str,
             model_name: str, device: str):
    """Background loader: model -> warm-up -> Chroma -> SQLite.

    Publishes the result into module-level ``_runtime`` / ``_runtime_err``
    under ``_runtime_lock`` and always clears ``_loading`` on exit.
    """
    global _runtime, _runtime_err, _loading
    try:
        t0 = time.time()
        # Auto-correct model if it doesn't match the stored index dimension
        if os.path.exists(chroma_path):
            model_name = resolve_model_for_index(chroma_path, collection, model_name)
        _log_startup(f"Loading model: {model_name} on {device}")
        embedder = make_embedder(model_name, device)
        # Warm-up: force PyTorch kernel init in the same thread the encoder runs in
        _log_startup("Warming up model (dummy encode)...")
        future = _encode_executor.submit(
            _safe_encode, embedder, ["warmup"],
            normalize_embeddings=True, batch_size=1,
        )
        future.result(timeout=120)
        _log_startup("Model warm-up done.")
        _log_startup(f"Model ready ({time.time()-t0:.1f}s). Opening Chroma...")
        _, col = _open_chroma(chroma_path, collection)
        _log_startup("Chroma ready. Opening SQLite...")
        con = _open_sqlite(db_path)
        with _runtime_lock:
            _runtime = Runtime(
                embedder=embedder, col=col, db_con=con,
                device=device, model=model_name, collection=collection,
            )
        _log_startup(
            f"Ready in {time.time()-t0:.1f}s | "
            f"model={model_name} | dim={detect_collection_dim(chroma_path, collection)}"
        )
    except Exception as e:
        with _runtime_lock:
            _runtime_err = f"{type(e).__name__}: {e}"
        _log_startup(f"Load failed: {e}")
    finally:
        # Always flip the flag so pollers stop showing "loading".
        _loading = False
363
-
364
-
365
def get_runtime() -> Tuple[Optional[Runtime], Optional[str]]:
    """Return ``(runtime, None)`` when ready, else ``(None, user-facing reason)``.

    The three not-ready cases are: still loading, a recorded load error,
    and nothing loaded yet.
    """
    if _loading:
        return None, "Loading — please wait a moment and retry."
    with _runtime_lock:
        if _runtime_err:
            return None, f"Error: {_runtime_err}"
        if _runtime is not None:
            return _runtime, None
        return None, (
            "No data loaded. "
            "Go to the Data & Setup tab to download assets."
        )
377
-
378
-
379
def reload_runtime(db_path: str, chroma_path: str, collection: str,
                   model_name: str, device: str) -> str:
    """Tear down the current runtime and load a new one on a daemon thread.

    Blocks for up to ~20 s waiting for fast reloads to finish, then returns
    a human-readable status line (either the error or a Ready summary).
    """
    global _runtime, _runtime_err, _loading
    _loading = True
    with _runtime_lock:
        if _runtime:
            try:
                _runtime.db_con.close()
            except Exception:
                pass
        _runtime = None
        _runtime_err = None
        _load_log.clear()

    threading.Thread(
        target=_do_load,
        args=(db_path, chroma_path, collection, model_name, device),
        daemon=True,
    ).start()

    for _ in range(80):  # wait up to 20 s for fast reloads
        time.sleep(0.25)
        if not _loading:
            break

    # If still loading after the wait, get_runtime reports the loading state.
    rt, err = get_runtime()
    if err:
        return err
    return (
        f"Ready | model={rt.model} | "
        f"device={rt.device} | collection={rt.collection}"
    )
411
-
412
-
413
- # ─────────────────────────────────────────────────────────────────────────────
414
- # Dimension → model mapping
415
- # ─────────────────────────────────────────────────────────────────────────────
416
-
417
# Maps known embedding dimensions to a default model that produces them.
# Used to auto-select the right model when the Chroma index already exists.
# NOTE(review): all-MiniLM-L6-v2 is also 384-dim but is not representable
# here since each dimension maps to a single model — verify acceptable.
_DIM_TO_MODEL: Dict[int, str] = {
    384: "BAAI/bge-small-en-v1.5",
    768: "BAAI/bge-base-en-v1.5",
    1024: "BAAI/bge-large-en-v1.5",
}
424
-
425
-
426
def detect_collection_dim(chroma_path: str, collection_name: str) -> Optional[int]:
    """Peek at one stored vector to learn the collection's embedding dimension.

    Returns None when the collection is empty, missing, or unreadable
    (any exception is swallowed on purpose — this is a best-effort probe).
    """
    try:
        client = chromadb.PersistentClient(
            path=chroma_path,
            settings=Settings(anonymized_telemetry=False),
        )
        sample = client.get_collection(name=collection_name).peek(limit=1)
        vectors = sample.get("embeddings")
        if vectors and len(vectors) > 0 and len(vectors[0]) > 0:
            return len(vectors[0])
    except Exception:
        pass
    return None
445
-
446
-
447
def resolve_model_for_index(chroma_path: str, collection_name: str,
                            requested_model: str) -> str:
    """Reconcile the requested embedding model with an existing Chroma index.

    If the index already holds vectors whose dimension differs from what
    *requested_model* would produce, return the best-matching known model
    for the stored dimension (logging the switch); otherwise return
    *requested_model* unchanged.

    Fixes: removed an unused ``import numpy as np`` — the probe only reads
    ``test_vec.shape`` and never uses the ``np`` name.
    """
    dim = detect_collection_dim(chroma_path, collection_name)
    if dim is None:
        return requested_model  # empty or new index — use what was asked

    # Known models: invert the dim->model table to look up the requested
    # model's dimension without instantiating it.
    model_to_dim = {model: d for d, model in _DIM_TO_MODEL.items()}
    requested_dim = model_to_dim.get(requested_model)

    if requested_dim is not None and requested_dim != dim:
        fallback = _DIM_TO_MODEL.get(dim, requested_model)
        print(
            f"[dim-check] Index has dim={dim}, but '{requested_model}' "
            f"produces dim={requested_dim}. "
            f"Auto-switching to '{fallback}'."
        )
        return fallback

    # Unknown model — do a quick encode to verify dimension.
    if requested_dim is None:
        try:
            tmp_emb = SentenceTransformer(requested_model)
            test_vec = tmp_emb.encode(["test"], normalize_embeddings=True)
            actual_dim = int(test_vec.shape[1])
            del tmp_emb  # release model memory promptly
            if actual_dim != dim:
                fallback = _DIM_TO_MODEL.get(dim, requested_model)
                print(
                    f"[dim-check] Index has dim={dim}, model produces "
                    f"dim={actual_dim}. Auto-switching to '{fallback}'."
                )
                return fallback
        except Exception:
            # Probe failures are non-fatal; proceed with the requested model.
            pass

    return requested_model
490
-
491
-
492
- # ─────────────────────────────────────────────────────────────────────────────
493
- # Startup — check local files only, never download automatically
494
- # ─────────────────────────────────────────────────────────────────────────────
495
-
496
def _startup():
    """Warm start: load only when both assets already exist locally.

    Never downloads anything automatically; when assets are missing it
    records a user-facing hint in ``_runtime_err`` pointing at the
    Data & Setup / Rebuild tabs.
    """
    global _loading, _runtime_err
    db_ok = os.path.exists(DEFAULT_DB_PATH)
    chroma_ok = os.path.exists(DEFAULT_CHROMA)

    if db_ok and chroma_ok:
        _loading = True
        # Synchronous call — _startup itself runs on a background thread.
        _do_load(
            DEFAULT_DB_PATH, DEFAULT_CHROMA, DEFAULT_COLLECTION,
            DEFAULT_MODEL, pick_device(),
        )
    elif not db_ok:
        _runtime_err = (
            f"SQLite DB not found at '{DEFAULT_DB_PATH}'. "
            "Use the Data & Setup tab to download assets."
        )
    else:
        _runtime_err = (
            f"Chroma index not found at '{DEFAULT_CHROMA}'. "
            "Use the Data & Setup tab to download it, "
            "or use the Rebuild Index tab to build from the existing DB."
        )
518
-
519
-
520
- threading.Thread(target=_startup, daemon=True).start()
521
-
522
-
523
- # ─────────────────────────────────────────────────────────────────────────────
524
- # SQLite helpers
525
- # ─────────────────────────────────────────────────────────────────────────────
526
-
527
def fetch_synonyms(con: sqlite3.Connection, sqlite_id: int) -> List[str]:
    """Return the alphabetically sorted synonyms for one class row id."""
    cursor = con.execute(
        "SELECT synonym FROM synonyms WHERE class_id=? ORDER BY synonym",
        (sqlite_id,),
    )
    return [synonym for (synonym,) in cursor.fetchall()]
532
-
533
-
534
def concept_details(con: sqlite3.Connection, class_uri: str) -> Optional[Dict[str, Any]]:
    """Fetch the full record (base row + synonyms + parents) for one class URI.

    Returns None when no class with *class_uri* exists.
    """
    base = con.execute(
        "SELECT id, ontology_id, preferred_label, definition, notation, obsolete "
        "FROM classes WHERE class_uri=?", (class_uri,),
    ).fetchone()
    if base is None:
        return None

    class_id = int(base[0])
    synonym_rows = con.execute(
        "SELECT synonym, syn_type FROM synonyms "
        "WHERE class_id=? ORDER BY synonym", (class_id,)
    ).fetchall()
    parent_rows = con.execute(
        "SELECT parent_uri FROM parents "
        "WHERE class_id=? ORDER BY parent_uri", (class_id,)
    ).fetchall()

    return {
        "sqlite_id": class_id,
        "ontology": base[1],
        "label": base[2],
        "definition": base[3],
        "notation": base[4],
        "obsolete": bool(base[5]),
        "synonyms": [{"term": term, "type": syn_type}
                     for term, syn_type in synonym_rows],
        "parents": [uri for (uri,) in parent_rows],
    }
560
-
561
-
562
def iter_classes_for_rebuild(con: sqlite3.Connection):
    """Yield ``(id, ontology_id, class_uri, label, notation, obsolete)`` per class.

    NULL labels/notations are coalesced to '' and NULL obsolete to 0 so
    downstream indexing never sees None.
    """
    sql = (
        "SELECT id, ontology_id, class_uri, "
        "COALESCE(preferred_label,''), "
        "COALESCE(notation,''), "
        "COALESCE(obsolete,0) FROM classes"
    )
    for sid, oid, uri, label, notation, obsolete in con.execute(sql):
        yield int(sid), oid, uri, label, notation, int(obsolete)
571
-
572
-
573
def fetch_definition(con: sqlite3.Connection, sqlite_id: int) -> str:
    """Return the stripped definition text for a class id, or '' if absent."""
    row = con.execute(
        "SELECT definition FROM classes WHERE id=?", (sqlite_id,)
    ).fetchone()
    if row is None:
        return ""
    return (row[0] or "").strip()
578
-
579
-
580
def build_embed_text(con: sqlite3.Connection, sqlite_id: int, label: str) -> str:
    """Compose the document text embedded for one class.

    Layout (each section appears only when non-empty):
        LABEL: <preferred_label>
        SYNONYMS: <syn1> | <syn2> | ...   (capped at 64 synonyms)
        DEFINITION: <full definition text>

    Falls back to ``sqlite_id=<id>`` when every section is empty, so the
    vector store never receives an empty document.
    """
    clean_label = (label or "").strip()
    synonyms = [s.strip() for s in fetch_synonyms(con, sqlite_id) if (s or "").strip()]
    definition = fetch_definition(con, sqlite_id)

    sections: List[str] = []
    if clean_label:
        sections.append(f"LABEL: {clean_label}")
    if synonyms:
        sections.append("SYNONYMS: " + " | ".join(synonyms[:64]))
    if definition:
        sections.append(f"DEFINITION: {definition}")

    return "\n".join(sections) or f"sqlite_id={sqlite_id}"
606
-
607
-
608
- # ─────────────────────────────────────────────────────────────────────────────
609
- # Query embedding cache (avoids re-encoding identical queries)
610
- # ─────────────────────────────────────────────────────────────────────────────
611
-
612
import concurrent.futures
from functools import lru_cache
# Single-worker executor so every encode runs on one dedicated thread
# (the model warm-up in _do_load initializes kernels on this same thread).
_encode_executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)

@lru_cache(maxsize=256)
def _cached_query_vec(model_id: int, query: str) -> tuple:
    """
    Encode a query string and return the embedding as a tuple (hashable).
    Keyed on model object id so cache is invalidated on model reload.
    Returns () when no runtime is loaded.

    NOTE(review): id() values can be reused after the old model is garbage
    collected, so a stale cache hit is theoretically possible after a
    reload — verify whether this matters in practice.
    """
    rt, _ = get_runtime()
    if rt is None:
        return ()
    vec = _safe_encode(
        rt.embedder, [QUERY_PREFIX + query],
        normalize_embeddings=True, batch_size=1,
    )
    return tuple(vec[0].tolist())
630
-
631
-
632
- # ─────────────────────────────────────────────────────────────────────────────
633
- # Semantic search
634
- # ─────────────────────────────────────────────────────────────────────────────
635
-
636
def semantic_search(
    query: str, *, rt: Runtime,
    n_results: int, ontology: Optional[str],
    include_obsolete: bool, min_score: Optional[float],
    details: bool,
) -> List[Dict[str, Any]]:
    """Vector-search the Chroma collection and enrich hits from SQLite.

    Args:
        query: free-text query; QUERY_PREFIX is prepended before encoding.
        rt: the loaded runtime (embedder, collection, DB connection).
        n_results: number of hits requested from Chroma.
        ontology: optional ontology_id metadata filter.
        include_obsolete: when False, restricts to metadata obsolete == "0".
        min_score: optional similarity cutoff applied after the query.
        details: when True, attaches the full concept record per hit.

    Returns a list of result dicts sorted as Chroma returned them.
    """
    # Use cached embedding if same query was seen before
    cached = _cached_query_vec(id(rt.embedder), query)
    if cached:
        vec = [list(cached)]
    else:
        vec = _safe_encode(
            rt.embedder, [QUERY_PREFIX + query],
            normalize_embeddings=True, batch_size=1,
        ).tolist()

    where: Dict[str, Any] = {}
    if ontology:
        where["ontology_id"] = ontology
    if not include_obsolete:
        # Metadata stores the obsolete flag as the string "0"/"1".
        where["obsolete"] = "0"

    kw: Dict[str, Any] = {
        "query_embeddings": vec,
        "n_results": int(n_results),
    }
    if where:
        kw["where"] = where

    res = rt.col.query(**kw)
    ids = res.get("ids", [[]])[0]
    metadatas = res.get("metadatas", [[]])[0]
    distances = res.get("distances", [[]])[0]

    out: List[Dict[str, Any]] = []
    with _db_lock:  # the SQLite connection is shared across threads
        for i in range(len(ids)):
            meta = metadatas[i] or {}
            # Collection uses cosine distance, so similarity = 1 - distance.
            score = round(1.0 - float(distances[i]), 4)
            if min_score is not None and score < min_score:
                continue
            sqlite_id = int(meta.get("sqlite_id") or 0)
            class_uri = meta.get("class_uri", "")
            item: Dict[str, Any] = {
                "score": score,
                "ontology": meta.get("ontology_id", ""),
                "label": meta.get("preferred_label", ""),
                "notation": meta.get("notation", ""),
                "uri": class_uri,
                "sqlite_id": sqlite_id,
                "obsolete": meta.get("obsolete", ""),
                "synonyms": fetch_synonyms(rt.db_con, sqlite_id) if sqlite_id else [],
            }
            if details and class_uri:
                item["details"] = concept_details(rt.db_con, class_uri)
            out.append(item)
    return out
693
-
694
-
695
def results_to_df(results: List[Dict[str, Any]]) -> pd.DataFrame:
    """Flatten search hits into a tabular DataFrame (synonyms capped at 5)."""
    rows = []
    for hit in results:
        shown_synonyms = (hit.get("synonyms") or [])[:5]
        rows.append({
            "score": hit["score"],
            "label": hit["label"],
            "ontology": hit["ontology"],
            "notation": hit["notation"],
            "obsolete": hit["obsolete"],
            "synonyms": ", ".join(shown_synonyms),
            "uri": hit["uri"],
            "sqlite_id": hit["sqlite_id"],
        })
    return pd.DataFrame(rows)
706
-
707
-
708
def results_to_md(results: List[Dict[str, Any]], show_details: bool) -> str:
    """Render search hits as Markdown: score bar, field table, optional details."""
    if not results:
        return "*No results found.*"
    lines: List[str] = []
    for idx, r in enumerate(results, 1):
        # 10-segment similarity bar: score 0.73 -> 7 filled + 3 empty blocks.
        bar = "█" * int(r["score"] * 10) + "░" * (10 - int(r["score"] * 10))
        lines.append(
            f"### {idx}. {r.get('label','')} "
            f"`{r.get('score')}` `{bar}`"
        )
        lines.append("| Field | Value |")
        lines.append("|---|---|")
        lines.append(f"| **Ontology** | {r.get('ontology','')} |")
        if r.get("notation"):
            lines.append(f"| **Notation** | {r['notation']} |")
        lines.append(f"| **URI** | `{r.get('uri','')}` |")
        lines.append(f"| **Obsolete** | {r.get('obsolete','')} |")
        syns = r.get("synonyms") or []
        if syns:
            # Show at most 15 synonyms inline, with a "+N more" suffix.
            head = ", ".join(syns[:15])
            tail = f" *...+{len(syns)-15} more*" if len(syns) > 15 else ""
            lines.append(f"| **Synonyms** | {head}{tail} |")
        if show_details and r.get("details"):
            d = r["details"]
            if d.get("definition"):
                lines += ["", "> **Definition**", f"> {d['definition']}"]
            if d.get("parents"):
                # Cap the parent list at 20 entries.
                ps = d["parents"][:20]
                tail = (f"\n*...+{len(d['parents'])-20} more*"
                        if len(d["parents"]) > 20 else "")
                lines += [
                    "", "**Parents**",
                    "\n".join(f"- `{p}`" for p in ps) + tail,
                ]
        lines.append("")
    return "\n".join(lines)
744
-
745
-
746
- # ─────────────────────────────────────────────────────────────────────────────
747
- # SQL console
748
- # ─────────────────────────────────────────────────────────────────────────────
749
-
750
# Allow-list: statement must start with SELECT, optionally preceded by a
# WITH ... CTE prologue.
_SELECT_RE = re.compile(r"^\s*(with\s+[\s\S]+?\s+)?select\b", re.IGNORECASE)
# Deny-list: keywords that mutate schema/state even when embedded in
# otherwise SELECT-looking text.
_FORBID_RE = re.compile(
    r"\b(attach|detach|pragma|vacuum|reindex|drop|alter"
    r"|create|replace|truncate)\b",
    re.IGNORECASE,
)
756
-
757
-
758
def run_sql(sql: str, con: sqlite3.Connection, *,
            select_only: bool = True,
            max_rows: int = 1000) -> Tuple[pd.DataFrame, str]:
    """Execute *sql* and return ``(DataFrame, status message)``.

    With *select_only*, only SELECT (or WITH ... SELECT) statements pass and
    any statement containing forbidden keywords is rejected. When
    *max_rows* > 0 and the query has no LIMIT clause, one is appended.
    """
    statement = (sql or "").strip().rstrip(";")
    if not statement:
        return pd.DataFrame(), "Enter a SQL query."

    if select_only:
        if not _SELECT_RE.match(statement):
            return pd.DataFrame(), "Blocked: only SELECT (or WITH ... SELECT) is allowed."
        if _FORBID_RE.search(statement):
            return pd.DataFrame(), "Blocked: query contains forbidden keywords."

    if max_rows > 0 and not re.search(r"\blimit\b", statement, re.IGNORECASE):
        statement = f"{statement} LIMIT {int(max_rows)}"

    with _db_lock:
        cur = con.execute(statement)
        columns = [desc[0] for desc in (cur.description or [])]
        rows = cur.fetchall()

    if not columns:
        return pd.DataFrame(), f"OK — rows affected: {cur.rowcount}"
    return pd.DataFrame(rows, columns=columns), f"OK — {len(rows)} rows returned."
778
-
779
-
780
- # ─────────────────────────────────────────────────────────────────────────────
781
- # Rebuild
782
- # ─────────────────────────────────────────────────────────────────────────────
783
-
784
def rebuild_chroma(*, db_path: str, chroma_path: str,
                   collection_name: str,
                   embedder: SentenceTransformer,
                   batch_size: int = 256,
                   progress=None) -> Tuple[str, int]:
    """Re-embed every class in the SQLite DB into a fresh Chroma index.

    Destroys any existing index at *chroma_path* first. *progress*, when
    given, is called as ``progress(fraction, desc=...)``. Returns
    ``(status message, number of vectors written)``.
    """
    if not os.path.exists(db_path):
        return f"SQLite DB not found: {db_path}", 0

    # Start from a clean directory — upserting into a stale index could
    # leave orphaned vectors behind.
    if os.path.exists(chroma_path):
        shutil.rmtree(chroma_path)
    os.makedirs(chroma_path, exist_ok=True)

    client = chromadb.PersistentClient(
        path=chroma_path,
        settings=Settings(anonymized_telemetry=False),
    )
    col = client.get_or_create_collection(
        name=collection_name,
        metadata={"hnsw:space": "cosine"},
    )
    con = sqlite3.connect(db_path)
    total = 0
    tick = 0
    ids: List[str] = []
    docs: List[str] = []
    metas: List[Dict[str, Any]] = []

    def _flush():
        # Embed and upsert the buffered batch, then reset the buffers.
        nonlocal total, tick
        emb = _safe_encode(
            embedder, docs,
            normalize_embeddings=True,
            batch_size=min(64, batch_size),
        ).tolist()
        col.upsert(ids=ids, documents=docs, metadatas=metas, embeddings=emb)
        total += len(ids)
        ids.clear(); docs.clear(); metas.clear()
        if progress is not None:
            # The total row count isn't known up front, so the fraction
            # cycles 0..1 every 200 flushes; the real count is in the text.
            tick = (tick + 1) % 200
            progress(tick / 200.0, desc=f"Indexed {total:,} vectors...")

    for sid, oid, uri, label, notation, obsolete in iter_classes_for_rebuild(con):
        ids.append(f"{oid}::{sid}")
        docs.append(build_embed_text(con, sid, label))
        metas.append({
            "ontology_id": oid,
            "class_uri": uri,
            "preferred_label": label or "",
            "notation": notation or "",
            "obsolete": str(int(obsolete)),
            "sqlite_id": str(sid),
        })
        if len(ids) >= batch_size:
            _flush()

    if ids:
        _flush()
    if progress is not None:
        progress(1.0, desc=f"Done — {total:,} vectors.")
    con.close()
    return "Rebuild complete.", total
845
-
846
-
847
- # ─────────────────────────────────────────────────────────────────────────────
848
- # UI callbacks
849
- # ─────────────────────────────────────────────────────────────────────────────
850
-
851
def ui_status_poll() -> str:
    """Status text for the UI: a step checklist while loading, else a summary.

    Progress is inferred by matching known phrases in the startup log.
    """
    if _loading:
        steps = [
            "[ ] Detecting device...",
            "[ ] Loading embedding model (this may take a minute)...",
            "[ ] Opening Chroma vector index...",
            "[ ] Opening SQLite database...",
        ]
        # Show how many steps are done based on log content
        done = 0
        log_text = " ".join(_load_log).lower()
        if "loading model" in log_text: done = 1
        if "model warm-up done" in log_text: done = 2
        if "chroma ready" in log_text: done = 3

        lines = []
        for i, step in enumerate(steps):
            if i < done:
                lines.append(step.replace("[ ]", "[x]"))
            elif i == done:
                lines.append(step.replace("[ ]", "-->"))
            else:
                lines.append(step)

        last = _load_log[-1] if _load_log else "Starting..."
        lines.append(f"\nLast update: {last}")
        return "\n".join(lines)

    rt, err = get_runtime()
    if err:
        return f"Not ready: {err}"
    return (
        f"Ready | model={rt.model} | "
        f"device={rt.device} | collection={rt.collection} | "
        f"dim={detect_collection_dim(DEFAULT_CHROMA, DEFAULT_COLLECTION)}"
    )
887
-
888
-
889
def ui_download_and_reload(force: bool, device_choice: str,
                           model_name: str):
    """Download assets (optionally forced), then reload the runtime.

    Returns ``(full log text, final status line)`` for the two UI outputs;
    on download failure both outputs carry the same log text.
    """
    log_lines: List[str] = []
    ok, msg = download_assets(force=force, progress_cb=log_lines.append)
    log_lines.append(msg)
    if not ok:
        failure_text = "\n".join(log_lines)
        return failure_text, failure_text

    log_lines.append("Loading runtime...")
    status = reload_runtime(
        DEFAULT_DB_PATH, DEFAULT_CHROMA, DEFAULT_COLLECTION,
        model_name.strip(), pick_device(device_choice),
    )
    log_lines.append(status)
    return "\n".join(log_lines), status
904
-
905
-
906
def ui_warm_load(db_path, chroma_path, collection,
                 model_name, device_choice) -> str:
    """Validate the given paths, then (re)load the runtime with them."""
    db_path = db_path.strip()
    chroma_path = chroma_path.strip()
    collection = collection.strip()
    model_name = model_name.strip()

    if not os.path.exists(db_path):
        return f"SQLite DB not found: {db_path}"
    if not os.path.exists(chroma_path):
        return f"Chroma path not found: {chroma_path}"

    device = pick_device(device_choice)
    return reload_runtime(db_path, chroma_path, collection, model_name, device)
920
-
921
-
922
def ui_search(query, n_results, ontology, include_obsolete,
              use_min_score, min_score, details):
    """Gradio callback: run semantic search.

    Returns ``(DataFrame, status line, markdown)`` for the three outputs;
    errors come back as an empty frame plus the message.
    """
    t0 = time.time()
    query = (query or "").strip()
    if not query:
        return pd.DataFrame(), "Enter a query.", ""
    rt, err = get_runtime()
    if err:
        return pd.DataFrame(), err, ""
    try:
        results = semantic_search(
            query, rt=rt,
            n_results = int(n_results),
            ontology = ontology.strip() or None,
            include_obsolete = include_obsolete,
            min_score = (min_score if use_min_score else None),
            details = details,
        )
        dt = (time.time() - t0) * 1000
        return (
            results_to_df(results),
            f"{len(results)} results | {dt:.0f} ms | device={rt.device}",
            results_to_md(results, details),
        )
    except InternalError as e:
        # Heuristic: these substrings appear when the on-disk HNSW index
        # files are truncated or corrupted.
        corrupted = any(p in str(e).lower()
                        for p in ["eof", "pickle", "hnsw"])
        msg = (
            "Chroma index appears corrupted. Use the Rebuild tab."
            if corrupted else f"Chroma error: {e}"
        )
        return pd.DataFrame(), msg, ""
    except Exception as e:
        return pd.DataFrame(), f"{type(e).__name__}: {e}", ""
956
-
957
-
958
def ui_sql(sql, select_only, max_rows):
    """Execute a SQL statement against the loaded SQLite database.

    Returns (results dataframe, status/error message).
    """
    rt, err = get_runtime()
    if err:
        return pd.DataFrame(), err
    try:
        return run_sql(sql, rt.db_con,
                       select_only=select_only,
                       max_rows=int(max_rows))
    except Exception as exc:
        return pd.DataFrame(), f"{type(exc).__name__}: {exc}"
970
-
971
-
972
def ui_rebuild(db_path, chroma_path, collection, model_name,
               device_choice, batch_size, progress=gr.Progress()):
    """Re-embed all classes from SQLite into a fresh Chroma index.

    Returns (status message, number of vectors indexed). Requires an
    already-loaded runtime, since embedding reuses ``rt.embedder``.
    """
    rt, err = get_runtime()
    if err:
        return err, 0
    db_path = db_path.strip()
    chroma_path = chroma_path.strip()
    collection = collection.strip()
    if not os.path.exists(db_path):
        return f"SQLite DB not found: {db_path}", 0
    try:
        # NOTE(review): embedding uses the currently-loaded rt.embedder while
        # the reload below uses the dropdown's model_name — if the user picked
        # a different model, index dimension may mismatch. Confirm intended.
        status, total = rebuild_chroma(
            db_path=db_path,
            chroma_path=chroma_path,
            collection_name=collection,
            embedder=rt.embedder,
            batch_size=int(batch_size),
            progress=progress,
        )
        # Hot-reload so the fresh index is used immediately.
        reload_runtime(db_path, chroma_path, collection,
                       model_name.strip(), pick_device(device_choice))
        return f"{status} ({total:,} vectors)", total
    except Exception as exc:
        return f"{type(exc).__name__}: {exc}", 0
995
-
996
-
997
- # ─────────────────────────────────────────────────────────────────────────────
998
- # Build theme (outside Blocks — works for all Gradio versions)
999
- # ─────────────────────────────────────────────────────────────────────────────
1000
-
1001
# Build the Gradio theme eagerly; on any failure (e.g. an older Gradio
# without `gr.themes` or without GoogleFont support) fall back to the
# built-in default theme name string.
try:
    _theme = gr.themes.Base(
        primary_hue=gr.themes.colors.blue,
        secondary_hue=gr.themes.colors.slate,
        neutral_hue=gr.themes.colors.slate,
        font=[gr.themes.GoogleFont("Inter"), "system-ui", "sans-serif"],
        font_mono=[gr.themes.GoogleFont("JetBrains Mono"), "monospace"],
    )
except Exception:
    # "default" is accepted wherever a Theme object is expected.
    _theme = "default"
1011
-
1012
-
1013
- # ─────────────────────────────────────────────────────────────────────────────
1014
- # Gradio UI
1015
- # ─────────────────────────────────────────────────────────────────────────────
1016
-
1017
# Full UI definition. Component handles created here (c_*, btn_*, *_status,
# *_table, ...) stay in module scope and are referenced by the event wiring
# at the bottom of this `with` block.
with gr.Blocks(title="Ontology Semantic Search") as demo:

    # ── Header ────────────────────────────────────────────────────────────
    # Static branded header; LOGO_URL is defined earlier in this module.
    gr.HTML(f"""
    <div id="app-header">
        <img src="{LOGO_URL}" alt="Sensein logo" />
        <div id="app-header-text">
            <h1>Ontology Semantic Search</h1>
            <p>Semantic search over ontologies &mdash;
            ChromaDB &middot; SentenceTransformers &middot; SQLite</p>
        </div>
    </div>
    """)

    # Global status bar, refreshed on load, by a timer, and after actions.
    runtime_status = gr.Textbox(
        label="Runtime Status",
        value="Checking local assets...",
        interactive=False,
        elem_id="status-bar",
    )

    with gr.Tabs():

        # ── Search ────────────────────────────────────────────────────────
        with gr.Tab("Search"):
            with gr.Row(equal_height=False):
                # Left sidebar: search filters fed into ui_search.
                with gr.Column(scale=1, min_width=260, elem_id="sidebar"):
                    gr.Markdown("#### Filters")
                    c_n = gr.Slider(label="Top-K results", minimum=1,
                                    maximum=100, value=20, step=1)
                    c_onto = gr.Textbox(label="Ontology filter",
                                        placeholder="e.g. NCIT, HP, GO",
                                        value="")
                    c_obs = gr.Checkbox(label="Include obsolete terms",
                                        value=False)
                    c_usemsco = gr.Checkbox(label="Enable min-score filter",
                                            value=False)
                    c_mscore = gr.Slider(label="Min score", minimum=0.0,
                                         maximum=1.0, value=0.5, step=0.01)
                    c_detail = gr.Checkbox(label="Show definitions & parents",
                                           value=False)

                # Main column: query box, buttons, and result views.
                with gr.Column(scale=4):
                    c_query = gr.Textbox(
                        label="Search query",
                        placeholder=(
                            "e.g. lung cancer "
                            "· astrocyte "
                            "· myocardial infarction"
                        ),
                        lines=2,
                        elem_id="query-box",
                    )
                    with gr.Row():
                        btn_search = gr.Button("Search", variant="primary", scale=3)
                        btn_clear = gr.Button("Clear", scale=1)

                    search_status = gr.Textbox(
                        label="", value="", interactive=False,
                        show_label=False, elem_id="status-bar",
                    )
                    search_table = gr.Dataframe(
                        label="Results", interactive=False,
                        wrap=True, row_count=(0, "dynamic"),
                        elem_id="result-table",
                    )
                    search_md = gr.Markdown(value="")

        # ── SQL Console ───────────────────────────────────────────────────
        with gr.Tab("SQL Console"):
            gr.Markdown(
                "Query the SQLite database directly. "
                "**SELECT-only** mode is enabled by default."
            )
            with gr.Row():
                with gr.Column(scale=3):
                    c_sql = gr.Textbox(
                        label="SQL Query",
                        value=(
                            "SELECT ontology_id, COUNT(*) AS n\n"
                            "FROM classes\n"
                            "GROUP BY ontology_id\n"
                            "ORDER BY n DESC\n"
                            "LIMIT 20;"
                        ),
                        lines=8,
                        elem_id="sql-editor",
                    )
                    with gr.Row():
                        sql_run = gr.Button("Execute", variant="primary")
                        c_select_only = gr.Checkbox(
                            label="SELECT-only (recommended)", value=True
                        )
                        c_max_rows = gr.Slider(
                            label="Row limit", minimum=10,
                            maximum=10000, value=1000, step=10,
                        )
                with gr.Column(scale=1, elem_id="sidebar"):
                    gr.Markdown("#### Example queries")
                    gr.Markdown("""
```sql
-- Count per ontology
SELECT ontology_id, COUNT(*) n
FROM classes
GROUP BY ontology_id
ORDER BY n DESC;

-- Label search
SELECT * FROM classes
WHERE preferred_label LIKE '%cancer%'
LIMIT 50;

-- List tables
SELECT name FROM sqlite_master
WHERE type='table';
```
                    """)
            sql_status = gr.Textbox(label="Status", value="", interactive=False)
            sql_table = gr.Dataframe(
                label="Results", interactive=False,
                wrap=True, row_count=(0, "dynamic"),
            )

        # ── Data & Setup ──────────────────────────────────────────────────
        with gr.Tab("Data & Setup"):
            with gr.Row():
                with gr.Column(scale=2):
                    gr.Markdown(f"""
### Download from HuggingFace
Dataset: [`{HF_DATASET_REPO}`](https://huggingface.co/datasets/{HF_DATASET_REPO})

Downloads:
- **`{HF_DB_FILENAME}`** — SQLite ontology database
- **`{HF_CHROMA_ARCHIVE}`** — pre-built vector index *(if available)*

Assets are saved to **`{CACHE_DIR}/`** and skipped silently if already present.
If the pre-built index is unavailable, use the **Rebuild Index** tab after downloading the DB.
                    """)
                    c_force = gr.Checkbox(
                        label="Force re-download (overwrite existing files)",
                        value=False,
                    )
                    c_dl_model = gr.Dropdown(label="Embedding model",
                                             choices=MODEL_CHOICES,
                                             value=DEFAULT_MODEL)
                    c_dl_dev = gr.Dropdown(
                        label="Device",
                        choices=["auto", "cpu", "cuda", "mps"],
                        value="auto",
                    )
                    btn_dl = gr.Button("Download & Load", variant="primary")
                    dl_log = gr.Textbox(
                        label="Download log", value="", lines=14,
                        interactive=False, elem_id="dl-log",
                    )
                with gr.Column(scale=1, elem_id="sidebar"):
                    gr.Markdown("#### Asset paths")
                    gr.Markdown(f"""
| Asset | Path |
|---|---|
| SQLite DB | `{DEFAULT_DB_PATH}` |
| Chroma index | `{DEFAULT_CHROMA}` |
| Collection | `{DEFAULT_COLLECTION}` |
| Model | `{DEFAULT_MODEL}` |
                    """)
                    gr.Markdown("#### Env overrides")
                    gr.Markdown("""
```bash
ONTOLOGY_CACHE_DIR=./cache
ONTOLOGY_DB_NAME=bioportal.db
ONTOLOGY_CHROMA_NAME=bioportal_chroma
ONTOLOGY_COLLECTION=ontology_classes
ONTOLOGY_EMBED_MODEL=BAAI/bge-small-en-v1.5
```
                    """)

        # ── Rebuild Index ─────────────────────────────────────────────────
        with gr.Tab("Rebuild Index"):
            gr.Markdown(
                "Re-embed all classes from SQLite into a fresh Chroma index. "
                "Use this if the pre-built index was unavailable or is corrupted."
            )
            with gr.Row():
                with gr.Column(scale=2):
                    with gr.Row():
                        c_rb_db = gr.Textbox(label="SQLite DB path",
                                             value=DEFAULT_DB_PATH)
                        c_rb_chroma = gr.Textbox(label="Chroma path",
                                                 value=DEFAULT_CHROMA)
                    with gr.Row():
                        c_rb_col = gr.Textbox(label="Collection",
                                              value=DEFAULT_COLLECTION)
                        c_rb_model = gr.Dropdown(label="Embedding model",
                                                 choices=MODEL_CHOICES,
                                                 value=DEFAULT_MODEL)
                    c_rb_dev = gr.Dropdown(
                        label="Device",
                        choices=["auto", "cpu", "cuda", "mps"],
                        value="auto",
                    )
                    c_rb_batch = gr.Slider(label="Batch size", minimum=32,
                                           maximum=2048, value=256, step=32)
                    btn_rebuild = gr.Button("Start Rebuild", variant="stop")
                with gr.Column(scale=1, elem_id="sidebar"):
                    gr.Markdown("#### Notes")
                    gr.Markdown("""
- Rebuilding **deletes** the existing Chroma directory first.
- Duration depends on DB size and hardware.
- The runtime is hot-reloaded automatically after rebuild.
- Use `bge-small` for fastest rebuilds; `bge-large` for highest quality.
                    """)
            rebuild_status = gr.Textbox(label="Status", value="",
                                        interactive=False)
            rebuild_total = gr.Number(label="Vectors indexed", value=0,
                                      precision=0, interactive=False)

        # ── Advanced Config ───────────────────────────────────────────────
        with gr.Tab("Advanced Config"):
            gr.Markdown(
                "Change paths, model, or device at runtime and reload "
                "without restarting the server."
            )
            with gr.Row():
                with gr.Column():
                    adv_db = gr.Textbox(label="SQLite DB path",
                                        value=DEFAULT_DB_PATH)
                    adv_chroma = gr.Textbox(label="Chroma path",
                                            value=DEFAULT_CHROMA)
                    adv_col = gr.Textbox(label="Collection name",
                                         value=DEFAULT_COLLECTION)
                    adv_model = gr.Dropdown(label="Embedding model",
                                            choices=MODEL_CHOICES,
                                            value=DEFAULT_MODEL)
                    adv_device = gr.Dropdown(
                        label="Device",
                        choices=["auto", "cpu", "cuda", "mps"],
                        value="auto",
                    )
                    adv_btn = gr.Button("Apply & Reload Runtime",
                                        variant="primary")
                    adv_status = gr.Textbox(label="Status", value="",
                                            interactive=False)
                with gr.Column(elem_id="sidebar"):
                    gr.Markdown("#### Model options")
                    gr.Markdown("""
| Model | Size | Speed | Quality |
|---|---|---|---|
| `bge-small-en-v1.5` | 130 MB | Fast | Good |
| `bge-base-en-v1.5` | 440 MB | Med | Better |
| `bge-large-en-v1.5` | 1.3 GB | Slow | Best |
| `all-MiniLM-L6-v2` | 90 MB | Fastest | Fair |
                    """)

    # ── Event wiring ──────────────────────────────────────────────────────
    # Both the button click and textbox Enter submit run the same search.
    btn_search.click(
        fn=ui_search,
        inputs=[c_query, c_n, c_onto, c_obs, c_usemsco, c_mscore, c_detail],
        outputs=[search_table, search_status, search_md],
    )
    c_query.submit(
        fn=ui_search,
        inputs=[c_query, c_n, c_onto, c_obs, c_usemsco, c_mscore, c_detail],
        outputs=[search_table, search_status, search_md],
    )

    def _clear():
        # Reset query box, results table, status line, and markdown pane.
        return "", pd.DataFrame(), "", ""

    btn_clear.click(
        fn=_clear, inputs=[],
        outputs=[c_query, search_table, search_status, search_md],
    )

    sql_run.click(
        fn=ui_sql,
        inputs=[c_sql, c_select_only, c_max_rows],
        outputs=[sql_table, sql_status],
    )

    btn_dl.click(
        fn=ui_download_and_reload,
        inputs=[c_force, c_dl_dev, c_dl_model],
        outputs=[dl_log, runtime_status],
    )

    # After rebuild / reload, refresh the global status bar via .then().
    btn_rebuild.click(
        fn=ui_rebuild,
        inputs=[c_rb_db, c_rb_chroma, c_rb_col,
                c_rb_model, c_rb_dev, c_rb_batch],
        outputs=[rebuild_status, rebuild_total],
    ).then(fn=ui_status_poll, inputs=[], outputs=[runtime_status])

    adv_btn.click(
        fn=ui_warm_load,
        inputs=[adv_db, adv_chroma, adv_col, adv_model, adv_device],
        outputs=[adv_status],
    ).then(fn=ui_status_poll, inputs=[], outputs=[runtime_status])

    # Refresh status on page load
    demo.load(fn=ui_status_poll, inputs=[], outputs=[runtime_status])

    # Auto-poll every 2 s while the background thread is still loading.
    # gr.Timer is available in Gradio 4.x+; fall back gracefully if not.
    try:
        _timer = gr.Timer(value=2)
        _timer.tick(fn=ui_status_poll, inputs=[], outputs=[runtime_status])
    except AttributeError:
        pass  # older Gradio — user can click Reload in Advanced Config
1325
-
1326
-
1327
- # ─────────────────────────────────────────────────────────────────────────────
1328
- # Launch
1329
- # ─────────────────────────────────────────────────────────────────────────────
1330
-
1331
if __name__ == "__main__":
    # Bind on all interfaces; port from $PORT (HF Spaces convention),
    # defaulting to Gradio's usual 7860.
    launch_kwargs: Dict[str, Any] = {
        "server_name": "0.0.0.0",
        "server_port": int(os.getenv("PORT", 7860)),
    }

    # Gradio 6+ moved theme/css to launch(); older versions take them in
    # Blocks(...). Inspect this install's launch() signature to decide.
    # Uses the module-level `inspect` import — the previous local
    # `import inspect as _inspect` duplicated it.
    _launch_sig = set(inspect.signature(demo.launch).parameters)
    if "theme" in _launch_sig:
        launch_kwargs["theme"] = _theme
        launch_kwargs["css"] = CUSTOM_CSS
    # show_api was removed in Gradio 6; only pass it where supported.
    if "show_api" in _launch_sig:
        launch_kwargs["show_api"] = False

    demo.queue().launch(**launch_kwargs)