Update mcp/orchestrator.py
Browse files — mcp/orchestrator.py (+70, −38)
mcp/orchestrator.py
CHANGED
|
@@ -1,47 +1,79 @@
|
|
| 1 |
# mcp/orchestrator.py
|
| 2 |
import asyncio
|
| 3 |
-
from
|
| 4 |
-
from mcp.
|
| 5 |
-
from mcp.
|
| 6 |
-
|
| 7 |
-
from
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
papers =
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29 |
)
|
| 30 |
|
| 31 |
-
#
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
summary = await
|
| 35 |
-
|
| 36 |
-
summary =
|
| 37 |
|
| 38 |
return {
|
| 39 |
"papers": papers,
|
| 40 |
-
"umls":
|
| 41 |
-
"
|
| 42 |
-
"
|
| 43 |
-
"
|
| 44 |
-
"
|
|
|
|
|
|
|
|
|
|
|
|
|
| 45 |
"ai_summary": summary,
|
| 46 |
-
"llm_used": llm
|
| 47 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
# mcp/orchestrator.py
|
| 2 |
import asyncio
|
| 3 |
+
from typing import Any, Dict
|
| 4 |
+
from mcp.arxiv import fetch_arxiv
|
| 5 |
+
from mcp.pubmed import fetch_pubmed
|
| 6 |
+
from mcp.nlp import extract_umls_concepts
|
| 7 |
+
from mcp.umls import lookup_umls
|
| 8 |
+
from mcp.umls_rel import fetch_relations
|
| 9 |
+
from mcp.openfda import fetch_drug_safety
|
| 10 |
+
from mcp.ncbi import search_gene, get_mesh_definition
|
| 11 |
+
from mcp.disgenet import disease_to_genes
|
| 12 |
+
from mcp.clinicaltrials import search_trials
|
| 13 |
+
from mcp.mygene import mygene
|
| 14 |
+
from mcp.opentargets import ot
|
| 15 |
+
from mcp.cbio import cbio
|
| 16 |
+
from mcp.openai_utils import ai_summarize, ai_qa
|
| 17 |
+
from mcp.gemini import gemini_summarize, gemini_qa
|
| 18 |
+
|
| 19 |
+
def _get_llm(llm: str):
    """Return the (summarize, qa) coroutine pair for the requested backend.

    ``"gemini"`` (case-insensitive) selects the Gemini helpers; any other
    value falls back to the OpenAI helpers.
    """
    if llm.lower() == "gemini":
        return gemini_summarize, gemini_qa
    return ai_summarize, ai_qa
|
| 21 |
+
|
| 22 |
+
async def orchestrate_search(query: str, llm: str="openai") -> Dict[str, Any]:
    """Run the full biomedical search pipeline for *query*.

    Steps: (1) fetch literature from arXiv and PubMed concurrently,
    (2) link UMLS concepts found in the abstracts and fetch their
    relations, (3) enrich the first linked concept via drug-safety /
    gene / trial / target / variant services, (4) summarize the
    abstracts with the selected LLM ("openai" by default, or "gemini").

    Returns a dict with all gathered results plus ``"ai_summary"`` and
    ``"llm_used"``.
    """
    # 1) literature — run both fetches concurrently; a source that fails
    # is silently skipped (only list results are kept).
    arxiv_t, pubmed_t = fetch_arxiv(query), fetch_pubmed(query)
    papers = []
    for res in await asyncio.gather(arxiv_t, pubmed_t, return_exceptions=True):
        if isinstance(res, list):
            papers.extend(res)

    # 2) UMLS concept linking over the concatenated abstracts.
    # NOTE(review): rels may contain exception objects (return_exceptions=True)
    # — confirm downstream consumers tolerate that.
    blob = " ".join(p.get("summary","") for p in papers)
    umls = extract_umls_concepts(blob)
    rels = await asyncio.gather(*[fetch_relations(c["cui"]) for c in umls], return_exceptions=True)

    # 3) enrichment — keyed off the first linked concept; when no concept
    # was found, substitute an already-completed awaitable with an empty
    # result so the gather below keeps its shape.
    keys = [c["name"] for c in umls]
    fda_t = [fetch_drug_safety(k) for k in keys]
    genes_t = search_gene(keys[0]) if keys else asyncio.sleep(0, result=[])
    mesh_t = get_mesh_definition(keys[0]) if keys else asyncio.sleep(0, result="")
    dis_t = disease_to_genes(keys[0]) if keys else asyncio.sleep(0, result=[])
    trials_t = search_trials(query)
    ot_t = ot.fetch(keys[0]) if keys else asyncio.sleep(0, result=[])
    var_t = cbio.fetch_variants(keys[0]) if keys else asyncio.sleep(0, result=[])

    # NOTE(review): return_exceptions=False here means a single failing
    # enrichment service aborts the whole pipeline, unlike the defensive
    # gathers above — confirm that is intended.
    fda, genes, mesh, dis, trials, ot_assoc, variants = await asyncio.gather(
        asyncio.gather(*fda_t, return_exceptions=True),
        genes_t, mesh_t, dis_t, trials_t, ot_t, var_t,
        return_exceptions=False
    )

    # 4) AI summary — degrade gracefully when the LLM call fails.
    summarize, _ = _get_llm(llm)
    try:
        summary = await summarize(blob)
    except Exception:  # narrowed from bare except: must not swallow CancelledError
        summary = "LLM unavailable."

    return {
        "papers": papers,
        "umls": umls,
        "umls_relations": rels,
        "drug_safety": fda,
        "genes": [genes],
        "mesh_defs": [mesh],
        "gene_disease": dis,
        "clinical_trials": trials,
        "ot_associations": ot_assoc,
        "variants": variants,
        "ai_summary": summary,
        "llm_used": llm.lower()
    }
|
| 72 |
+
|
| 73 |
+
async def answer_ai_question(question: str, context: str="", llm: str="openai"):
    """Answer *question* with the selected LLM, optionally grounded in *context*.

    Returns ``{"answer": <text>}``; falls back to a fixed message when
    the LLM call fails.
    """
    _, qa = _get_llm(llm)
    try:
        ans = await qa(question, context)
    except Exception:  # narrowed from bare except: must not swallow CancelledError
        ans = "LLM unavailable."
    return {"answer": ans}
|