Create protocols.py
Browse files- mcp/protocols.py +18 -0
mcp/protocols.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ββ mcp/protocols.py βββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 2 |
+
import asyncio
|
| 3 |
+
from mcp.openai_utils import ai_qa
|
| 4 |
+
from mcp.gemini import gemini_qa
|
| 5 |
+
|
| 6 |
+
async def draft_protocol(question: str, context: str, llm: str = "openai") -> str:
    """Draft a step-by-step experimental protocol for the given question.

    Args:
        question: The hypothesis or research question the protocol should test.
        context: Background material embedded verbatim into the prompt.
        llm: Backend selector; ``"gemini"`` (any case) routes to ``gemini_qa``,
            anything else falls back to the OpenAI-backed ``ai_qa``.

    Returns:
        The model-generated protocol text.
    """
    # Case-insensitive backend dispatch; OpenAI is the default path.
    backend = gemini_qa if llm.lower() == "gemini" else ai_qa
    request = (
        "You are a senior researcher. Draft a step-by-step experimental protocol to test: "
        f"{question}\nContext:\n{context}\nInclude materials, methods, controls, expected outcomes."
    )
    return await backend(request)
|