Create gemini.py
Browse files- mcp/gemini.py +29 -0
mcp/gemini.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# mcp/gemini.py
|
| 2 |
+
"""
|
| 3 |
+
Lightweight Gemini-Pro helper (text in → text out).
|
| 4 |
+
Requires env var GEMINI_KEY.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# One import per line, grouped stdlib / third-party (PEP 8); the original
# crammed all three onto one line and carried a stray citation artifact.
import os
import asyncio

import google.generativeai as genai

# Configure the SDK at import time only when a key is present, so importing
# this module never raises; without GEMINI_KEY, calls fail later inside the
# SDK instead.
GEN_KEY = os.getenv("GEMINI_KEY")
if GEN_KEY:
    genai.configure(api_key=GEN_KEY)
|
| 13 |
+
_MODEL = None  # lazily-created singleton GenerativeModel (see _model())


def _model():
    """Return the shared ``genai.GenerativeModel``, creating it on first use.

    Lazy construction keeps module import cheap and side-effect free: no
    model object is built until the first real call. Uses the "gemini-pro"
    model. Not thread-safe by design — worst case two threads build the
    model twice, which is harmless (the object is just a client handle).
    """
    global _MODEL
    if _MODEL is None:
        _MODEL = genai.GenerativeModel("gemini-pro")
    return _MODEL
|
| 19 |
+
|
| 20 |
+
# ---------- public helpers ----------
|
| 21 |
+
async def gemini_summarize(text: str, words: int = 150, max_chars: int = 12_000) -> str:
    """Summarize *text* in at most *words* words via Gemini.

    Parameters
    ----------
    text:
        Input to summarize. Truncated to *max_chars* characters to stay
        within the model's context window (previously a hard-coded 12000).
    words:
        Soft word budget passed to the model in the prompt.
    max_chars:
        Truncation limit for *text*; raise it for larger-context models.

    Returns the model's text reply. The blocking SDK call runs in a worker
    thread via ``asyncio.to_thread`` so the event loop is never blocked.
    """
    prompt = f"Summarize in ≤{words} words:\n{text[:max_chars]}"
    rsp = await asyncio.to_thread(_model().generate_content, prompt)
    return rsp.text
|
| 25 |
+
|
| 26 |
+
async def gemini_qa(question: str, context: str = "") -> str:
    """Answer *question* briefly, optionally grounded in *context*.

    *context* is clipped to 10 000 characters before being embedded in the
    prompt. The synchronous SDK call is pushed onto a worker thread with
    ``asyncio.to_thread`` so the event loop stays responsive.
    """
    clipped = context[:10000]
    prompt = f"Answer briefly.\nContext:\n{clipped}\n\nQ: {question}\nA:"
    reply = await asyncio.to_thread(_model().generate_content, prompt)
    return reply.text
|