# Medica_DecisionSupportAI / llm_router.py
import cohere, openai, anthropic

from settings import (COHERE_API_KEY, COHERE_MODEL_PRIMARY, MODEL_SETTINGS, OPENAI_API_KEY,
                      OPENAI_MODEL_FALLBACK, ANTHROPIC_API_KEY, ANTHROPIC_MODEL_FALLBACK)


def cohere_chat(prompt: str):
    """Primary provider: Cohere chat endpoint (v1 Client)."""
    if not COHERE_API_KEY:
        return None
    client = cohere.Client(COHERE_API_KEY)
    response = client.chat(model=COHERE_MODEL_PRIMARY, message=prompt,
                           temperature=MODEL_SETTINGS["temperature"])
    return response.text if hasattr(response, "text") else None


def openai_chat(prompt: str):
    """First fallback: OpenAI Chat Completions (openai>=1.0 client API)."""
    if not OPENAI_API_KEY:
        return None
    client = openai.OpenAI(api_key=OPENAI_API_KEY)
    response = client.chat.completions.create(model=OPENAI_MODEL_FALLBACK,
                                              messages=[{"role": "user", "content": prompt}])
    return response.choices[0].message.content


def anthropic_chat(prompt: str):
    """Second fallback: Anthropic Messages API."""
    if not ANTHROPIC_API_KEY:
        return None
    client = anthropic.Anthropic(api_key=ANTHROPIC_API_KEY)
    # max_tokens is required by the Messages API; 1024 is an assumed cap, not from the original.
    response = client.messages.create(model=ANTHROPIC_MODEL_FALLBACK, max_tokens=1024,
                                      messages=[{"role": "user", "content": prompt}])
    return response.content[0].text


def generate_with_fallback(prompt: str):
    """Try Cohere first, then OpenAI, then Anthropic; return "" if no provider answers."""
    return cohere_chat(prompt) or openai_chat(prompt) or anthropic_chat(prompt) or ""
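

# Minimal usage sketch (an assumption, not part of the original file): exercises the fallback
# chain end to end. It assumes settings.py exposes the keys and model names imported above and
# that at least one provider key is configured; otherwise generate_with_fallback returns "".
if __name__ == "__main__":
    answer = generate_with_fallback("List three red-flag symptoms that warrant urgent referral.")
    print(answer or "No provider available; check the API keys in settings.py")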