Spaces:
Sleeping
Sleeping
Rajan Sharma
committed on
Create llm_router.py
Browse files- llm_router.py +23 -0
llm_router.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import cohere, openai, anthropic
|
| 2 |
+
from settings import COHERE_API_KEY, COHERE_MODEL_PRIMARY, MODEL_SETTINGS, OPENAI_API_KEY, OPENAI_MODEL_FALLBACK, ANTHROPIC_API_KEY, ANTHROPIC_MODEL_FALLBACK
|
| 3 |
+
|
| 4 |
+
def cohere_chat(prompt: str):
    """Return Cohere's reply to *prompt*, or ``None`` when unavailable.

    ``None`` is returned when no API key is configured, when the response
    carries no text, or when the API call raises — so that
    ``generate_with_fallback`` can move on to the next provider instead of
    crashing the whole chain.
    """
    if not COHERE_API_KEY:
        return None
    try:
        client = cohere.Client(COHERE_API_KEY)
        resp = client.chat(
            model=COHERE_MODEL_PRIMARY,
            message=prompt,
            temperature=MODEL_SETTINGS["temperature"],
        )
    except Exception:
        # Network/auth/quota failures must not break the fallback chain.
        return None
    return resp.text if hasattr(resp, "text") else None
|
| 9 |
+
|
| 10 |
+
def openai_chat(prompt: str):
    """Return OpenAI's reply to *prompt*, or ``None`` when unavailable.

    ``None`` is returned when no API key is configured or when the API call
    raises, so ``generate_with_fallback`` can continue down the chain.

    NOTE(review): ``openai.ChatCompletion`` only exists in openai<1.0 —
    confirm the pinned SDK version; openai>=1.0 uses ``OpenAI().chat.completions``.
    """
    if not OPENAI_API_KEY:
        return None
    openai.api_key = OPENAI_API_KEY
    try:
        resp = openai.ChatCompletion.create(
            model=OPENAI_MODEL_FALLBACK,
            messages=[{"role": "user", "content": prompt}],
            # Pass the shared temperature so behavior matches cohere_chat.
            temperature=MODEL_SETTINGS["temperature"],
        )
    except Exception:
        # Any provider failure means "no answer", not a crash.
        return None
    return resp["choices"][0]["message"]["content"]
|
| 15 |
+
|
| 16 |
+
def anthropic_chat(prompt: str):
    """Return Anthropic's reply to *prompt*, or ``None`` when unavailable.

    ``None`` is returned when no API key is configured, when the response
    content is empty, or when the API call raises — keeping the fallback
    chain alive.
    """
    if not ANTHROPIC_API_KEY:
        return None
    try:
        client = anthropic.Anthropic(api_key=ANTHROPIC_API_KEY)
        resp = client.messages.create(
            model=ANTHROPIC_MODEL_FALLBACK,
            # max_tokens is a REQUIRED parameter of the Messages API; the
            # original call omitted it and would fail at runtime.
            # Presumably MODEL_SETTINGS carries a limit — TODO confirm key name.
            max_tokens=MODEL_SETTINGS.get("max_tokens", 1024),
            messages=[{"role": "user", "content": prompt}],
        )
    except Exception:
        # Provider errors degrade to None so the next fallback can run.
        return None
    # Guard against an empty content list before indexing.
    return resp.content[0].text if resp.content else None
|
| 21 |
+
|
| 22 |
+
def generate_with_fallback(prompt: str):
    """Ask each provider in priority order and return the first non-empty reply.

    Order: Cohere (primary), then OpenAI, then Anthropic. Falls back to an
    empty string when no provider produces text.
    """
    providers = (cohere_chat, openai_chat, anthropic_chat)
    for ask in providers:
        reply = ask(prompt)
        if reply:
            return reply
    return ""
|