# LLM backend helpers: dispatch chat requests to OpenAI, OpenRouter, or Ollama.
import requests
import openai
from config import (
OPENAI_API_KEY,
OPENAI_MODEL,
USE_OLLAMA,
OLLAMA_MODEL,
USE_OPENROUTER,
OPENROUTER_API_KEY,
OPENROUTER_MODEL,
TEMPERATURE,
)
# Set the API key once at import time; the openai SDK reads it globally.
openai.api_key = OPENAI_API_KEY
def query_openai(messages):
    """Send *messages* to the configured OpenAI chat model and return the reply.

    Any failure (network, auth, unexpected response shape) is folded into a
    human-readable error string instead of being raised, matching the other
    backend helpers in this module.
    """
    try:
        completion = openai.ChatCompletion.create(
            model=OPENAI_MODEL,
            messages=messages,
            temperature=TEMPERATURE,
        )
        first_choice = completion["choices"][0]
        return first_choice["message"]["content"]
    except Exception as err:
        return f"⚠️ OpenAI Error: {err}"
def query_openrouter(messages):
    """Send *messages* to the OpenRouter chat-completions API and return the reply.

    Args:
        messages: Chat messages as a list of {"role": ..., "content": ...} dicts.

    Returns:
        The assistant reply string, or a human-readable error string on failure
        (network error, HTTP error status, or unexpected response shape).
    """
    headers = {
        "Authorization": f"Bearer {OPENROUTER_API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": OPENROUTER_MODEL,
        "messages": messages,
        "temperature": TEMPERATURE,
    }
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            # Without a timeout, requests can block forever on a stalled server.
            timeout=60,
        )
        # Surface HTTP errors (4xx/5xx) clearly instead of a confusing
        # KeyError when the error body lacks "choices".
        response.raise_for_status()
        return response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        return f"⚠️ OpenRouter Error: {e}"
def query_ollama(prompt):
    """Send *prompt* to a local Ollama server and return the generated text.

    Args:
        prompt: The full flattened prompt string.

    Returns:
        The generated response string, or a human-readable error string on
        failure (server down, HTTP error, or unexpected response shape).
    """
    try:
        response = requests.post(
            "http://localhost:11434/api/generate",
            # "stream": False is required: the endpoint streams
            # newline-delimited JSON chunks by default, which would break
            # response.json() below once a reply spans multiple chunks.
            json={"model": OLLAMA_MODEL, "prompt": prompt, "stream": False},
            # Local generation can be slow, but never hang indefinitely.
            timeout=120,
        )
        # Turn HTTP errors into a clear message instead of a KeyError.
        response.raise_for_status()
        return response.json()["response"]
    except Exception as e:
        return f"⚠️ Ollama Error: {e}"
def generate_response(persona_prompt, user_input, history):
    """Produce an assistant reply and return the extended chat history.

    Dispatches to Ollama, OpenRouter, or OpenAI based on the config flags.
    *history* is not mutated; a new list with the user turn and the
    assistant turn appended is returned.
    """
    if USE_OLLAMA:
        # Ollama's generate API takes one flat prompt, so flatten the
        # persona and the running conversation into plain text.
        lines = [f"{persona_prompt}\n"]
        lines.extend(
            f"{turn['role'].capitalize()}: {turn['content']}" for turn in history
        )
        lines.append(f"User: {user_input}\nBot:")
        reply = query_ollama("\n".join(lines))
    else:
        # Chat-style backends consume the structured message list directly,
        # with the persona delivered as the system message.
        messages = [{"role": "system", "content": persona_prompt}]
        messages += history
        messages += [{"role": "user", "content": user_input}]
        if USE_OPENROUTER:
            reply = query_openrouter(messages)
        else:
            reply = query_openai(messages)
    return history + [
        {"role": "user", "content": user_input},
        {"role": "assistant", "content": reply},
    ]