|
|
import requests |
|
|
import openai |
|
|
from config import ( |
|
|
OPENAI_API_KEY, |
|
|
OPENAI_MODEL, |
|
|
USE_OLLAMA, |
|
|
OLLAMA_MODEL, |
|
|
USE_OPENROUTER, |
|
|
OPENROUTER_API_KEY, |
|
|
OPENROUTER_MODEL, |
|
|
TEMPERATURE, |
|
|
) |
|
|
|
|
|
# Configure the OpenAI SDK globally with the key from config.
# NOTE(review): this module-level assignment and ChatCompletion.create below
# use the legacy pre-1.0 openai SDK interface — confirm the pinned version.
openai.api_key = OPENAI_API_KEY
|
|
|
|
|
|
|
|
def query_openai(messages):
    """Request a chat completion from the OpenAI API.

    Args:
        messages: Conversation in OpenAI chat format — a list of
            ``{"role": ..., "content": ...}`` dicts.

    Returns:
        The assistant's reply text, or a string starting with
        "⚠️ OpenAI Error:" describing the failure (best-effort:
        this function never raises).
    """
    try:
        # Legacy (pre-1.0) openai SDK call; response is dict-like.
        completion = openai.ChatCompletion.create(
            model=OPENAI_MODEL,
            messages=messages,
            temperature=TEMPERATURE,
        )
        # Indexing stays inside the try so a malformed response is also
        # reported as an error string rather than raised.
        return completion["choices"][0]["message"]["content"]
    except Exception as err:
        return f"⚠️ OpenAI Error: {err}"
|
|
|
|
|
|
|
|
def query_openrouter(messages):
    """Request a chat completion from the OpenRouter API.

    Args:
        messages: Conversation in OpenAI chat format — a list of
            ``{"role": ..., "content": ...}`` dicts.

    Returns:
        The assistant's reply text, or a string starting with
        "⚠️ OpenRouter Error:" describing the failure (best-effort:
        this function never raises).
    """
    headers = {
        "Authorization": f"Bearer {OPENROUTER_API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": OPENROUTER_MODEL,
        "messages": messages,
        "temperature": TEMPERATURE,
    }
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            # Without a timeout, requests waits forever on a stalled
            # connection and this call can hang the app.
            timeout=60,
        )
        # Surface HTTP errors (401, 429, 5xx, ...) as a readable message
        # instead of an opaque KeyError from the indexing below.
        response.raise_for_status()
        return response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        return f"⚠️ OpenRouter Error: {e}"
|
|
|
|
|
|
|
|
def query_ollama(prompt):
    """Request a completion from a local Ollama server.

    Args:
        prompt: The full flattened prompt text to send.

    Returns:
        The model's reply text, or a string starting with
        "⚠️ Ollama Error:" describing the failure (best-effort:
        this function never raises).
    """
    try:
        response = requests.post(
            "http://localhost:11434/api/generate",
            # BUG FIX: /api/generate streams newline-delimited JSON by
            # default, which makes response.json() fail on the multi-line
            # body; "stream": False returns a single JSON object.
            json={"model": OLLAMA_MODEL, "prompt": prompt, "stream": False},
            # Without a timeout, a stalled local server hangs the app;
            # generous value since local generation can be slow.
            timeout=120,
        )
        # Surface HTTP errors (404 unknown model, 5xx, ...) as a readable
        # message instead of an opaque KeyError from the indexing below.
        response.raise_for_status()
        return response.json()["response"]
    except Exception as e:
        return f"⚠️ Ollama Error: {e}"
|
|
|
|
|
|
|
|
def generate_response(persona_prompt, user_input, history):
    """Produce an assistant reply and return the extended chat history.

    Dispatches to Ollama, OpenRouter, or OpenAI depending on the
    config flags (Ollama takes priority, then OpenRouter, then OpenAI).

    Args:
        persona_prompt: System/persona instructions for the model.
        user_input: The new user message.
        history: Prior turns as ``{"role": ..., "content": ...}`` dicts.

    Returns:
        A new history list: the input ``history`` plus the new user turn
        and the assistant's reply (the original list is not mutated).
    """
    if USE_OLLAMA:
        # Ollama's generate endpoint takes one flat prompt string, so
        # serialize the persona, prior turns, and new message into text.
        parts = [f"{persona_prompt}\n"]
        for turn in history:
            parts.append(f"{turn['role'].capitalize()}: {turn['content']}")
        parts.append(f"User: {user_input}\nBot:")
        reply = query_ollama("\n".join(parts))
    else:
        # Chat-style backends take structured messages directly.
        chat = [{"role": "system", "content": persona_prompt}]
        chat.extend(history)
        chat.append({"role": "user", "content": user_input})
        reply = query_openrouter(chat) if USE_OPENROUTER else query_openai(chat)

    return history + [
        {"role": "user", "content": user_input},
        {"role": "assistant", "content": reply},
    ]
|
|
|