import requests
import json
from openai import OpenAI
from params import load_params
def get_client():
    """Build an OpenAI-compatible client for the configured provider.

    Reads the provider configuration via ``load_params()``. When
    ``PROVIDER`` is ``'local-model'``, returns an ``OpenAI`` client
    pointed at the configured ``BASE_URL`` (the api_key is a placeholder
    since local servers do not validate it). For any other provider,
    returns ``None``.
    """
    config = load_params()
    if config['PROVIDER'] != 'local-model':
        return None
    return OpenAI(api_key="local-model", base_url=config['BASE_URL'])
def send_to_chatgpt(msg_list):
    """Send a chat history to a local OpenAI-compatible endpoint.

    Args:
        msg_list: List of chat messages in OpenAI format (dicts with
            "role" and "content" keys).

    Returns:
        Tuple of ``(response_text, usage)``. On any failure,
        ``response_text`` is an ``"Error: ..."`` string and usage is
        ``None`` — callers receive a tuple either way.
    """
    try:
        client = get_client()
        if client is None:
            # get_client() returns None for providers other than
            # 'local-model'; raise a clear message here instead of
            # failing below with an opaque AttributeError on None.
            raise ValueError("no client available for the configured PROVIDER")
        completion = client.chat.completions.create(
            # NOTE(review): model name is hard-coded — consider sourcing
            # it from params alongside PROVIDER/BASE_URL.
            model="phi3:latest",
            temperature=0.6,
            messages=msg_list,
        )
        chatgpt_response = completion.choices[0].message.content
        chatgpt_usage = completion.usage
        return chatgpt_response, chatgpt_usage
    except Exception as e:
        # Preserve the original best-effort contract: report and return
        # an error tuple rather than propagating the exception.
        print(f"Error in send_to_chatgpt: {str(e)}")
        return f"Error: {str(e)}", None
def send_to_anything_llm(msg_list):
    """Send a chat history to an AnythingLLM workspace chat endpoint.

    Args:
        msg_list: List of chat messages (dicts with "role" and
            "content" keys). Entries without a "content" key are
            skipped.

    Returns:
        Tuple of ``(response_text, usage)`` where ``response_text`` is
        the endpoint's ``textResponse`` field (or ``None`` if absent)
        and ``usage`` is its ``usage`` field (``{}`` if absent). On
        failure, returns an ``"Error: ..."`` string and ``None``.
    """
    params = load_params()
    url = f"{params['BASE_URL']}/api/v1/workspace/{params['WORKSPACE']}/chat"
    headers = {
        'accept': 'application/json',
        'Authorization': f"Bearer {params['API_KEY']}",
        'Content-Type': 'application/json'
    }
    # The AnythingLLM chat API takes a single message string, so the
    # chat history is flattened into one space-joined message.
    message_content = " ".join(msg["content"] for msg in msg_list if "content" in msg)
    data = {
        "message": message_content,
        "mode": "chat"
    }
    try:
        # json= lets requests handle serialization; timeout= prevents an
        # unresponsive server from hanging the caller indefinitely.
        response = requests.post(url, headers=headers, json=data, timeout=60)
        response.raise_for_status()
        response_data = response.json()
        chatgpt_response = response_data.get("textResponse")
        chatgpt_usage = response_data.get("usage", {})
        return chatgpt_response, chatgpt_usage
    except (requests.RequestException, ValueError) as e:
        # ValueError covers response.json() on a non-JSON body, which on
        # older requests versions is not a RequestException subclass.
        print(f"Error in send_to_anything_llm: {str(e)}")
        return f"Error: {str(e)}", None
def send_to_llm(msg_list):
    """Route a chat history to the backend selected by PROVIDER.

    Args:
        msg_list: List of chat messages (dicts with "role"/"content").

    Returns:
        The ``(response_text, usage)`` tuple from the selected backend.

    Raises:
        ValueError: If PROVIDER names no known backend.
    """
    provider = load_params()['PROVIDER']
    dispatch = {
        "local-model": send_to_chatgpt,
        "anything-llm": send_to_anything_llm,
    }
    handler = dispatch.get(provider)
    if handler is None:
        raise ValueError(f"Unknown provider: {provider}")
    return handler(msg_list)