Spaces:
Running
Running
Improve error logging
Browse files
app.py
CHANGED
|
@@ -65,6 +65,7 @@ def setup_model(model_key, intial=False):
|
|
| 65 |
api_key=model_config.get('AUTH_TOKEN'),
|
| 66 |
base_url=base_url
|
| 67 |
)
|
|
|
|
| 68 |
log_debug(f"Switched to model {model_key} using endpoint {base_url}")
|
| 69 |
|
| 70 |
_model_hf_name = model_config.get("MODEL_HF_URL").split('https://huggingface.co/')[1]
|
|
@@ -108,6 +109,7 @@ def run_chat_inference(history, message, state):
|
|
| 108 |
|
| 109 |
# Reinitialize the OpenAI client with a random endpoint from the list
|
| 110 |
setup_model(model_config.get('MODEL_KEY'))
|
|
|
|
| 111 |
|
| 112 |
if len(history) == 0:
|
| 113 |
state["chat_id"] = uuid4().hex
|
|
@@ -160,7 +162,7 @@ def run_chat_inference(history, message, state):
|
|
| 160 |
stream=True
|
| 161 |
)
|
| 162 |
except Exception as e:
|
| 163 |
-
log_error(f"Error
|
| 164 |
error = str(e)
|
| 165 |
yield ([{"role": "assistant",
|
| 166 |
"content": "😔 The model is unavailable at the moment. Please try again later."}],
|
|
|
|
| 65 |
api_key=model_config.get('AUTH_TOKEN'),
|
| 66 |
base_url=base_url
|
| 67 |
)
|
| 68 |
+
model_config['base_url'] = base_url
|
| 69 |
log_debug(f"Switched to model {model_key} using endpoint {base_url}")
|
| 70 |
|
| 71 |
_model_hf_name = model_config.get("MODEL_HF_URL").split('https://huggingface.co/')[1]
|
|
|
|
| 109 |
|
| 110 |
# Reinitialize the OpenAI client with a random endpoint from the list
|
| 111 |
setup_model(model_config.get('MODEL_KEY'))
|
| 112 |
+
log_info(f"Using model {model_name} with endpoint {model_config.get('base_url')}")
|
| 113 |
|
| 114 |
if len(history) == 0:
|
| 115 |
state["chat_id"] = uuid4().hex
|
|
|
|
| 162 |
stream=True
|
| 163 |
)
|
| 164 |
except Exception as e:
|
| 165 |
+
log_error(f"Error:\n\t{e}\n\tInference failed for model {model_name} and endpoint {model_config['base_url']}")
|
| 166 |
error = str(e)
|
| 167 |
yield ([{"role": "assistant",
|
| 168 |
"content": "😔 The model is unavailable at the moment. Please try again later."}],
|