Nhughes09 committed on
Commit ·
7691144
1
Parent(s): 7f10e33
V8.4: macOS python3 compatibility and Model Discovery
Browse files
app.py
CHANGED
|
@@ -615,9 +615,10 @@ def startup_event():
|
|
| 615 |
try:
|
| 616 |
# Quick ping to check if local server is up
|
| 617 |
logger.info("Pinging local Ollama server...")
|
| 618 |
-
ollama.list() # Simple list call to verify connection
|
| 619 |
-
|
| 620 |
-
|
|
|
|
| 621 |
except Exception as e:
|
| 622 |
logger.warning(f"Ollama is not running locally. Falling back to cloud. Error: {e}")
|
| 623 |
update_live_state("CLOUD FALLBACK MODE", "\n--- NEURAL CORE: OLLAMA OFFLINE. CLOUD ACTIVE. ---\n")
|
|
|
|
| 615 |
try:
|
| 616 |
# Quick ping to check if local server is up
|
| 617 |
logger.info("Pinging local Ollama server...")
|
| 618 |
+
models = ollama.list() # Simple list call to verify connection
|
| 619 |
+
model_names = [m['name'] for m in models.get('models', [])]
|
| 620 |
+
logger.info(f"Ollama is READY on local M1 GPU. Models: {model_names}")
|
| 621 |
+
update_live_state("LOCAL ENGINE READY", f"\n--- NEURAL CORE: OLLAMA READY (M1 GPU) ---\nINSTALLED MODELS: {', '.join(model_names)}\n")
|
| 622 |
except Exception as e:
|
| 623 |
logger.warning(f"Ollama is not running locally. Falling back to cloud. Error: {e}")
|
| 624 |
update_live_state("CLOUD FALLBACK MODE", "\n--- NEURAL CORE: OLLAMA OFFLINE. CLOUD ACTIVE. ---\n")
|