# NOTE: "Spaces: Runtime error / Runtime error" — Hugging Face Spaces status
# banner captured alongside this source; kept here as a comment, not code.
import sys
import os
import requests  # NOTE(review): appears unused in this file — confirm before removing

# Ensure we can import from parent directory
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

try:
    from logos.connectors import get_connector
except ImportError:
    # If running from hf_space root, try direct import
    sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
    from logos.connectors import get_connector
def test_local_connection(port: int = 1234) -> None:
    """Probe a local OpenAI-compatible server (LM Studio/Ollama) on *port*.

    Builds a connector via the project factory, sends a one-word probe
    prompt, and prints a human-readable diagnosis to stdout.

    Args:
        port: TCP port of the local server (LM Studio default is 1234).

    Returns:
        None. All results are reported via print(); the function returns
        early if the connector factory fails.
    """
    # Typo fix: was "Telemetery".
    print(f"--- Telemetry Probe: Localhost:{port} ---")

    # 1. Initialize Connector
    try:
        url = f"http://localhost:{port}/v1"
        print(f"Connecting to {url}...")
        connector = get_connector('local', base_url=url, model="local-model")
    except Exception as e:
        # Factory failure is fatal for the probe — nothing to ping.
        print(f"[FAIL] Factory Error: {e}")
        return

    # 2. Send Ping — a one-word prompt makes the success check trivial.
    prompt = "Reply with exactly one word: 'CONNECTED'."
    print(f"Sending Probe: \"{prompt}\"")
    try:
        response = connector.chat(prompt)
        print(f"\n[RESPONSE] >> {response}")
        # Case-insensitive containment: chatty models may pad the reply.
        if "CONNECTED" in response.upper():
            print("\n[SUCCESS] Link Established. The Local Cluster is Online.")
        else:
            print("\n[WARN] Link Active, but response was unexpected. (Model might be chatty)")
    except Exception as e:
        print(f"\n[FAIL] Connection Refused. {e}")
        # Capitalization fix: was "troubleshooting:".
        print("Troubleshooting:")
        print("1. Is LM Studio Server ON? (Green light?)")
        print("2. Is Port correct? (Default 1234)")
        print("3. Is CORS enabled in LM Studio?")
if __name__ == "__main__":
    import argparse

    # CLI entry point: accept an optional port override and run the probe.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--port", type=int, default=1234, help="LM Studio/Ollama Port")
    cli_args = arg_parser.parse_args()
    test_local_connection(cli_args.port)