Add files using upload-large-folder tool
Browse files
- projects/ui/serena-new/src/interprompt/util/__init__.py +0 -0
- projects/ui/serena-new/src/serena/constants.py +35 -0
- projects/ui/serena-new/src/serena/dashboard.py +162 -0
- projects/ui/serena-new/src/serena/gui_log_viewer.py +405 -0
- projects/ui/serena-new/src/serena/mcp.py +348 -0
- projects/ui/serena-new/src/serena/project.py +306 -0
- projects/ui/serena-new/src/serena/prompt_factory.py +14 -0
- projects/ui/serena-new/src/serena/symbol.py +645 -0
- projects/ui/serena-new/src/serena/text_utils.py +368 -0
- projects/ui/serena-new/src/solidlsp/.gitignore +1 -0
- projects/ui/serena-new/src/solidlsp/__init__.py +2 -0
- projects/ui/serena-new/src/solidlsp/ls.py +1738 -0
- projects/ui/serena-new/src/solidlsp/ls_config.py +156 -0
- projects/ui/serena-new/src/solidlsp/ls_exceptions.py +40 -0
- projects/ui/serena-new/src/solidlsp/ls_handler.py +581 -0
- projects/ui/serena-new/src/solidlsp/ls_logger.py +66 -0
- projects/ui/serena-new/src/solidlsp/ls_request.py +383 -0
- projects/ui/serena-new/src/solidlsp/ls_types.py +343 -0
- projects/ui/serena-new/src/solidlsp/ls_utils.py +406 -0
- projects/ui/serena-new/src/solidlsp/settings.py +29 -0
projects/ui/serena-new/src/interprompt/util/__init__.py
ADDED
|
File without changes
|
projects/ui/serena-new/src/serena/constants.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pathlib import Path

# Filesystem anchors: the repository checkout root and the serena package directory.
_repo_root_path = Path(__file__).parent.parent.parent.resolve()
_serena_pkg_path = Path(__file__).parent.resolve()

# Shared (private) path fragments used to build the public constants below.
_resources_path = _serena_pkg_path / "resources"
_config_path = _resources_path / "config"

SERENA_MANAGED_DIR_NAME = ".serena"
_serena_in_home_managed_dir = Path.home() / ".serena"

SERENA_MANAGED_DIR_IN_HOME = str(_serena_in_home_managed_dir)

# TODO: Path-related constants should be moved to SerenaPaths; don't add further constants here.
REPO_ROOT = str(_repo_root_path)
PROMPT_TEMPLATES_DIR_INTERNAL = str(_config_path / "prompt_templates")
PROMPT_TEMPLATES_DIR_IN_USER_HOME = str(_serena_in_home_managed_dir / "prompt_templates")
# The contexts that are shipped with the Serena package, i.e. the default contexts.
SERENAS_OWN_CONTEXT_YAMLS_DIR = str(_config_path / "contexts")
# Contexts defined by the user. If a name of a context matches a name of a context in
# SERENAS_OWN_CONTEXT_YAMLS_DIR, the user context will override the default one.
USER_CONTEXT_YAMLS_DIR = str(_serena_in_home_managed_dir / "contexts")
# The modes that are shipped with the Serena package, i.e. the default modes.
SERENAS_OWN_MODE_YAMLS_DIR = str(_config_path / "modes")
# Modes defined by the user. If a name of a mode matches a name of a mode in
# SERENAS_OWN_MODE_YAMLS_DIR, the user mode will override the default one.
USER_MODE_YAMLS_DIR = str(_serena_in_home_managed_dir / "modes")
# Internal modes, never overridden by user modes.
INTERNAL_MODE_YAMLS_DIR = str(_config_path / "internal_modes")
SERENA_DASHBOARD_DIR = str(_resources_path / "dashboard")
SERENA_ICON_DIR = str(_resources_path / "icons")

DEFAULT_ENCODING = "utf-8"
DEFAULT_CONTEXT = "desktop-app"
DEFAULT_MODES = ("interactive", "editing")

PROJECT_TEMPLATE_FILE = str(_resources_path / "project.template.yml")
SERENA_CONFIG_TEMPLATE_FILE = str(_resources_path / "serena_config.template.yml")

SERENA_LOG_FORMAT = "%(levelname)-5s %(asctime)-15s [%(threadName)s] %(name)s:%(funcName)s:%(lineno)d - %(message)s"
|
projects/ui/serena-new/src/serena/dashboard.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import socket
|
| 3 |
+
import threading
|
| 4 |
+
from collections.abc import Callable
|
| 5 |
+
from typing import Any
|
| 6 |
+
|
| 7 |
+
from flask import Flask, Response, request, send_from_directory
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
from sensai.util import logging
|
| 10 |
+
|
| 11 |
+
from serena.analytics import ToolUsageStats
|
| 12 |
+
from serena.constants import SERENA_DASHBOARD_DIR
|
| 13 |
+
from serena.util.logging import MemoryLogHandler
|
| 14 |
+
|
| 15 |
+
log = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
# disable Werkzeug's logging to avoid cluttering the output
|
| 18 |
+
logging.getLogger("werkzeug").setLevel(logging.WARNING)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class RequestLog(BaseModel):
    """Request body for the log-message endpoint: asks for messages from start_idx onward."""

    # index of the first log message to return; 0 means "from the beginning"
    start_idx: int = 0
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class ResponseLog(BaseModel):
    """Response body for the log-message endpoint."""

    # the requested slice of log messages
    messages: list[str]
    # index of the last message currently held (len(all messages) - 1)
    max_idx: int
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class ResponseToolNames(BaseModel):
    """Response body listing the names of the tools available to the agent."""

    tool_names: list[str]
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class ResponseToolStats(BaseModel):
    """Response body with per-tool usage statistics (tool name -> counter name -> value)."""

    stats: dict[str, dict[str, int]]
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class SerenaDashboardAPI:
    """
    Flask-based web API backing the Serena dashboard: serves the static dashboard files
    and exposes endpoints for log messages, tool names/stats, and server shutdown.
    """

    log = logging.getLogger(__qualname__)

    def __init__(
        self,
        memory_log_handler: MemoryLogHandler,
        tool_names: list[str],
        shutdown_callback: Callable[[], None] | None = None,
        tool_usage_stats: ToolUsageStats | None = None,
    ) -> None:
        """
        :param memory_log_handler: handler from which log messages are read
        :param tool_names: names of the tools to expose via /get_tool_names
        :param shutdown_callback: optional callback invoked on /shutdown; if absent, the process exits
        :param tool_usage_stats: optional usage-statistics provider for the stats endpoints
        """
        self._memory_log_handler = memory_log_handler
        self._tool_names = tool_names
        self._shutdown_callback = shutdown_callback
        self._app = Flask(__name__)
        self._tool_usage_stats = tool_usage_stats
        self._setup_routes()

    @property
    def memory_log_handler(self) -> MemoryLogHandler:
        """The log handler whose messages this dashboard serves."""
        return self._memory_log_handler

    def _setup_routes(self) -> None:
        """Register all static-file and API routes on the Flask app."""

        # --- static files ---

        @self._app.route("/dashboard/<path:filename>")
        def serve_dashboard(filename: str) -> Response:
            return send_from_directory(SERENA_DASHBOARD_DIR, filename)

        @self._app.route("/dashboard/")
        def serve_dashboard_index() -> Response:
            return send_from_directory(SERENA_DASHBOARD_DIR, "index.html")

        # --- API routes ---

        @self._app.route("/get_log_messages", methods=["POST"])
        def get_log_messages() -> dict[str, Any]:
            payload = request.get_json()
            # an empty/missing body means "all messages from the start"
            request_log = RequestLog.model_validate(payload) if payload else RequestLog()
            return self._get_log_messages(request_log).model_dump()

        @self._app.route("/get_tool_names", methods=["GET"])
        def get_tool_names() -> dict[str, Any]:
            return self._get_tool_names().model_dump()

        @self._app.route("/get_tool_stats", methods=["GET"])
        def get_tool_stats_route() -> dict[str, Any]:
            return self._get_tool_stats().model_dump()

        @self._app.route("/clear_tool_stats", methods=["POST"])
        def clear_tool_stats_route() -> dict[str, str]:
            self._clear_tool_stats()
            return {"status": "cleared"}

        @self._app.route("/get_token_count_estimator_name", methods=["GET"])
        def get_token_count_estimator_name() -> dict[str, str]:
            estimator_name = self._tool_usage_stats.token_estimator_name if self._tool_usage_stats else "unknown"
            return {"token_count_estimator_name": estimator_name}

        @self._app.route("/shutdown", methods=["PUT"])
        def shutdown() -> dict[str, str]:
            self._shutdown()
            return {"status": "shutting down"}

    def _get_log_messages(self, request_log: RequestLog) -> ResponseLog:
        """Return the messages from request_log.start_idx onward plus the current max index."""
        all_messages = self._memory_log_handler.get_log_messages()
        if request_log.start_idx <= len(all_messages):
            requested_messages = all_messages[request_log.start_idx:]
        else:
            requested_messages = []
        return ResponseLog(messages=requested_messages, max_idx=len(all_messages) - 1)

    def _get_tool_names(self) -> ResponseToolNames:
        return ResponseToolNames(tool_names=self._tool_names)

    def _get_tool_stats(self) -> ResponseToolStats:
        """Return the tool usage statistics, or an empty mapping if no stats provider is set."""
        stats = self._tool_usage_stats.get_tool_stats_dict() if self._tool_usage_stats is not None else {}
        return ResponseToolStats(stats=stats)

    def _clear_tool_stats(self) -> None:
        if self._tool_usage_stats is not None:
            self._tool_usage_stats.clear()

    def _shutdown(self) -> None:
        """Invoke the shutdown callback if configured; otherwise terminate the process."""
        log.info("Shutting down Serena")
        if self._shutdown_callback:
            self._shutdown_callback()
        else:
            # noinspection PyProtectedMember
            # noinspection PyUnresolvedReferences
            os._exit(0)

    @staticmethod
    def _find_first_free_port(start_port: int) -> int:
        """Return the first port >= start_port that can be bound, raising RuntimeError if none exists."""
        for candidate in range(start_port, 65536):
            try:
                with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                    sock.bind(("0.0.0.0", candidate))
                    return candidate
            except OSError:
                continue
        raise RuntimeError(f"No free ports found starting from {start_port}")

    def run(self, host: str = "0.0.0.0", port: int = 0x5EDA) -> int:
        """
        Runs the dashboard on the given host and port and returns the port number.
        """
        # patch flask.cli.show_server_banner to avoid printing the server info
        from flask import cli

        cli.show_server_banner = lambda *args, **kwargs: None

        self._app.run(host=host, port=port, debug=False, use_reloader=False, threaded=True)
        return port

    def run_in_thread(self) -> tuple[threading.Thread, int]:
        """Start the dashboard on a free port in a daemon thread; returns (thread, port)."""
        chosen_port = self._find_first_free_port(0x5EDA)
        worker = threading.Thread(target=lambda: self.run(port=chosen_port), daemon=True)
        worker.start()
        return worker, chosen_port
|
projects/ui/serena-new/src/serena/gui_log_viewer.py
ADDED
|
@@ -0,0 +1,405 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# mypy: ignore-errors
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import queue
|
| 5 |
+
import sys
|
| 6 |
+
import threading
|
| 7 |
+
import tkinter as tk
|
| 8 |
+
import traceback
|
| 9 |
+
from enum import Enum, auto
|
| 10 |
+
from pathlib import Path
|
| 11 |
+
from typing import Literal
|
| 12 |
+
|
| 13 |
+
from serena import constants
|
| 14 |
+
from serena.util.logging import MemoryLogHandler
|
| 15 |
+
|
| 16 |
+
log = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class LogLevel(Enum):
    """Severity levels the GUI log viewer distinguishes; DEFAULT is used when no level can be inferred."""

    DEBUG = auto()
    INFO = auto()
    WARNING = auto()
    ERROR = auto()
    DEFAULT = auto()
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class GuiLogViewer:
    """
    A class that creates a Tkinter GUI for displaying log messages in a separate thread.
    The log viewer supports coloring based on log levels (DEBUG, INFO, WARNING, ERROR).
    It can also highlight tool names in boldface when they appear in log messages.
    """

    def __init__(
        self,
        mode: Literal["dashboard", "error"],
        title="Log Viewer",
        memory_log_handler: MemoryLogHandler | None = None,
        width=800,
        height=600,
    ):
        """
        :param mode: the mode; if "dashboard", run a dashboard with logs and some control options; if "error", run
            a simple error log viewer (for fatal exceptions)
        :param title: the window title
        :param memory_log_handler: an optional log handler from which to obtain log messages; If not provided,
            must pass the instance to a `GuiLogViewerHandler` to add log messages.
        :param width: the initial window width
        :param height: the initial window height
        """
        self.mode = mode
        self.title = title
        self.width = width
        self.height = height
        # thread-safe hand-off of messages from producers to the Tk event loop
        self.message_queue = queue.Queue()
        self.running = False
        self.log_thread = None
        self.tool_names = []  # List to store tool names for highlighting

        # Define colors for different log levels
        self.log_colors = {
            LogLevel.DEBUG: "#808080",  # Gray
            LogLevel.INFO: "#000000",  # Black
            LogLevel.WARNING: "#FF8C00",  # Dark Orange
            LogLevel.ERROR: "#FF0000",  # Red
            LogLevel.DEFAULT: "#000000",  # Black
        }

        if memory_log_handler is not None:
            # replay messages already collected, then subscribe to all future ones
            for msg in memory_log_handler.get_log_messages():
                self.message_queue.put(msg)
            memory_log_handler.add_emit_callback(lambda msg: self.message_queue.put(msg))

    def start(self):
        """Start the log viewer in a separate (daemon) thread.

        Returns True if a thread was started, False if the viewer was already running.
        NOTE(review): self.running is only set True inside run_gui, so two rapid calls
        could both start threads — confirm whether this race matters to callers.
        """
        if not self.running:
            self.log_thread = threading.Thread(target=self.run_gui)
            self.log_thread.daemon = True
            self.log_thread.start()
            return True
        return False

    def stop(self):
        """Stop the log viewer; returns True if a stop was requested, False if not running."""
        if self.running:
            # Add a sentinel value to the queue to signal the GUI to exit
            self.message_queue.put(None)
            return True
        return False

    def set_tool_names(self, tool_names):
        """
        Set or update the list of tool names to be highlighted in log messages.

        Args:
            tool_names (list): A list of tool name strings to highlight

        """
        self.tool_names = tool_names

    def add_log(self, message):
        """
        Add a log message to the viewer.

        Args:
            message (str): The log message to display

        """
        self.message_queue.put(message)

    def _determine_log_level(self, message):
        """
        Determine the log level from the message.

        The level is inferred purely from the message's leading text
        (case-insensitive prefix match).

        Args:
            message (str): The log message

        Returns:
            LogLevel: The determined log level

        """
        message_upper = message.upper()
        if message_upper.startswith("DEBUG"):
            return LogLevel.DEBUG
        elif message_upper.startswith("INFO"):
            return LogLevel.INFO
        elif message_upper.startswith("WARNING"):
            return LogLevel.WARNING
        elif message_upper.startswith("ERROR"):
            return LogLevel.ERROR
        else:
            return LogLevel.DEFAULT

    def _process_queue(self):
        """Process messages from the queue and update the text widget.

        Runs on the Tk event loop (scheduled via root.after); drains the queue,
        inserts each message with its level's color tag, highlights tool names,
        and re-schedules itself every 100 ms while running.
        """
        try:
            while not self.message_queue.empty():
                message = self.message_queue.get_nowait()

                # Check for sentinel value to exit
                if message is None:
                    self.root.quit()
                    return

                # Check if scrollbar is at the bottom before adding new text
                # Get current scroll position
                current_position = self.text_widget.yview()
                # If near the bottom (allowing for small floating point differences)
                was_at_bottom = current_position[1] > 0.99

                log_level = self._determine_log_level(message)

                # Insert the message at the end of the text with appropriate log level tag
                # (widget is kept read-only; temporarily enable it for insertion)
                self.text_widget.configure(state=tk.NORMAL)

                # Find tool names in the message and highlight them
                if self.tool_names:
                    # Capture start position (before insertion)
                    start_index = self.text_widget.index("end-1c")

                    # Insert the message
                    self.text_widget.insert(tk.END, message + "\n", log_level.name)

                    # Convert start index to line/char format
                    line, char = map(int, start_index.split("."))

                    # Search for tool names in the message string directly
                    for tool_name in self.tool_names:
                        start_offset = 0
                        while True:
                            found_at = message.find(tool_name, start_offset)
                            if found_at == -1:
                                break

                            # Calculate line/column from offset, accounting for embedded newlines
                            offset_line = line
                            offset_char = char
                            for c in message[:found_at]:
                                if c == "\n":
                                    offset_line += 1
                                    offset_char = 0
                                else:
                                    offset_char += 1

                            # Construct index positions
                            start_pos = f"{offset_line}.{offset_char}"
                            end_pos = f"{offset_line}.{offset_char + len(tool_name)}"

                            # Add tag to highlight the tool name
                            self.text_widget.tag_add("TOOL_NAME", start_pos, end_pos)

                            start_offset = found_at + len(tool_name)

                else:
                    # No tool names to highlight, just insert the message
                    self.text_widget.insert(tk.END, message + "\n", log_level.name)

                self.text_widget.configure(state=tk.DISABLED)

                # Auto-scroll to the bottom only if it was already at the bottom
                if was_at_bottom:
                    self.text_widget.see(tk.END)

            # Schedule to check the queue again
            if self.running:
                self.root.after(100, self._process_queue)

        except Exception as e:
            # keep the poll loop alive even if a single message fails to render
            print(f"Error processing message queue: {e}", file=sys.stderr)
            if self.running:
                self.root.after(100, self._process_queue)

    def run_gui(self):
        """Run the GUI (blocks in the Tk mainloop until the window is closed/quit)."""
        self.running = True
        try:
            # Set app id (avoid app being lumped together with other Python-based apps in Windows taskbar)
            if sys.platform == "win32":
                import ctypes

                ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID("oraios.serena")

            self.root = tk.Tk()
            self.root.title(self.title)
            self.root.geometry(f"{self.width}x{self.height}")

            # Make the window resizable
            self.root.columnconfigure(0, weight=1)
            # We now have two rows - one for logo and one for text
            self.root.rowconfigure(0, weight=0)  # Logo row
            self.root.rowconfigure(1, weight=1)  # Text content row

            dashboard_path = Path(constants.SERENA_DASHBOARD_DIR)

            # Load and display the logo image
            try:
                # construct path relative to path of this file
                image_path = dashboard_path / "serena-logs.png"
                self.logo_image = tk.PhotoImage(file=image_path)

                # Create a label to display the logo
                self.logo_label = tk.Label(self.root, image=self.logo_image)
                self.logo_label.grid(row=0, column=0, sticky="ew")
            except Exception as e:
                # a missing/broken logo must not prevent the viewer from opening
                print(f"Error loading logo image: {e}", file=sys.stderr)

            # Create frame to hold text widget and scrollbars
            frame = tk.Frame(self.root)
            frame.grid(row=1, column=0, sticky="nsew")
            frame.columnconfigure(0, weight=1)
            frame.rowconfigure(0, weight=1)

            # Create horizontal scrollbar
            h_scrollbar = tk.Scrollbar(frame, orient=tk.HORIZONTAL)
            h_scrollbar.grid(row=1, column=0, sticky="ew")

            # Create vertical scrollbar
            v_scrollbar = tk.Scrollbar(frame, orient=tk.VERTICAL)
            v_scrollbar.grid(row=0, column=1, sticky="ns")

            # Create text widget with horizontal scrolling
            self.text_widget = tk.Text(
                frame, wrap=tk.NONE, width=self.width, height=self.height, xscrollcommand=h_scrollbar.set, yscrollcommand=v_scrollbar.set
            )
            self.text_widget.grid(row=0, column=0, sticky="nsew")
            self.text_widget.configure(state=tk.DISABLED)  # Make it read-only

            # Configure scrollbars
            h_scrollbar.config(command=self.text_widget.xview)
            v_scrollbar.config(command=self.text_widget.yview)

            # Configure tags for different log levels with appropriate colors
            for level, color in self.log_colors.items():
                self.text_widget.tag_configure(level.name, foreground=color)

            # Configure tag for tool names
            self.text_widget.tag_configure("TOOL_NAME", background="#ffff00")

            # Set up the queue processing
            self.root.after(100, self._process_queue)

            # Handle window close event depending on mode:
            # dashboard windows minimize instead of closing; error windows stop the viewer
            if self.mode == "dashboard":
                self.root.protocol("WM_DELETE_WINDOW", lambda: self.root.iconify())
            else:
                self.root.protocol("WM_DELETE_WINDOW", self.stop)

            # Create menu bar
            if self.mode == "dashboard":
                menubar = tk.Menu(self.root)
                server_menu = tk.Menu(menubar, tearoff=0)
                server_menu.add_command(label="Shutdown", command=self._shutdown_server)  # type: ignore
                menubar.add_cascade(label="Server", menu=server_menu)
                self.root.config(menu=menubar)

            # Configure icons
            icon_16 = tk.PhotoImage(file=dashboard_path / "serena-icon-16.png")
            icon_32 = tk.PhotoImage(file=dashboard_path / "serena-icon-32.png")
            icon_48 = tk.PhotoImage(file=dashboard_path / "serena-icon-48.png")
            self.root.iconphoto(False, icon_48, icon_32, icon_16)

            # Start the Tkinter event loop
            self.root.mainloop()

        except Exception as e:
            print(f"Error in GUI thread: {e}", file=sys.stderr)
        finally:
            self.running = False

    def _shutdown_server(self) -> None:
        """Terminate the whole process (invoked from the dashboard's Server > Shutdown menu)."""
        log.info("Shutting down Serena")
        # noinspection PyUnresolvedReferences
        # noinspection PyProtectedMember
        os._exit(0)
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
class GuiLogViewerHandler(logging.Handler):
    """
    A logging handler that sends log records to a ThreadedLogViewer instance.
    This handler can be integrated with Python's standard logging module
    to direct log entries to a GUI log viewer.
    """

    def __init__(
        self,
        log_viewer: GuiLogViewer,
        level=logging.NOTSET,
        format_string: str | None = "%(levelname)-5s %(asctime)-15s %(name)s:%(funcName)s:%(lineno)d - %(message)s",
    ):
        """
        Initialize the handler with a ThreadedLogViewer instance.

        Args:
            log_viewer: A ThreadedLogViewer instance that will display the logs
            level: The logging level (default: NOTSET which captures all logs)
            format_string: the format string

        """
        super().__init__(level)
        self.log_viewer = log_viewer
        self.formatter = logging.Formatter(format_string)

        # Start the log viewer if it's not already running
        if not self.log_viewer.running:
            self.log_viewer.start()

    @classmethod
    def is_instance_registered(cls) -> bool:
        """Return True if a handler of this class is attached to the root logger."""
        return any(isinstance(handler, cls) for handler in logging.Logger.root.handlers)

    def emit(self, record):
        """
        Emit a log record to the ThreadedLogViewer.

        Args:
            record: The log record to emit

        """
        try:
            formatted = self.format(record)

            # The viewer infers the level from the message prefix, so make sure
            # the level name leads the message if the formatter didn't put it there.
            level_prefix = record.levelname
            if not formatted.startswith(level_prefix):
                formatted = f"{level_prefix}: {formatted}"

            self.log_viewer.add_log(formatted)

        except Exception:
            self.handleError(record)

    def close(self):
        """
        Close the handler and optionally stop the log viewer.
        """
        # We don't automatically stop the log viewer here as it might
        # be used by other handlers or directly by the application
        super().close()

    def stop_viewer(self):
        """
        Explicitly stop the associated log viewer.
        """
        if self.log_viewer.running:
            self.log_viewer.stop()
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
def show_fatal_exception(e: Exception):
    """
    Makes sure the given exception is shown in the GUI log viewer,
    either an existing instance or a new one.

    :param e: the exception to display
    """
    # show in new window in main thread (user must close it)
    viewer = GuiLogViewer("error")
    formatted_traceback = "".join(traceback.format_exception(type(e), e, e.__traceback__))
    viewer.add_log(f"ERROR Fatal exception: {e}\n{formatted_traceback}")
    # blocks until the user closes the error window
    viewer.run_gui()
|
projects/ui/serena-new/src/serena/mcp.py
ADDED
|
@@ -0,0 +1,348 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The Serena Model Context Protocol (MCP) Server
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
from abc import abstractmethod
|
| 7 |
+
from collections.abc import AsyncIterator, Iterator, Sequence
|
| 8 |
+
from contextlib import asynccontextmanager
|
| 9 |
+
from copy import deepcopy
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
from typing import Any, Literal, cast
|
| 12 |
+
|
| 13 |
+
import docstring_parser
|
| 14 |
+
from mcp.server.fastmcp import server
|
| 15 |
+
from mcp.server.fastmcp.server import FastMCP, Settings
|
| 16 |
+
from mcp.server.fastmcp.tools.base import Tool as MCPTool
|
| 17 |
+
from pydantic_settings import SettingsConfigDict
|
| 18 |
+
from sensai.util import logging
|
| 19 |
+
|
| 20 |
+
from serena.agent import (
|
| 21 |
+
SerenaAgent,
|
| 22 |
+
SerenaConfig,
|
| 23 |
+
)
|
| 24 |
+
from serena.config.context_mode import SerenaAgentContext, SerenaAgentMode
|
| 25 |
+
from serena.constants import DEFAULT_CONTEXT, DEFAULT_MODES, SERENA_LOG_FORMAT
|
| 26 |
+
from serena.tools import Tool
|
| 27 |
+
from serena.util.exception import show_fatal_exception_safe
|
| 28 |
+
from serena.util.logging import MemoryLogHandler
|
| 29 |
+
|
| 30 |
+
log = logging.getLogger(__name__)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def configure_logging(*args, **kwargs) -> None:  # type: ignore
    """
    Replacement for fastmcp's hard-coded logging configuration.

    Only configures logging if it has not been configured yet; normally, logging
    is configured in the MCP server startup script, in which case this is a no-op.
    """
    if logging.is_enabled():
        # logging was already set up elsewhere (e.g. the MCP server startup script)
        return
    logging.basicConfig(level=logging.INFO, stream=sys.stderr, format=SERENA_LOG_FORMAT)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
# patch the logging configuration function in fastmcp, because it's hard-coded and broken
# (the module-level replacement above respects an already-configured logging setup)
server.configure_logging = configure_logging  # type: ignore
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@dataclass
class SerenaMCPRequestContext:
    """Request-scoped context object carrying the agent that serves MCP requests."""

    # the agent instance backing the MCP server
    agent: SerenaAgent
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class SerenaMCPFactory:
    """
    Abstract base class for factories that create a FastMCP server backed by a SerenaAgent.

    Subclasses decide where the agent lives (e.g. in-process) by implementing
    :meth:`_instantiate_agent`, :meth:`_iter_tools`, :meth:`_get_initial_instructions`
    and :meth:`server_lifespan`.
    """

    def __init__(self, context: str = DEFAULT_CONTEXT, project: str | None = None):
        """
        :param context: The context name or path to context file
        :param project: Either an absolute path to the project directory or a name of an already registered project.
            If the project passed here hasn't been registered yet, it will be registered automatically and can be activated by its name
            afterward.
        """
        self.context = SerenaAgentContext.load(context)
        self.project = project

    @staticmethod
    def _sanitize_for_openai_tools(schema: dict) -> dict:
        """
        Make a Pydantic/JSON Schema object compatible with OpenAI tool schemas.
        Only called when `openai_tool_compatible` is True.

        Transformations applied (recursively):
        - 'integer' -> 'number' (+ multipleOf: 1 to retain the integer-like restriction)
        - remove 'null' from union type arrays (OpenAI tools don't support nullables)
        - coerce integer-only enums to number
        - best-effort simplify oneOf/anyOf when the alternatives only differ by integer/number

        :param schema: the JSON schema to sanitize; not modified (a deep copy is transformed)
        :return: the sanitized schema
        """
        import json  # local import retained from original; hoisted out of the traversal loop

        s = deepcopy(schema)

        def walk(node):  # type: ignore
            if not isinstance(node, dict):
                # lists get handled by parent calls
                return node

            # ---- handle type ----
            t = node.get("type")
            if isinstance(t, str):
                if t == "integer":
                    node["type"] = "number"
                    # preserve existing multipleOf but ensure it's integer-like
                    if "multipleOf" not in node:
                        node["multipleOf"] = 1
            elif isinstance(t, list):
                # remove 'null' (OpenAI tools don't support nullables) and coerce integer -> number
                t2 = [x if x != "integer" else "number" for x in t if x != "null"]
                if not t2:
                    # fall back to object if it somehow becomes empty
                    t2 = ["object"]
                node["type"] = t2[0] if len(t2) == 1 else t2
                if "integer" in t or "number" in t2:
                    # if integers were present, keep the integer-like restriction
                    node.setdefault("multipleOf", 1)

            # ---- enums of integers -> number ----
            if "enum" in node and isinstance(node["enum"], list):
                vals = node["enum"]
                if vals and all(isinstance(v, int) for v in vals):
                    node.setdefault("type", "number")
                    # keep them as ints; JSON 'number' covers ints
                    node.setdefault("multipleOf", 1)

            # ---- simplify anyOf/oneOf if they only differ by integer/number ----
            for key in ("oneOf", "anyOf"):
                if key in node and isinstance(node[key], list):
                    # Special case: anyOf or oneOf with "type X" and "null"
                    if len(node[key]) == 2:
                        types = [sub.get("type") for sub in node[key]]
                        if "null" in types:
                            non_null_type = next(x for x in types if x != "null")
                            if isinstance(non_null_type, str):
                                # FIX: apply the integer -> number coercion here as well; previously an
                                # optional integer field (anyOf: [integer, null]) kept type 'integer',
                                # which defeats the purpose of this sanitizer.
                                if non_null_type == "integer":
                                    node["type"] = "number"
                                    node.setdefault("multipleOf", 1)
                                else:
                                    node["type"] = non_null_type
                                node.pop(key, None)
                                continue
                    simplified = [walk(sub) for sub in node[key]]  # recurse into each alternative
                    changed = False
                    # If all subs are the same after integer -> number, collapse the union
                    # (canonical JSON comparison; hoisted out of the per-sub loop)
                    try:
                        canon = [json.dumps(x, sort_keys=True) for x in simplified]
                        if len(set(canon)) == 1:
                            # copy the single schema up
                            only = simplified[0]
                            node.pop(key, None)
                            for k, v in only.items():
                                if k not in node:
                                    node[k] = v
                            changed = True
                    except Exception:
                        # non-serializable sub-schema: keep the (sanitized) union as-is
                        pass
                    if not changed:
                        node[key] = simplified

            # ---- recurse into known schema containers ----
            for child_key in ("properties", "patternProperties", "definitions", "$defs"):
                if child_key in node and isinstance(node[child_key], dict):
                    for k, v in list(node[child_key].items()):
                        node[child_key][k] = walk(v)

            # arrays/items
            if "items" in node:
                node["items"] = walk(node["items"])

            # allOf/if/then/else - pass through with integer -> number conversions applied inside
            for key in ("allOf",):
                if key in node and isinstance(node[key], list):
                    node[key] = [walk(x) for x in node[key]]

            if "if" in node:
                node["if"] = walk(node["if"])
            if "then" in node:
                node["then"] = walk(node["then"])
            if "else" in node:
                node["else"] = walk(node["else"])

            return node

        return walk(s)

    @staticmethod
    def make_mcp_tool(tool: Tool, openai_tool_compatible: bool = True) -> MCPTool:
        """
        Create an MCP tool from a Serena Tool instance.

        :param tool: The Serena Tool instance to convert.
        :param openai_tool_compatible: whether to process the tool schema to be compatible with OpenAI tools
            (doesn't accept integer, needs number instead, etc.). This allows using Serena MCP within codex.
        :return: the corresponding MCP tool
        """
        func_name = tool.get_name()
        func_doc = tool.get_apply_docstring() or ""
        func_arg_metadata = tool.get_apply_fn_metadata()
        is_async = False
        parameters = func_arg_metadata.arg_model.model_json_schema()
        if openai_tool_compatible:
            parameters = SerenaMCPFactory._sanitize_for_openai_tools(parameters)

        docstring = docstring_parser.parse(func_doc)

        # Mount the tool description as a combination of the docstring description and
        # the return value description, if it exists; a context-level override takes precedence.
        overridden_description = tool.agent.get_context().tool_description_overrides.get(func_name, None)

        if overridden_description is not None:
            func_doc = overridden_description
        elif docstring.description:
            func_doc = docstring.description
        else:
            func_doc = ""
        # normalize to a single trailing period
        func_doc = func_doc.strip().strip(".")
        if func_doc:
            func_doc += "."
        if docstring.returns and (docstring_returns_descr := docstring.returns.description):
            # Only add a space before "Returns" if func_doc is not empty
            prefix = " " if func_doc else ""
            func_doc = f"{func_doc}{prefix}Returns {docstring_returns_descr.strip().strip('.')}."

        # Parse the parameter descriptions from the docstring and pass each description
        # on to the corresponding parameter schema.
        docstring_params = {param.arg_name: param for param in docstring.params}
        parameters_properties: dict[str, dict[str, Any]] = parameters["properties"]
        for parameter, properties in parameters_properties.items():
            if (param_doc := docstring_params.get(parameter)) and param_doc.description:
                param_desc = f"{param_doc.description.strip().strip('.') + '.'}"
                properties["description"] = param_desc[0].upper() + param_desc[1:]

        def execute_fn(**kwargs) -> str:  # type: ignore
            # apply_ex handles logging and exception capture so the MCP layer always gets a string
            return tool.apply_ex(log_call=True, catch_exceptions=True, **kwargs)

        return MCPTool(
            fn=execute_fn,
            name=func_name,
            description=func_doc,
            parameters=parameters,
            fn_metadata=func_arg_metadata,
            is_async=is_async,
            context_kwarg=None,
            annotations=None,
            title=None,
        )

    @abstractmethod
    def _iter_tools(self) -> Iterator[Tool]:
        """Iterate over the tool instances to be exposed by the MCP server."""

    # noinspection PyProtectedMember
    def _set_mcp_tools(self, mcp: FastMCP, openai_tool_compatible: bool = False) -> None:
        """
        Update the tools in the MCP server (replaces the server's entire tool registry).

        :param mcp: the FastMCP server instance whose tools are to be set
        :param openai_tool_compatible: whether to sanitize tool schemas for OpenAI compatibility
        """
        if mcp is not None:
            mcp._tool_manager._tools = {}
            for tool in self._iter_tools():
                mcp_tool = self.make_mcp_tool(tool, openai_tool_compatible=openai_tool_compatible)
                mcp._tool_manager._tools[tool.get_name()] = mcp_tool
            log.info(f"Starting MCP server with {len(mcp._tool_manager._tools)} tools: {list(mcp._tool_manager._tools.keys())}")

    @abstractmethod
    def _instantiate_agent(self, serena_config: SerenaConfig, modes: list[SerenaAgentMode]) -> None:
        """Create the SerenaAgent (or a proxy to it) from the given configuration and modes."""

    def create_mcp_server(
        self,
        host: str = "0.0.0.0",
        port: int = 8000,
        modes: Sequence[str] = DEFAULT_MODES,
        enable_web_dashboard: bool | None = None,
        enable_gui_log_window: bool | None = None,
        log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] | None = None,
        trace_lsp_communication: bool | None = None,
        tool_timeout: float | None = None,
    ) -> FastMCP:
        """
        Create an MCP server with process-isolated SerenaAgent to prevent asyncio contamination.

        :param host: The host to bind to
        :param port: The port to bind to
        :param modes: List of mode names or paths to mode files
        :param enable_web_dashboard: Whether to enable the web dashboard. If not specified, will take the value from the serena configuration.
        :param enable_gui_log_window: Whether to enable the GUI log window. It currently does not work on macOS, and setting this to True will be ignored then.
            If not specified, will take the value from the serena configuration.
        :param log_level: Log level. If not specified, will take the value from the serena configuration.
        :param trace_lsp_communication: Whether to trace the communication between Serena and the language servers.
            This is useful for debugging language server issues.
        :param tool_timeout: Timeout in seconds for tool execution. If not specified, will take the value from the serena configuration.
        :return: the configured FastMCP server instance (not yet running)
        """
        try:
            config = SerenaConfig.from_config_file()

            # update configuration with the provided parameters (only overriding explicitly given values)
            if enable_web_dashboard is not None:
                config.web_dashboard = enable_web_dashboard
            if enable_gui_log_window is not None:
                config.gui_log_window_enabled = enable_gui_log_window
            if log_level is not None:
                log_level = cast(Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], log_level.upper())
                config.log_level = logging.getLevelNamesMapping()[log_level]
            if trace_lsp_communication is not None:
                config.trace_lsp_communication = trace_lsp_communication
            if tool_timeout is not None:
                config.tool_timeout = tool_timeout

            modes_instances = [SerenaAgentMode.load(mode) for mode in modes]
            self._instantiate_agent(config, modes_instances)

        except Exception as e:
            # surface startup failures to the user (GUI/stderr) before re-raising
            show_fatal_exception_safe(e)
            raise

        # Override model_config to disable the use of `.env` files for reading settings, because user projects are likely to contain
        # `.env` files (e.g. containing LOG_LEVEL) that are not supposed to override the MCP settings;
        # retain only FASTMCP_ prefix for already set environment variables.
        Settings.model_config = SettingsConfigDict(env_prefix="FASTMCP_")
        instructions = self._get_initial_instructions()
        mcp = FastMCP(lifespan=self.server_lifespan, host=host, port=port, instructions=instructions)
        return mcp

    @asynccontextmanager
    @abstractmethod
    async def server_lifespan(self, mcp_server: FastMCP) -> AsyncIterator[None]:
        """Manage server startup and shutdown lifecycle."""
        yield None  # ensures MyPy understands we yield None

    @abstractmethod
    def _get_initial_instructions(self) -> str:
        """Return the initial instructions (system prompt) to be passed to the MCP client."""
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class SerenaMCPFactorySingleProcess(SerenaMCPFactory):
    """
    MCP server factory where the SerenaAgent and its language server run in the same process as the MCP server
    """

    def __init__(self, context: str = DEFAULT_CONTEXT, project: str | None = None, memory_log_handler: MemoryLogHandler | None = None):
        """
        :param context: The context name or path to context file
        :param project: Either an absolute path to the project directory or a name of an already registered project.
            If the project passed here hasn't been registered yet, it will be registered automatically and can be activated by its name
            afterward.
        :param memory_log_handler: optional in-memory log handler that is passed on to the agent
        """
        super().__init__(context=context, project=project)
        # the agent is created lazily in _instantiate_agent (invoked via create_mcp_server)
        self.agent: SerenaAgent | None = None
        self.memory_log_handler = memory_log_handler

    def _instantiate_agent(self, serena_config: SerenaConfig, modes: list[SerenaAgentMode]) -> None:
        # single-process variant: the agent lives in this very process
        self.agent = SerenaAgent(
            project=self.project, serena_config=serena_config, context=self.context, modes=modes, memory_log_handler=self.memory_log_handler
        )

    def _iter_tools(self) -> Iterator[Tool]:
        # only valid after _instantiate_agent has been called
        assert self.agent is not None
        yield from self.agent.get_exposed_tool_instances()

    def _get_initial_instructions(self) -> str:
        assert self.agent is not None
        # we don't use the tool (which at the time of writing calls this method), since the tool may be disabled by the config
        return self.agent.create_system_prompt()

    @asynccontextmanager
    async def server_lifespan(self, mcp_server: FastMCP) -> AsyncIterator[None]:
        """Manage server startup and shutdown lifecycle; registers the agent's tools before serving starts."""
        # these contexts require OpenAI-compatible tool schemas (no 'integer' type, etc.)
        openai_tool_compatible = self.context.name in ["chatgpt", "codex", "oaicompat-agent"]
        self._set_mcp_tools(mcp_server, openai_tool_compatible=openai_tool_compatible)
        log.info("MCP server lifetime setup complete")
        yield
|
projects/ui/serena-new/src/serena/project.py
ADDED
|
@@ -0,0 +1,306 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
|
| 5 |
+
import pathspec
|
| 6 |
+
|
| 7 |
+
from serena.config.serena_config import DEFAULT_TOOL_TIMEOUT, ProjectConfig
|
| 8 |
+
from serena.constants import SERENA_MANAGED_DIR_IN_HOME, SERENA_MANAGED_DIR_NAME
|
| 9 |
+
from serena.text_utils import MatchedConsecutiveLines, search_files
|
| 10 |
+
from serena.util.file_system import GitignoreParser, match_path
|
| 11 |
+
from solidlsp import SolidLanguageServer
|
| 12 |
+
from solidlsp.ls_config import Language, LanguageServerConfig
|
| 13 |
+
from solidlsp.ls_logger import LanguageServerLogger
|
| 14 |
+
from solidlsp.settings import SolidLSPSettings
|
| 15 |
+
|
| 16 |
+
log = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class Project:
|
| 20 |
+
def __init__(self, project_root: str, project_config: ProjectConfig, is_newly_created: bool = False):
|
| 21 |
+
self.project_root = project_root
|
| 22 |
+
self.project_config = project_config
|
| 23 |
+
self.is_newly_created = is_newly_created
|
| 24 |
+
|
| 25 |
+
# create .gitignore file in the project's Serena data folder if not yet present
|
| 26 |
+
serena_data_gitignore_path = os.path.join(self.path_to_serena_data_folder(), ".gitignore")
|
| 27 |
+
if not os.path.exists(serena_data_gitignore_path):
|
| 28 |
+
os.makedirs(os.path.dirname(serena_data_gitignore_path), exist_ok=True)
|
| 29 |
+
log.info(f"Creating .gitignore file in {serena_data_gitignore_path}")
|
| 30 |
+
with open(serena_data_gitignore_path, "w", encoding="utf-8") as f:
|
| 31 |
+
f.write(f"/{SolidLanguageServer.CACHE_FOLDER_NAME}\n")
|
| 32 |
+
|
| 33 |
+
# gather ignored paths from the project configuration and gitignore files
|
| 34 |
+
ignored_patterns = project_config.ignored_paths
|
| 35 |
+
if len(ignored_patterns) > 0:
|
| 36 |
+
log.info(f"Using {len(ignored_patterns)} ignored paths from the explicit project configuration.")
|
| 37 |
+
log.debug(f"Ignored paths: {ignored_patterns}")
|
| 38 |
+
if project_config.ignore_all_files_in_gitignore:
|
| 39 |
+
gitignore_parser = GitignoreParser(self.project_root)
|
| 40 |
+
for spec in gitignore_parser.get_ignore_specs():
|
| 41 |
+
log.debug(f"Adding {len(spec.patterns)} patterns from {spec.file_path} to the ignored paths.")
|
| 42 |
+
ignored_patterns.extend(spec.patterns)
|
| 43 |
+
self._ignored_patterns = ignored_patterns
|
| 44 |
+
|
| 45 |
+
# Set up the pathspec matcher for the ignored paths
|
| 46 |
+
# for all absolute paths in ignored_paths, convert them to relative paths
|
| 47 |
+
processed_patterns = []
|
| 48 |
+
for pattern in set(ignored_patterns):
|
| 49 |
+
# Normalize separators (pathspec expects forward slashes)
|
| 50 |
+
pattern = pattern.replace(os.path.sep, "/")
|
| 51 |
+
processed_patterns.append(pattern)
|
| 52 |
+
log.debug(f"Processing {len(processed_patterns)} ignored paths")
|
| 53 |
+
self._ignore_spec = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, processed_patterns)
|
| 54 |
+
|
| 55 |
+
@property
|
| 56 |
+
def project_name(self) -> str:
|
| 57 |
+
return self.project_config.project_name
|
| 58 |
+
|
| 59 |
+
@property
|
| 60 |
+
def language(self) -> Language:
|
| 61 |
+
return self.project_config.language
|
| 62 |
+
|
| 63 |
+
@classmethod
|
| 64 |
+
def load(cls, project_root: str | Path, autogenerate: bool = True) -> "Project":
|
| 65 |
+
project_root = Path(project_root).resolve()
|
| 66 |
+
if not project_root.exists():
|
| 67 |
+
raise FileNotFoundError(f"Project root not found: {project_root}")
|
| 68 |
+
project_config = ProjectConfig.load(project_root, autogenerate=autogenerate)
|
| 69 |
+
return Project(project_root=str(project_root), project_config=project_config)
|
| 70 |
+
|
| 71 |
+
def path_to_serena_data_folder(self) -> str:
|
| 72 |
+
return os.path.join(self.project_root, SERENA_MANAGED_DIR_NAME)
|
| 73 |
+
|
| 74 |
+
def path_to_project_yml(self) -> str:
|
| 75 |
+
return os.path.join(self.project_root, self.project_config.rel_path_to_project_yml())
|
| 76 |
+
|
| 77 |
+
def read_file(self, relative_path: str) -> str:
|
| 78 |
+
"""
|
| 79 |
+
Reads a file relative to the project root.
|
| 80 |
+
|
| 81 |
+
:param relative_path: the path to the file relative to the project root
|
| 82 |
+
:return: the content of the file
|
| 83 |
+
"""
|
| 84 |
+
abs_path = Path(self.project_root) / relative_path
|
| 85 |
+
if not abs_path.exists():
|
| 86 |
+
raise FileNotFoundError(f"File not found: {abs_path}")
|
| 87 |
+
return abs_path.read_text(encoding=self.project_config.encoding)
|
| 88 |
+
|
| 89 |
+
def get_ignore_spec(self) -> pathspec.PathSpec:
|
| 90 |
+
"""
|
| 91 |
+
:return: the pathspec matcher for the paths that were configured to be ignored,
|
| 92 |
+
either explicitly or implicitly through .gitignore files.
|
| 93 |
+
"""
|
| 94 |
+
return self._ignore_spec
|
| 95 |
+
|
| 96 |
+
def _is_ignored_relative_path(self, relative_path: str | Path, ignore_non_source_files: bool = True) -> bool:
|
| 97 |
+
"""
|
| 98 |
+
Determine whether an existing path should be ignored based on file type and ignore patterns.
|
| 99 |
+
Raises `FileNotFoundError` if the path does not exist.
|
| 100 |
+
|
| 101 |
+
:param relative_path: Relative path to check
|
| 102 |
+
:param ignore_non_source_files: whether files that are not source files (according to the file masks
|
| 103 |
+
determined by the project's programming language) shall be ignored
|
| 104 |
+
|
| 105 |
+
:return: whether the path should be ignored
|
| 106 |
+
"""
|
| 107 |
+
abs_path = os.path.join(self.project_root, relative_path)
|
| 108 |
+
if not os.path.exists(abs_path):
|
| 109 |
+
raise FileNotFoundError(f"File {abs_path} not found, the ignore check cannot be performed")
|
| 110 |
+
|
| 111 |
+
# Check file extension if it's a file
|
| 112 |
+
is_file = os.path.isfile(abs_path)
|
| 113 |
+
if is_file and ignore_non_source_files:
|
| 114 |
+
fn_matcher = self.language.get_source_fn_matcher()
|
| 115 |
+
if not fn_matcher.is_relevant_filename(abs_path):
|
| 116 |
+
return True
|
| 117 |
+
|
| 118 |
+
# Create normalized path for consistent handling
|
| 119 |
+
rel_path = Path(relative_path)
|
| 120 |
+
|
| 121 |
+
# always ignore paths inside .git
|
| 122 |
+
if len(rel_path.parts) > 0 and rel_path.parts[0] == ".git":
|
| 123 |
+
return True
|
| 124 |
+
|
| 125 |
+
return match_path(str(relative_path), self.get_ignore_spec(), root_path=self.project_root)
|
| 126 |
+
|
| 127 |
+
def is_ignored_path(self, path: str | Path, ignore_non_source_files: bool = False) -> bool:
|
| 128 |
+
"""
|
| 129 |
+
Checks whether the given path is ignored
|
| 130 |
+
|
| 131 |
+
:param path: the path to check, can be absolute or relative
|
| 132 |
+
:param ignore_non_source_files: whether to ignore files that are not source files
|
| 133 |
+
(according to the file masks determined by the project's programming language)
|
| 134 |
+
"""
|
| 135 |
+
path = Path(path)
|
| 136 |
+
if path.is_absolute():
|
| 137 |
+
try:
|
| 138 |
+
relative_path = path.relative_to(self.project_root)
|
| 139 |
+
except ValueError:
|
| 140 |
+
# If the path is not relative to the project root, we consider it as an absolute path outside the project
|
| 141 |
+
# (which we ignore)
|
| 142 |
+
log.warning(f"Path {path} is not relative to the project root {self.project_root} and was therefore ignored")
|
| 143 |
+
return True
|
| 144 |
+
else:
|
| 145 |
+
relative_path = path
|
| 146 |
+
|
| 147 |
+
return self._is_ignored_relative_path(str(relative_path), ignore_non_source_files=ignore_non_source_files)
|
| 148 |
+
|
| 149 |
+
def is_path_in_project(self, path: str | Path) -> bool:
|
| 150 |
+
"""
|
| 151 |
+
Checks if the given (absolute or relative) path is inside the project directory.
|
| 152 |
+
Note that even relative paths may be outside if they contain ".." or point to symlinks.
|
| 153 |
+
"""
|
| 154 |
+
path = Path(path)
|
| 155 |
+
_proj_root = Path(self.project_root)
|
| 156 |
+
if not path.is_absolute():
|
| 157 |
+
path = _proj_root / path
|
| 158 |
+
|
| 159 |
+
path = path.resolve()
|
| 160 |
+
return path.is_relative_to(_proj_root)
|
| 161 |
+
|
| 162 |
+
def relative_path_exists(self, relative_path: str) -> bool:
|
| 163 |
+
"""
|
| 164 |
+
Checks if the given relative path exists in the project directory.
|
| 165 |
+
|
| 166 |
+
:param relative_path: the path to check, relative to the project root
|
| 167 |
+
:return: True if the path exists, False otherwise
|
| 168 |
+
"""
|
| 169 |
+
abs_path = Path(self.project_root) / relative_path
|
| 170 |
+
return abs_path.exists()
|
| 171 |
+
|
| 172 |
+
def validate_relative_path(self, relative_path: str) -> None:
|
| 173 |
+
"""
|
| 174 |
+
Validates that the given relative path to an existing file/dir is safe to read or edit,
|
| 175 |
+
meaning it's inside the project directory and is not ignored by git.
|
| 176 |
+
|
| 177 |
+
Passing a path to a non-existing file will lead to a `FileNotFoundError`.
|
| 178 |
+
"""
|
| 179 |
+
if not self.is_path_in_project(relative_path):
|
| 180 |
+
raise ValueError(f"{relative_path=} points to path outside of the repository root; cannot access for safety reasons")
|
| 181 |
+
|
| 182 |
+
if self.is_ignored_path(relative_path):
|
| 183 |
+
raise ValueError(f"Path {relative_path} is ignored; cannot access for safety reasons")
|
| 184 |
+
|
| 185 |
+
def gather_source_files(self, relative_path: str = "") -> list[str]:
|
| 186 |
+
"""Retrieves relative paths of all source files, optionally limited to the given path
|
| 187 |
+
|
| 188 |
+
:param relative_path: if provided, restrict search to this path
|
| 189 |
+
"""
|
| 190 |
+
rel_file_paths = []
|
| 191 |
+
start_path = os.path.join(self.project_root, relative_path)
|
| 192 |
+
if not os.path.exists(start_path):
|
| 193 |
+
raise FileNotFoundError(f"Relative path {start_path} not found.")
|
| 194 |
+
if os.path.isfile(start_path):
|
| 195 |
+
return [relative_path]
|
| 196 |
+
else:
|
| 197 |
+
for root, dirs, files in os.walk(start_path, followlinks=True):
|
| 198 |
+
# prevent recursion into ignored directories
|
| 199 |
+
dirs[:] = [d for d in dirs if not self.is_ignored_path(os.path.join(root, d))]
|
| 200 |
+
|
| 201 |
+
# collect non-ignored files
|
| 202 |
+
for file in files:
|
| 203 |
+
abs_file_path = os.path.join(root, file)
|
| 204 |
+
try:
|
| 205 |
+
if not self.is_ignored_path(abs_file_path, ignore_non_source_files=True):
|
| 206 |
+
try:
|
| 207 |
+
rel_file_path = os.path.relpath(abs_file_path, start=self.project_root)
|
| 208 |
+
except Exception:
|
| 209 |
+
log.warning(
|
| 210 |
+
"Ignoring path '%s' because it appears to be outside of the project root (%s)",
|
| 211 |
+
abs_file_path,
|
| 212 |
+
self.project_root,
|
| 213 |
+
)
|
| 214 |
+
continue
|
| 215 |
+
rel_file_paths.append(rel_file_path)
|
| 216 |
+
except FileNotFoundError:
|
| 217 |
+
log.warning(
|
| 218 |
+
f"File {abs_file_path} not found (possibly due it being a symlink), skipping it in request_parsed_files",
|
| 219 |
+
)
|
| 220 |
+
return rel_file_paths
|
| 221 |
+
|
| 222 |
+
def search_source_files_for_pattern(
|
| 223 |
+
self,
|
| 224 |
+
pattern: str,
|
| 225 |
+
relative_path: str = "",
|
| 226 |
+
context_lines_before: int = 0,
|
| 227 |
+
context_lines_after: int = 0,
|
| 228 |
+
paths_include_glob: str | None = None,
|
| 229 |
+
paths_exclude_glob: str | None = None,
|
| 230 |
+
) -> list[MatchedConsecutiveLines]:
|
| 231 |
+
"""
|
| 232 |
+
Search for a pattern across all (non-ignored) source files
|
| 233 |
+
|
| 234 |
+
:param pattern: Regular expression pattern to search for, either as a compiled Pattern or string
|
| 235 |
+
:param relative_path:
|
| 236 |
+
:param context_lines_before: Number of lines of context to include before each match
|
| 237 |
+
:param context_lines_after: Number of lines of context to include after each match
|
| 238 |
+
:param paths_include_glob: Glob pattern to filter which files to include in the search
|
| 239 |
+
:param paths_exclude_glob: Glob pattern to filter which files to exclude from the search. Takes precedence over paths_include_glob.
|
| 240 |
+
:return: List of matched consecutive lines with context
|
| 241 |
+
"""
|
| 242 |
+
relative_file_paths = self.gather_source_files(relative_path=relative_path)
|
| 243 |
+
return search_files(
|
| 244 |
+
relative_file_paths,
|
| 245 |
+
pattern,
|
| 246 |
+
root_path=self.project_root,
|
| 247 |
+
context_lines_before=context_lines_before,
|
| 248 |
+
context_lines_after=context_lines_after,
|
| 249 |
+
paths_include_glob=paths_include_glob,
|
| 250 |
+
paths_exclude_glob=paths_exclude_glob,
|
| 251 |
+
)
|
| 252 |
+
|
| 253 |
+
def retrieve_content_around_line(
|
| 254 |
+
self, relative_file_path: str, line: int, context_lines_before: int = 0, context_lines_after: int = 0
|
| 255 |
+
) -> MatchedConsecutiveLines:
|
| 256 |
+
"""
|
| 257 |
+
Retrieve the content of the given file around the given line.
|
| 258 |
+
|
| 259 |
+
:param relative_file_path: The relative path of the file to retrieve the content from
|
| 260 |
+
:param line: The line number to retrieve the content around
|
| 261 |
+
:param context_lines_before: The number of lines to retrieve before the given line
|
| 262 |
+
:param context_lines_after: The number of lines to retrieve after the given line
|
| 263 |
+
|
| 264 |
+
:return MatchedConsecutiveLines: A container with the desired lines.
|
| 265 |
+
"""
|
| 266 |
+
file_contents = self.read_file(relative_file_path)
|
| 267 |
+
return MatchedConsecutiveLines.from_file_contents(
|
| 268 |
+
file_contents,
|
| 269 |
+
line=line,
|
| 270 |
+
context_lines_before=context_lines_before,
|
| 271 |
+
context_lines_after=context_lines_after,
|
| 272 |
+
source_file_path=relative_file_path,
|
| 273 |
+
)
|
| 274 |
+
|
| 275 |
+
def create_language_server(
    self,
    log_level: int = logging.INFO,
    ls_timeout: float | None = DEFAULT_TOOL_TIMEOUT - 5,
    trace_lsp_communication: bool = False,
) -> SolidLanguageServer:
    """
    Create a language server for this project. Note that you will have to start it
    before performing any LS operations.

    :param log_level: the log level for the language server
    :param ls_timeout: the timeout for the language server
    :param trace_lsp_communication: whether to trace LSP communication
    :return: the language server
    """
    # NOTE(fix): the previous docstring documented a `project` parameter that does not
    # exist in the signature; the project is implicitly `self`.
    ls_config = LanguageServerConfig(
        code_language=self.language,
        ignored_paths=self._ignored_patterns,
        trace_lsp_communication=trace_lsp_communication,
    )
    ls_logger = LanguageServerLogger(log_level=log_level)

    log.info(f"Creating language server instance for {self.project_root}.")
    return SolidLanguageServer.create(
        ls_config,
        ls_logger,
        self.project_root,
        timeout=ls_timeout,
        solidlsp_settings=SolidLSPSettings(solidlsp_dir=SERENA_MANAGED_DIR_IN_HOME, project_data_relative_path=SERENA_MANAGED_DIR_NAME),
    )
|
projects/ui/serena-new/src/serena/prompt_factory.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
from serena.constants import PROMPT_TEMPLATES_DIR_IN_USER_HOME, PROMPT_TEMPLATES_DIR_INTERNAL
|
| 4 |
+
from serena.generated.generated_prompt_factory import PromptFactory
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class SerenaPromptFactory(PromptFactory):
    """
    A class for retrieving and rendering prompt templates and prompt lists.
    """

    def __init__(self) -> None:
        # the user-home directory is listed first — presumably so user-provided
        # templates take precedence over the internally shipped ones (TODO confirm
        # against PromptFactory's lookup order)
        template_dirs = [PROMPT_TEMPLATES_DIR_IN_USER_HOME, PROMPT_TEMPLATES_DIR_INTERNAL]
        os.makedirs(PROMPT_TEMPLATES_DIR_IN_USER_HOME, exist_ok=True)
        super().__init__(prompts_dir=template_dirs)
|
projects/ui/serena-new/src/serena/symbol.py
ADDED
|
@@ -0,0 +1,645 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
from abc import ABC, abstractmethod
|
| 5 |
+
from collections.abc import Iterator, Sequence
|
| 6 |
+
from dataclasses import asdict, dataclass
|
| 7 |
+
from typing import TYPE_CHECKING, Any, Self, Union
|
| 8 |
+
|
| 9 |
+
from sensai.util.string import ToStringMixin
|
| 10 |
+
|
| 11 |
+
from solidlsp import SolidLanguageServer
|
| 12 |
+
from solidlsp.ls import ReferenceInSymbol as LSPReferenceInSymbol
|
| 13 |
+
from solidlsp.ls_types import Position, SymbolKind, UnifiedSymbolInformation
|
| 14 |
+
|
| 15 |
+
from .project import Project
|
| 16 |
+
|
| 17 |
+
if TYPE_CHECKING:
|
| 18 |
+
from .agent import SerenaAgent
|
| 19 |
+
|
| 20 |
+
log = logging.getLogger(__name__)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@dataclass
|
| 24 |
+
class LanguageServerSymbolLocation:
|
| 25 |
+
"""
|
| 26 |
+
Represents the (start) location of a symbol identifier, which, within Serena, uniquely identifies the symbol.
|
| 27 |
+
"""
|
| 28 |
+
|
| 29 |
+
relative_path: str | None
|
| 30 |
+
"""
|
| 31 |
+
the relative path of the file containing the symbol; if None, the symbol is defined outside of the project's scope
|
| 32 |
+
"""
|
| 33 |
+
line: int | None
|
| 34 |
+
"""
|
| 35 |
+
the line number in which the symbol identifier is defined (if the symbol is a function, class, etc.);
|
| 36 |
+
may be None for some types of symbols (e.g. SymbolKind.File)
|
| 37 |
+
"""
|
| 38 |
+
column: int | None
|
| 39 |
+
"""
|
| 40 |
+
the column number in which the symbol identifier is defined (if the symbol is a function, class, etc.);
|
| 41 |
+
may be None for some types of symbols (e.g. SymbolKind.File)
|
| 42 |
+
"""
|
| 43 |
+
|
| 44 |
+
def __post_init__(self) -> None:
|
| 45 |
+
if self.relative_path is not None:
|
| 46 |
+
self.relative_path = self.relative_path.replace("/", os.path.sep)
|
| 47 |
+
|
| 48 |
+
def to_dict(self, include_relative_path: bool = True) -> dict[str, Any]:
|
| 49 |
+
result = asdict(self)
|
| 50 |
+
if not include_relative_path:
|
| 51 |
+
result.pop("relative_path", None)
|
| 52 |
+
return result
|
| 53 |
+
|
| 54 |
+
def has_position_in_file(self) -> bool:
|
| 55 |
+
return self.relative_path is not None and self.line is not None and self.column is not None
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@dataclass
class PositionInFile:
    """
    Represents a character position within a file
    """

    # the 0-based line number in the file
    line: int
    # the 0-based column
    col: int

    def to_lsp_position(self) -> Position:
        """
        Convert to LSP Position.
        """
        return Position(character=self.col, line=self.line)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class Symbol(ABC):
    """Abstract base class for symbols, providing access to the position of the symbol's body."""

    @abstractmethod
    def get_body_start_position(self) -> PositionInFile | None:
        pass

    @abstractmethod
    def get_body_end_position(self) -> PositionInFile | None:
        pass

    def get_body_start_position_or_raise(self) -> PositionInFile:
        """
        Get the start position of the symbol body, raising an error if it is not defined.
        """
        if (start := self.get_body_start_position()) is None:
            raise ValueError(f"Body start position is not defined for {self}")
        return start

    def get_body_end_position_or_raise(self) -> PositionInFile:
        """
        Get the end position of the symbol body, raising an error if it is not defined.
        """
        if (end := self.get_body_end_position()) is None:
            raise ValueError(f"Body end position is not defined for {self}")
        return end

    @abstractmethod
    def is_neighbouring_definition_separated_by_empty_line(self) -> bool:
        """
        :return: whether a symbol definition of this symbol's kind is usually separated from the
            previous/next definition by at least one empty line.
        """
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class LanguageServerSymbol(Symbol, ToStringMixin):
    """
    Symbol implementation backed by a symbol-tree node (`UnifiedSymbolInformation`) obtained
    from the language server.
    """

    _NAME_PATH_SEP = "/"  # separator between segments of a symbol's name path

    @staticmethod
    def match_name_path(
        name_path: str,
        symbol_name_path_parts: list[str],
        substring_matching: bool,
    ) -> bool:
        """
        Checks if a given `name_path` matches a symbol's qualified name parts.
        See docstring of `Symbol.find` for more details.
        """
        assert name_path, "name_path must not be empty"
        assert symbol_name_path_parts, "symbol_name_path_parts must not be empty"
        name_path_sep = LanguageServerSymbol._NAME_PATH_SEP

        # a leading separator marks the pattern as absolute (must match from the root)
        is_absolute_pattern = name_path.startswith(name_path_sep)
        pattern_parts = name_path.lstrip(name_path_sep).rstrip(name_path_sep).split(name_path_sep)

        # filtering based on ancestors
        if len(pattern_parts) > len(symbol_name_path_parts):
            # can't possibly match if pattern has more parts than symbol
            return False
        if is_absolute_pattern and len(pattern_parts) != len(symbol_name_path_parts):
            # for absolute patterns, the number of parts must match exactly
            return False
        if symbol_name_path_parts[-len(pattern_parts) : -1] != pattern_parts[:-1]:
            # ancestors must match
            return False

        # matching the last part of the symbol name
        name_to_match = pattern_parts[-1]
        symbol_name = symbol_name_path_parts[-1]
        if substring_matching:
            return name_to_match in symbol_name
        else:
            return name_to_match == symbol_name

    def __init__(self, symbol_root_from_ls: UnifiedSymbolInformation) -> None:
        """
        :param symbol_root_from_ls: the symbol information node (including its children) as provided by the language server
        """
        self.symbol_root = symbol_root_from_ls

    def _tostring_includes(self) -> list[str]:
        # no attributes are rendered directly; see _tostring_additional_entries
        return []

    def _tostring_additional_entries(self) -> dict[str, Any]:
        return dict(name=self.name, kind=self.kind, num_children=len(self.symbol_root["children"]))

    @property
    def name(self) -> str:
        """The symbol's (unqualified) name."""
        return self.symbol_root["name"]

    @property
    def kind(self) -> str:
        """The name of the symbol's LSP symbol kind (e.g. 'Class' or 'Method')."""
        return SymbolKind(self.symbol_kind).name

    @property
    def symbol_kind(self) -> SymbolKind:
        """The symbol's LSP symbol kind."""
        return self.symbol_root["kind"]

    def is_neighbouring_definition_separated_by_empty_line(self) -> bool:
        # definitions of these kinds are conventionally separated by an empty line
        return self.symbol_kind in (SymbolKind.Function, SymbolKind.Method, SymbolKind.Class, SymbolKind.Interface, SymbolKind.Struct)

    @property
    def relative_path(self) -> str | None:
        """The relative path of the file containing the symbol, if available in the LS data."""
        location = self.symbol_root.get("location")
        if location:
            return location.get("relativePath")
        return None

    @property
    def location(self) -> LanguageServerSymbolLocation:
        """
        :return: the start location of the actual symbol identifier
        """
        return LanguageServerSymbolLocation(relative_path=self.relative_path, line=self.line, column=self.column)

    @property
    def body_start_position(self) -> Position | None:
        """The start position of the symbol's body range, if the language server provided one."""
        location = self.symbol_root.get("location")
        if location:
            range_info = location.get("range")
            if range_info:
                start_pos = range_info.get("start")
                if start_pos:
                    return start_pos
        return None

    @property
    def body_end_position(self) -> Position | None:
        """The end position of the symbol's body range, if the language server provided one."""
        location = self.symbol_root.get("location")
        if location:
            range_info = location.get("range")
            if range_info:
                end_pos = range_info.get("end")
                if end_pos:
                    return end_pos
        return None

    def get_body_start_position(self) -> PositionInFile | None:
        start_pos = self.body_start_position
        if start_pos is None:
            return None
        return PositionInFile(line=start_pos["line"], col=start_pos["character"])

    def get_body_end_position(self) -> PositionInFile | None:
        end_pos = self.body_end_position
        if end_pos is None:
            return None
        return PositionInFile(line=end_pos["line"], col=end_pos["character"])

    def get_body_line_numbers(self) -> tuple[int | None, int | None]:
        """:return: a pair (start_line, end_line) of the body range; each entry may be None if unknown."""
        start_pos = self.body_start_position
        end_pos = self.body_end_position
        start_line = start_pos["line"] if start_pos else None
        end_line = end_pos["line"] if end_pos else None
        return start_line, end_line

    @property
    def line(self) -> int | None:
        """
        :return: the line in which the symbol identifier is defined.
        """
        if "selectionRange" in self.symbol_root:
            return self.symbol_root["selectionRange"]["start"]["line"]
        else:
            # line is expected to be undefined for some types of symbols (e.g. SymbolKind.File)
            return None

    @property
    def column(self) -> int | None:
        """:return: the column in which the symbol identifier is defined."""
        if "selectionRange" in self.symbol_root:
            return self.symbol_root["selectionRange"]["start"]["character"]
        else:
            # precise location is expected to be undefined for some types of symbols (e.g. SymbolKind.File)
            return None

    @property
    def body(self) -> str | None:
        """The symbol's source code body, if present (symbols may be loaded with include_body=False)."""
        return self.symbol_root.get("body")

    def get_name_path(self) -> str:
        """
        Get the name path of the symbol (e.g. "class/method/inner_function").
        """
        return self._NAME_PATH_SEP.join(self.get_name_path_parts())

    def get_name_path_parts(self) -> list[str]:
        """
        Get the parts of the name path of the symbol (e.g. ["class", "method", "inner_function"]).
        """
        # ancestors are collected bottom-up, so reverse to obtain root-first order
        ancestors_within_file = list(self.iter_ancestors(up_to_symbol_kind=SymbolKind.File))
        ancestors_within_file.reverse()
        return [a.name for a in ancestors_within_file] + [self.name]

    def iter_children(self) -> Iterator[Self]:
        """Iterate over the symbol's direct children, wrapping each in this class."""
        for c in self.symbol_root["children"]:
            yield self.__class__(c)

    def iter_ancestors(self, up_to_symbol_kind: SymbolKind | None = None) -> Iterator[Self]:
        """
        Iterate over all ancestors of the symbol, starting with the parent and going up to the root or
        the given symbol kind.

        :param up_to_symbol_kind: if provided, iteration will stop *before* the first ancestor of the given kind.
            A typical use case is to pass `SymbolKind.File` or `SymbolKind.Package`.
        """
        parent = self.get_parent()
        if parent is not None:
            if up_to_symbol_kind is None or parent.symbol_kind != up_to_symbol_kind:
                yield parent
                yield from parent.iter_ancestors(up_to_symbol_kind=up_to_symbol_kind)

    def get_parent(self) -> Self | None:
        """:return: the parent symbol, or None if this is a root symbol."""
        parent_root = self.symbol_root.get("parent")
        if parent_root is None:
            return None
        return self.__class__(parent_root)

    def find(
        self,
        name_path: str,
        substring_matching: bool = False,
        include_kinds: Sequence[SymbolKind] | None = None,
        exclude_kinds: Sequence[SymbolKind] | None = None,
    ) -> list[Self]:
        """
        Find all symbols within the symbol's subtree that match the given `name_path`.
        The matching behavior is determined by the structure of `name_path`, which can
        either be a simple name (e.g. "method") or a name path like "class/method" (relative name path)
        or "/class/method" (absolute name path).

        Key aspects of the name path matching behavior:
        - Trailing slashes in `name_path` play no role and are ignored.
        - The name of the retrieved symbols will match (either exactly or as a substring)
          the last segment of `name_path`, while other segments will restrict the search to symbols that
          have a desired sequence of ancestors.
        - If there is no starting or intermediate slash in `name_path`, there is no
          restriction on the ancestor symbols. For example, passing `method` will match
          against symbols with name paths like `method`, `class/method`, `class/nested_class/method`, etc.
        - If `name_path` contains a `/` but doesn't start with a `/`, the matching is restricted to symbols
          with the same ancestors as the last segment of `name_path`. For example, passing `class/method` will match against
          `class/method` as well as `nested_class/class/method` but not `method`.
        - If `name_path` starts with a `/`, it will be treated as an absolute name path pattern, meaning
          that the first segment of it must match the first segment of the symbol's name path.
          For example, passing `/class` will match only against top-level symbols like `class` but not against `nested_class/class`.
          Passing `/class/method` will match against `class/method` but not `nested_class/class/method` or `method`.

        :param name_path: the name path to match against
        :param substring_matching: whether to use substring matching (as opposed to exact matching)
            of the last segment of `name_path` against the symbol name.
        :param include_kinds: an optional sequence of ints representing the LSP symbol kind.
            If provided, only symbols of the given kinds will be included in the result.
        :param exclude_kinds: If provided, symbols of the given kinds will be excluded from the result.
        """
        result = []

        def should_include(s: "LanguageServerSymbol") -> bool:
            # kind filters are applied first; exclude_kinds takes precedence over include_kinds
            if include_kinds is not None and s.symbol_kind not in include_kinds:
                return False
            if exclude_kinds is not None and s.symbol_kind in exclude_kinds:
                return False
            return LanguageServerSymbol.match_name_path(
                name_path=name_path,
                symbol_name_path_parts=s.get_name_path_parts(),
                substring_matching=substring_matching,
            )

        def traverse(s: "LanguageServerSymbol") -> None:
            # depth-first traversal of the subtree rooted at s
            if should_include(s):
                result.append(s)
            for c in s.iter_children():
                traverse(c)

        traverse(self)
        return result

    def to_dict(
        self,
        kind: bool = False,
        location: bool = False,
        depth: int = 0,
        include_body: bool = False,
        include_children_body: bool = False,
        include_relative_path: bool = True,
    ) -> dict[str, Any]:
        """
        Converts the symbol to a dictionary.

        :param kind: whether to include the kind of the symbol
        :param location: whether to include the location of the symbol
        :param depth: the depth of the symbol
        :param include_body: whether to include the body of the top-level symbol.
        :param include_children_body: whether to also include the body of the children.
            Note that the body of the children is part of the body of the parent symbol,
            so there is usually no need to set this to True unless you want process the output
            and pass the children without passing the parent body to the LM.
        :param include_relative_path: whether to include the relative path of the symbol in the location
            entry. Relative paths of the symbol's children are always excluded.
        :return: a dictionary representation of the symbol
        """
        result: dict[str, Any] = {"name": self.name, "name_path": self.get_name_path()}

        if kind:
            result["kind"] = self.kind

        if location:
            result["location"] = self.location.to_dict(include_relative_path=include_relative_path)
            body_start_line, body_end_line = self.get_body_line_numbers()
            result["body_location"] = {"start_line": body_start_line, "end_line": body_end_line}

        if include_body:
            if self.body is None:
                log.warning("Requested body for symbol, but it is not present. The symbol might have been loaded with include_body=False.")
            result["body"] = self.body

        def add_children(s: Self) -> list[dict[str, Any]]:
            children = []
            for c in s.iter_children():
                children.append(
                    c.to_dict(
                        kind=kind,
                        location=location,
                        depth=depth - 1,
                        include_body=include_children_body,
                        include_children_body=include_children_body,
                        # all children have the same relative path as the parent
                        include_relative_path=False,
                    )
                )
            return children

        if depth > 0:
            result["children"] = add_children(self)

        return result
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
@dataclass
class ReferenceInLanguageServerSymbol(ToStringMixin):
    """
    Represents the location of a reference to another symbol within a symbol/file.

    The contained symbol is the symbol within which the reference is located,
    not the symbol that is referenced.
    """

    # the symbol within which the reference is located
    symbol: LanguageServerSymbol
    # the line number in which the reference is located (0-based)
    line: int
    # the column number in which the reference is located (0-based)
    character: int

    @classmethod
    def from_lsp_reference(cls, reference: LSPReferenceInSymbol) -> Self:
        """Build an instance from the solidlsp-level reference object."""
        return cls(
            symbol=LanguageServerSymbol(reference.symbol),
            line=reference.line,
            character=reference.character,
        )

    def get_relative_path(self) -> str | None:
        """:return: the relative path of the file containing the reference, if known."""
        return self.symbol.location.relative_path
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
class LanguageServerSymbolRetriever:
|
| 444 |
+
def __init__(self, lang_server: SolidLanguageServer, agent: Union["SerenaAgent", None] = None) -> None:
    """
    :param lang_server: the language server to use for symbol retrieval as well as editing operations.
    :param agent: the agent to use (only needed for marking files as modified). You can pass None if you don't
        need an agent to be aware of file modifications performed by the symbol manager.
    """
    self.agent = agent
    self._lang_server = lang_server
|
| 452 |
+
|
| 453 |
+
def set_language_server(self, lang_server: SolidLanguageServer) -> None:
    """
    Replace the language server used for symbol retrieval and editing operations.
    Useful when the language server needs to change after this SymbolManager was initialized.
    """
    self._lang_server = lang_server
|
| 459 |
+
|
| 460 |
+
def get_language_server(self) -> SolidLanguageServer:
    """:return: the language server currently in use."""
    return self._lang_server
|
| 462 |
+
|
| 463 |
+
def find_by_name(
    self,
    name_path: str,
    include_body: bool = False,
    include_kinds: Sequence[SymbolKind] | None = None,
    exclude_kinds: Sequence[SymbolKind] | None = None,
    substring_matching: bool = False,
    within_relative_path: str | None = None,
) -> list[LanguageServerSymbol]:
    """
    Find all symbols that match the given name. See docstring of `Symbol.find` for more details.
    The only parameter not mentioned there is `within_relative_path`, which can be used to restrict the search
    to symbols within a specific file or directory.
    """
    symbol_roots = self._lang_server.request_full_symbol_tree(within_relative_path=within_relative_path, include_body=include_body)
    matches: list[LanguageServerSymbol] = []
    for symbol_root in symbol_roots:
        # search each root's subtree and accumulate the hits
        matches += LanguageServerSymbol(symbol_root).find(
            name_path,
            include_kinds=include_kinds,
            exclude_kinds=exclude_kinds,
            substring_matching=substring_matching,
        )
    return matches
|
| 486 |
+
|
| 487 |
+
def get_document_symbols(self, relative_path: str) -> list[LanguageServerSymbol]:
    """
    Retrieve all symbols of the document at the given path.

    :param relative_path: the relative path of the document
    :return: the symbols defined in the document (without bodies)
    """
    # fix: the second element of the returned tuple was bound to an unused local
    # (`roots`); it is discarded explicitly now
    symbol_dicts, _ = self._lang_server.request_document_symbols(relative_path, include_body=False)
    return [LanguageServerSymbol(s) for s in symbol_dicts]
|
| 491 |
+
|
| 492 |
+
def find_by_location(self, location: LanguageServerSymbolLocation) -> LanguageServerSymbol | None:
    """
    Find the symbol at the given location, if any.

    :param location: the location of the symbol identifier
    :return: the symbol whose location matches exactly, or None if no match is found
        (also when the location has no relative path)
    """
    if location.relative_path is None:
        return None
    # fix: the second element of the returned tuple was bound to an unused local
    # (`roots`); it is discarded explicitly now
    symbol_dicts, _ = self._lang_server.request_document_symbols(location.relative_path, include_body=False)
    for symbol_dict in symbol_dicts:
        symbol = LanguageServerSymbol(symbol_dict)
        if symbol.location == location:
            return symbol
    return None
|
| 501 |
+
|
| 502 |
+
def find_referencing_symbols(
    self,
    name_path: str,
    relative_file_path: str,
    include_body: bool = False,
    include_kinds: Sequence[SymbolKind] | None = None,
    exclude_kinds: Sequence[SymbolKind] | None = None,
) -> list[ReferenceInLanguageServerSymbol]:
    """
    Find all symbols that reference the symbol with the given name.
    If multiple symbols fit the name (e.g. for variables that are overwritten), will use the first one.

    :param name_path: the name path of the symbol to find
    :param relative_file_path: the relative path of the file in which the referenced symbol is defined.
    :param include_body: whether to include the body of all symbols in the result.
        Not recommended, as the referencing symbols will often be files, and thus the bodies will be very long.
    :param include_kinds: which kinds of symbols to include in the result.
    :param exclude_kinds: which kinds of symbols to exclude from the result.
    """
    symbol_candidates = self.find_by_name(name_path, substring_matching=False, within_relative_path=relative_file_path)
    if len(symbol_candidates) == 0:
        log.warning(f"No symbol with name {name_path} found in file {relative_file_path}")
        return []
    if len(symbol_candidates) > 1:
        # fix: the first two implicitly concatenated f-string fragments were missing a
        # separating space, producing "...{relative_file_path}.May be an overwritten..."
        log.error(
            f"Found {len(symbol_candidates)} symbols with name {name_path} in file {relative_file_path}. "
            f"May be an overwritten variable, in which case you can ignore this error. Proceeding with the first one. "
            f"Found symbols for {name_path=} in {relative_file_path=}: \n"
            f"{json.dumps([s.location.to_dict() for s in symbol_candidates], indent=2)}"
        )
    symbol = symbol_candidates[0]
    return self.find_referencing_symbols_by_location(
        symbol.location, include_body=include_body, include_kinds=include_kinds, exclude_kinds=exclude_kinds
    )
|
| 536 |
+
|
| 537 |
+
def find_referencing_symbols_by_location(
    self,
    symbol_location: LanguageServerSymbolLocation,
    include_body: bool = False,
    include_kinds: Sequence[SymbolKind] | None = None,
    exclude_kinds: Sequence[SymbolKind] | None = None,
) -> list[ReferenceInLanguageServerSymbol]:
    """
    Find all symbols that reference the symbol at the given location.

    :param symbol_location: the location of the symbol for which to find references.
        Does not need to include an end_line, as it is unused in the search.
    :param include_body: whether to include the body of all symbols in the result.
        Not recommended, as the referencing symbols will often be files, and thus the bodies will be very long.
        Note: you can filter out the bodies of the children if you set include_children_body=False
        in the to_dict method.
    :param include_kinds: an optional sequence of ints representing the LSP symbol kind.
        If provided, only symbols of the given kinds will be included in the result.
    :param exclude_kinds: If provided, symbols of the given kinds will be excluded from the result.
        Takes precedence over include_kinds.
    :return: a list of symbols that reference the given symbol
    :raises ValueError: if the location lacks a concrete (file, line, column) position
    """
    # A references query needs a concrete anchor; bail out early otherwise.
    if not symbol_location.has_position_in_file():
        raise ValueError("Symbol location does not contain a valid position in a file")
    # Narrow the Optional fields for the language-server request below.
    assert symbol_location.relative_path is not None
    assert symbol_location.line is not None
    assert symbol_location.column is not None
    references = self._lang_server.request_referencing_symbols(
        relative_file_path=symbol_location.relative_path,
        line=symbol_location.line,
        column=symbol_location.column,
        include_imports=False,  # import statements are not useful as "referencing symbols"
        include_self=False,  # exclude the definition itself from its own references
        include_body=include_body,
        include_file_symbols=True,  # references may resolve to whole-file symbols
    )

    # Kind filtering is applied client-side; exclude_kinds takes precedence over include_kinds
    # because it is applied second.
    if include_kinds is not None:
        references = [s for s in references if s.symbol["kind"] in include_kinds]

    if exclude_kinds is not None:
        references = [s for s in references if s.symbol["kind"] not in exclude_kinds]

    return [ReferenceInLanguageServerSymbol.from_lsp_reference(r) for r in references]
|
| 581 |
+
|
| 582 |
+
@dataclass
class SymbolOverviewElement:
    """Lightweight (name, kind) summary of a symbol, used for file overviews."""

    # hierarchical name path of the symbol (as produced by LanguageServerSymbol.get_name_path)
    name_path: str
    # LSP SymbolKind as a plain int (serialization-friendly)
    kind: int

    @classmethod
    def from_symbol(cls, symbol: LanguageServerSymbol) -> Self:
        """Create an overview element from a full language-server symbol."""
        return cls(name_path=symbol.get_name_path(), kind=int(symbol.symbol_kind))
|
| 590 |
+
|
| 591 |
+
def get_symbol_overview(self, relative_path: str) -> dict[str, list[SymbolOverviewElement]]:
    """
    Retrieve a high-level symbol overview for the given path.

    :param relative_path: relative path to summarize (the language server may return
        results for one or several files under it)
    :return: mapping from file path to the symbols reported for that file
    """
    path_to_unified_symbols = self._lang_server.request_overview(relative_path)
    result = {}
    for file_path, unified_symbols in path_to_unified_symbols.items():
        # TODO: maybe include not just top-level symbols? We could filter by kind to exclude variables
        # The language server methods would need to be adjusted for this.
        result[file_path] = [self.SymbolOverviewElement.from_symbol(LanguageServerSymbol(s)) for s in unified_symbols]
    return result
|
| 599 |
+
|
| 600 |
+
|
| 601 |
+
class JetBrainsSymbol(Symbol):
    """Symbol implementation backed by a dictionary returned by the JetBrains plugin client."""

    def __init__(self, symbol_dict: dict, project: Project) -> None:
        """
        :param symbol_dict: dictionary as returned by the JetBrains plugin client.
        :param project: the project the symbol belongs to; used to resolve and read the source file.
        """
        self._project = project
        self._dict = symbol_dict
        # lazily-populated caches (file content and body start/end positions)
        self._cached_file_content: str | None = None
        self._cached_body_start_position: PositionInFile | None = None
        self._cached_body_end_position: PositionInFile | None = None

    def get_relative_path(self) -> str:
        """Return the symbol's source file path relative to the project root."""
        return self._dict["relative_path"]

    def get_file_content(self) -> str:
        """Return (and cache) the full content of the file containing the symbol."""
        if self._cached_file_content is None:
            path = os.path.join(self._project.project_root, self.get_relative_path())
            # use the project's configured encoding, not the platform default
            with open(path, encoding=self._project.project_config.encoding) as f:
                self._cached_file_content = f.read()
        return self._cached_file_content

    def is_position_in_file_available(self) -> bool:
        """Return True if the plugin client provided a text range for this symbol."""
        return "text_range" in self._dict

    def get_body_start_position(self) -> PositionInFile | None:
        """Return the (cached) start position of the symbol's body, or None if no text range is available."""
        if not self.is_position_in_file_available():
            return None
        if self._cached_body_start_position is None:
            pos = self._dict["text_range"]["start_pos"]
            line, col = pos["line"], pos["col"]
            self._cached_body_start_position = PositionInFile(line=line, col=col)
        return self._cached_body_start_position

    def get_body_end_position(self) -> PositionInFile | None:
        """Return the (cached) end position of the symbol's body, or None if no text range is available."""
        if not self.is_position_in_file_available():
            return None
        if self._cached_body_end_position is None:
            pos = self._dict["text_range"]["end_pos"]
            line, col = pos["line"], pos["col"]
            self._cached_body_end_position = PositionInFile(line=line, col=col)
        return self._cached_body_end_position

    def is_neighbouring_definition_separated_by_empty_line(self) -> bool:
        # NOTE: Symbol types cannot really be differentiated, because types are not handled in a language-agnostic way.
        return False
|
projects/ui/serena-new/src/serena/text_utils.py
ADDED
|
@@ -0,0 +1,368 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import fnmatch
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
from collections.abc import Callable
|
| 6 |
+
from dataclasses import dataclass, field
|
| 7 |
+
from enum import StrEnum
|
| 8 |
+
from typing import Any, Self
|
| 9 |
+
|
| 10 |
+
from joblib import Parallel, delayed
|
| 11 |
+
|
| 12 |
+
log = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class LineType(StrEnum):
    """Enum for different types of lines in search results."""

    # NOTE: the serialized string values ("prefix"/"postfix") intentionally differ
    # from the member names (BEFORE_MATCH/AFTER_MATCH); do not "fix" them, as the
    # values are part of the serialized output format.
    MATCH = "match"
    """Part of the matched lines"""
    BEFORE_MATCH = "prefix"
    """Lines before the match"""
    AFTER_MATCH = "postfix"
    """Lines after the match"""
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
@dataclass(kw_only=True)
class TextLine:
    """A single line of text annotated with its role relative to a search match."""

    line_number: int
    line_content: str
    match_type: LineType
    """Represents the type of line (match, prefix, postfix)"""

    def get_display_prefix(self) -> str:
        """Return the marker shown before the line: ' >' for matched lines, '...' for context."""
        return " >" if self.match_type == LineType.MATCH else "..."

    def format_line(self, include_line_numbers: bool = True) -> str:
        """Render this line for display (e.g., for logging or passing to an LLM).

        :param include_line_numbers: Whether to include the line number in the result.
        """
        marker = self.get_display_prefix()
        if include_line_numbers:
            marker = f"{marker}{str(self.line_number).rjust(4)}"
        return f"{marker}:{self.line_content}"
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
@dataclass(kw_only=True)
class MatchedConsecutiveLines:
    """A run of consecutive lines found through some criterion in a text file or a string.

    May contain context lines before and after the actual matched lines.
    """

    lines: list[TextLine]
    """All lines in the context of the match. At least one of them is of `match_type` `MATCH`."""
    source_file_path: str | None = None
    """Path to the file where the match was found (Metadata)."""

    # populated in __post_init__ by bucketing `lines` by their match_type
    lines_before_matched: list[TextLine] = field(default_factory=list)
    matched_lines: list[TextLine] = field(default_factory=list)
    lines_after_matched: list[TextLine] = field(default_factory=list)

    def __post_init__(self) -> None:
        # Distribute each line into the bucket corresponding to its match type.
        buckets = {
            LineType.BEFORE_MATCH: self.lines_before_matched,
            LineType.MATCH: self.matched_lines,
            LineType.AFTER_MATCH: self.lines_after_matched,
        }
        for text_line in self.lines:
            bucket = buckets.get(text_line.match_type)
            if bucket is not None:
                bucket.append(text_line)

        assert len(self.matched_lines) > 0, "At least one matched line is required"

    @property
    def start_line(self) -> int:
        """1st line number covered by this match context."""
        return self.lines[0].line_number

    @property
    def end_line(self) -> int:
        """Last line number covered by this match context."""
        return self.lines[-1].line_number

    @property
    def num_matched_lines(self) -> int:
        """Number of lines that are part of the actual match (excluding context)."""
        return len(self.matched_lines)

    def to_display_string(self, include_line_numbers: bool = True) -> str:
        """Render all lines (context and match) as a newline-joined display string."""
        return "\n".join(text_line.format_line(include_line_numbers) for text_line in self.lines)

    @classmethod
    def from_file_contents(
        cls, file_contents: str, line: int, context_lines_before: int = 0, context_lines_after: int = 0, source_file_path: str | None = None
    ) -> Self:
        """Build a match context around a single (0-based) line of the given file contents.

        :param file_contents: full text to slice lines from
        :param line: 0-based index of the line to mark as the match
        :param context_lines_before: number of lines to include before the match
        :param context_lines_after: number of lines to include after the match
        :param source_file_path: optional path metadata for the resulting object
        """
        all_lines = file_contents.split("\n")
        first = max(0, line - context_lines_before)
        last = min(len(all_lines) - 1, line + context_lines_after)
        # context before the matched line
        text_lines: list[TextLine] = [
            TextLine(line_number=no, line_content=all_lines[no], match_type=LineType.BEFORE_MATCH) for no in range(first, line)
        ]
        # the matched line itself
        text_lines.append(TextLine(line_number=line, line_content=all_lines[line], match_type=LineType.MATCH))
        # context after the matched line
        text_lines.extend(
            TextLine(line_number=no, line_content=all_lines[no], match_type=LineType.AFTER_MATCH) for no in range(line + 1, last + 1)
        )
        return cls(lines=text_lines, source_file_path=source_file_path)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def glob_to_regex(glob_pat: str) -> str:
    """Translate a glob-like pattern into an equivalent (unanchored) regex string.

    '*' becomes '.*', '?' becomes '.', a backslash escapes the following
    character so it is matched literally, and every other character is
    regex-escaped.
    """
    pieces: list[str] = []
    pos = 0
    length = len(glob_pat)
    while pos < length:
        char = glob_pat[pos]
        if char == "*":
            pieces.append(".*")
        elif char == "?":
            pieces.append(".")
        elif char == "\\":
            # escape: take the next character literally; a trailing backslash is kept verbatim
            pos += 1
            pieces.append(re.escape(glob_pat[pos]) if pos < length else "\\")
        else:
            pieces.append(re.escape(char))
        pos += 1
    return "".join(pieces)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def search_text(
    pattern: str,
    content: str | None = None,
    source_file_path: str | None = None,
    allow_multiline_match: bool = False,
    context_lines_before: int = 0,
    context_lines_after: int = 0,
    is_glob: bool = False,
) -> list[MatchedConsecutiveLines]:
    """
    Search for a pattern in text content. Supports both regex and glob-like patterns.

    :param pattern: Pattern to search for (regex or glob-like pattern)
    :param content: The text content to search. May be None if source_file_path is provided.
    :param source_file_path: Optional path to the source file. If content is None,
        this has to be passed and the file will be read.
    :param allow_multiline_match: Whether to search across multiple lines. Currently, the default
        option (False) is very inefficient, so it is recommended to set this to True.
    :param context_lines_before: Number of context lines to include before matches
    :param context_lines_after: Number of context lines to include after matches
    :param is_glob: If True, pattern is treated as a glob-like pattern (e.g., "*.py", "test_??.py")
        and will be converted to regex internally

    :return: List of `MatchedConsecutiveLines` objects, one per match

    :raises: ValueError if neither content nor source_file_path is given;
        re.error if the pattern is not a valid regex
    """
    if source_file_path and content is None:
        # NOTE(review): reads with the platform default encoding — confirm utf-8 is not required here
        with open(source_file_path) as f:
            content = f.read()

    if content is None:
        raise ValueError("Pass either content or source_file_path")

    matches: list[MatchedConsecutiveLines] = []
    lines = content.splitlines()
    total_lines = len(lines)

    # Convert a glob-like pattern to its regex equivalent before compiling
    if is_glob:
        pattern = glob_to_regex(pattern)
    if allow_multiline_match:
        # For multiline matches, we need to use the DOTALL flag to make '.' match newlines
        compiled_pattern = re.compile(pattern, re.DOTALL)
        # Search across the entire content as a single string
        for match in compiled_pattern.finditer(content):
            start_pos = match.start()
            end_pos = match.end()

            # Find the 1-based line numbers for the start and end character positions
            start_line_num = content[:start_pos].count("\n") + 1
            end_line_num = content[:end_pos].count("\n") + 1

            # Calculate the (1-based, clamped) range of lines to include in the context
            context_start = max(1, start_line_num - context_lines_before)
            context_end = min(total_lines, end_line_num + context_lines_after)

            # Create TextLine objects for the context; lines between start and end
            # of the match (inclusive) are tagged MATCH, the rest as before/after context
            context_lines = []
            for i in range(context_start - 1, context_end):
                line_num = i + 1
                if context_start <= line_num < start_line_num:
                    match_type = LineType.BEFORE_MATCH
                elif end_line_num < line_num <= context_end:
                    match_type = LineType.AFTER_MATCH
                else:
                    match_type = LineType.MATCH

                context_lines.append(TextLine(line_number=line_num, line_content=lines[i], match_type=match_type))

            matches.append(MatchedConsecutiveLines(lines=context_lines, source_file_path=source_file_path))
    else:
        # TODO: extremely inefficient! Since we currently don't use this option in SerenaAgent or LanguageServer,
        # it is not urgent to fix, but should be either improved or the option should be removed.
        # Search line by line, normal compile without DOTALL
        compiled_pattern = re.compile(pattern)
        for i, line in enumerate(lines):
            line_num = i + 1
            if compiled_pattern.search(line):
                # Calculate the (0-based, clamped) range of lines to include in the context
                context_start = max(0, i - context_lines_before)
                context_end = min(total_lines - 1, i + context_lines_after)

                # Create TextLine objects for the context (line numbers are 1-based)
                context_lines = []
                for j in range(context_start, context_end + 1):
                    context_line_num = j + 1
                    if j < i:
                        match_type = LineType.BEFORE_MATCH
                    elif j > i:
                        match_type = LineType.AFTER_MATCH
                    else:
                        match_type = LineType.MATCH

                    context_lines.append(TextLine(line_number=context_line_num, line_content=lines[j], match_type=match_type))

                matches.append(MatchedConsecutiveLines(lines=context_lines, source_file_path=source_file_path))

    return matches
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def default_file_reader(file_path: str) -> str:
    """Read the entire file at the given path, decoded as UTF-8."""
    with open(file_path, encoding="utf-8") as fp:
        return fp.read()
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def glob_match(pattern: str, path: str) -> bool:
    """
    Match a file path against a glob pattern.

    Supports standard glob patterns:
    - * matches any number of characters except /
    - ** matches any number of directories (zero or more)
    - ? matches a single character except /
    - [seq] matches any character in seq

    :param pattern: Glob pattern (e.g., 'src/**/*.py', '**agent.py')
    :param path: File path to match against
    :return: True if path matches pattern
    """
    # Normalize both operands to forward slashes so Windows-style input behaves uniformly.
    pattern = pattern.replace("\\", "/")
    path = path.replace("\\", "/")

    if "**" not in pattern:
        # Plain glob without the recursive wildcard: fnmatch handles it directly.
        return fnmatch.fnmatch(path, pattern)

    def full_match(glob_pattern: str) -> bool:
        # fnmatch.translate produces an anchored regex, so re.match tests the whole path.
        return re.match(fnmatch.translate(glob_pattern), path) is not None

    # Case 1: standard fnmatch semantics, where '**' matches one or more directories.
    if full_match(pattern):
        return True
    # Case 2: '/**/' should also match zero intermediate directories,
    # e.g. "src/**/test.py" should match "src/test.py".
    if "/**/" in pattern and full_match(pattern.replace("/**/", "/")):
        return True
    # Case 3: a leading '**/' should also match a path with no directory prefix,
    # e.g. "**/test.py" should match "test.py".
    if pattern.startswith("**/") and full_match(pattern[3:]):
        return True
    return False
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def search_files(
    relative_file_paths: list[str],
    pattern: str,
    root_path: str = "",
    file_reader: Callable[[str], str] = default_file_reader,
    context_lines_before: int = 0,
    context_lines_after: int = 0,
    paths_include_glob: str | None = None,
    paths_exclude_glob: str | None = None,
) -> list[MatchedConsecutiveLines]:
    """
    Search for a pattern in a list of files.

    :param relative_file_paths: List of relative file paths in which to search
    :param pattern: Pattern to search for
    :param root_path: Root path to resolve relative paths against (by default, current working directory).
    :param file_reader: Function to read a file; by default reads the file as UTF-8.
        All files that can't be read by it will be skipped.
    :param context_lines_before: Number of context lines to include before matches
    :param context_lines_after: Number of context lines to include after matches
    :param paths_include_glob: Optional glob pattern to include files from the list
    :param paths_exclude_glob: Optional glob pattern to exclude files from the list
    :return: List of MatchedConsecutiveLines objects
    """
    # Apply the include/exclude glob filters up front; this is cheap, so it is done sequentially.
    candidate_paths = []
    for rel_path in relative_file_paths:
        if paths_include_glob and not glob_match(paths_include_glob, rel_path):
            log.debug(f"Skipping {rel_path}: does not match include pattern {paths_include_glob}")
            continue
        if paths_exclude_glob and glob_match(paths_exclude_glob, rel_path):
            log.debug(f"Skipping {rel_path}: matches exclude pattern {paths_exclude_glob}")
            continue
        candidate_paths.append(rel_path)

    log.info(f"Processing {len(candidate_paths)} files.")

    def scan_file(rel_path: str) -> dict[str, Any]:
        """Read and search one file; any exception is captured rather than raised."""
        try:
            found = search_text(
                pattern,
                content=file_reader(os.path.join(root_path, rel_path)),
                source_file_path=rel_path,
                allow_multiline_match=True,
                context_lines_before=context_lines_before,
                context_lines_after=context_lines_after,
            )
            if found:
                log.debug(f"Found {len(found)} matches in {rel_path}")
            return {"path": rel_path, "results": found, "error": None}
        except Exception as e:
            log.debug(f"Error processing {rel_path}: {e}")
            return {"path": rel_path, "results": [], "error": str(e)}

    # Fan out over the candidate files using joblib's threading backend
    # (I/O-bound work, so threads are sufficient and avoid process spawn cost).
    per_file_results = Parallel(
        n_jobs=-1,
        backend="threading",
    )(delayed(scan_file)(p) for p in candidate_paths)

    # Collect successful matches and remember which files could not be processed.
    matches: list[MatchedConsecutiveLines] = []
    failures: list[tuple[str, str]] = []
    for entry in per_file_results:
        if entry["error"]:
            failures.append((entry["path"], entry["error"]))
        else:
            matches.extend(entry["results"])

    if failures:
        log.debug(f"Failed to read {len(failures)} files: {failures}")

    log.info(f"Found {len(matches)} total matches across {len(candidate_paths)} files")
    return matches
|
projects/ui/serena-new/src/solidlsp/.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
language_servers/static
|
projects/ui/serena-new/src/solidlsp/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa
|
| 2 |
+
from .ls import SolidLanguageServer
|
projects/ui/serena-new/src/solidlsp/ls.py
ADDED
|
@@ -0,0 +1,1738 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import dataclasses
|
| 2 |
+
import hashlib
|
| 3 |
+
import json
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import pathlib
|
| 7 |
+
import pickle
|
| 8 |
+
import shutil
|
| 9 |
+
import subprocess
|
| 10 |
+
import threading
|
| 11 |
+
from abc import ABC, abstractmethod
|
| 12 |
+
from collections import defaultdict
|
| 13 |
+
from collections.abc import Iterator
|
| 14 |
+
from contextlib import contextmanager
|
| 15 |
+
from copy import copy
|
| 16 |
+
from pathlib import Path, PurePath
|
| 17 |
+
from time import sleep
|
| 18 |
+
from typing import Self, Union, cast
|
| 19 |
+
|
| 20 |
+
import pathspec
|
| 21 |
+
|
| 22 |
+
from serena.text_utils import MatchedConsecutiveLines
|
| 23 |
+
from serena.util.file_system import match_path
|
| 24 |
+
from solidlsp import ls_types
|
| 25 |
+
from solidlsp.ls_config import Language, LanguageServerConfig
|
| 26 |
+
from solidlsp.ls_exceptions import SolidLSPException
|
| 27 |
+
from solidlsp.ls_handler import SolidLanguageServerHandler
|
| 28 |
+
from solidlsp.ls_logger import LanguageServerLogger
|
| 29 |
+
from solidlsp.ls_types import UnifiedSymbolInformation
|
| 30 |
+
from solidlsp.ls_utils import FileUtils, PathUtils, TextUtils
|
| 31 |
+
from solidlsp.lsp_protocol_handler import lsp_types
|
| 32 |
+
from solidlsp.lsp_protocol_handler import lsp_types as LSPTypes
|
| 33 |
+
from solidlsp.lsp_protocol_handler.lsp_constants import LSPConstants
|
| 34 |
+
from solidlsp.lsp_protocol_handler.lsp_types import Definition, DefinitionParams, LocationLink, SymbolKind
|
| 35 |
+
from solidlsp.lsp_protocol_handler.server import (
|
| 36 |
+
LSPError,
|
| 37 |
+
ProcessLaunchInfo,
|
| 38 |
+
StringDict,
|
| 39 |
+
)
|
| 40 |
+
from solidlsp.settings import SolidLSPSettings
|
| 41 |
+
|
| 42 |
+
# Union of the symbol representations this module handles: the two raw LSP shapes
# (DocumentSymbol / SymbolInformation) plus solidlsp's unified form.
GenericDocumentSymbol = Union[LSPTypes.DocumentSymbol, LSPTypes.SymbolInformation, ls_types.UnifiedSymbolInformation]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
@dataclasses.dataclass(kw_only=True)
class ReferenceInSymbol:
    """A symbol retrieved when requesting reference to a symbol, together with the location of the reference"""

    # the symbol in which the reference occurs
    symbol: ls_types.UnifiedSymbolInformation
    # line of the reference within the file (presumably 0-based, per LSP convention — confirm with callers)
    line: int
    # column/character offset of the reference on that line (same indexing convention as `line`)
    character: int
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@dataclasses.dataclass
class LSPFileBuffer:
    """
    In-memory representation of a file that is currently open in the language server.
    """

    # URI under which the file was opened
    uri: str

    # current text content of the file
    contents: str

    # document version, incremented on every change notification
    version: int

    # LSP language identifier of the file
    language_id: str

    # number of clients currently holding this file open
    ref_count: int

    # MD5 hex digest of `contents`, filled in on construction
    content_hash: str = ""

    def __post_init__(self) -> None:
        # Hash the content once up front so cache lookups can compare cheaply.
        digest = hashlib.md5(self.contents.encode("utf-8"))
        self.content_hash = digest.hexdigest()
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class SolidLanguageServer(ABC):
|
| 82 |
+
"""
|
| 83 |
+
The LanguageServer class provides a language agnostic interface to the Language Server Protocol.
|
| 84 |
+
It is used to communicate with Language Servers of different programming languages.
|
| 85 |
+
"""
|
| 86 |
+
|
| 87 |
+
CACHE_FOLDER_NAME = "cache"
|
| 88 |
+
|
| 89 |
+
# To be overridden and extended by subclasses
|
| 90 |
+
def is_ignored_dirname(self, dirname: str) -> bool:
    """
    Language-specific predicate for directory names that must always be skipped
    (e.g. venv in Python, node_modules in JS/TS). Subclasses override and extend this.
    The base implementation skips all hidden directories.
    """
    # Equivalent to dirname.startswith("."); an empty name yields False.
    return dirname[:1] == "."
|
| 96 |
+
|
| 97 |
+
@classmethod
def ls_resources_dir(cls, solidlsp_settings: SolidLSPSettings, mkdir: bool = True) -> str:
    """
    Returns the directory where the language server resources are downloaded.
    This is used to store language server binaries, configuration files, etc.

    :param solidlsp_settings: the settings providing the base resources directory
    :param mkdir: whether to create the directory if it does not yet exist
    :return: the absolute path of the per-language-server resources directory
    """
    result = os.path.join(solidlsp_settings.ls_resources_dir, cls.__name__)

    # Migration of previously downloaded LS resources that were downloaded to a subdir of solidlsp instead of to the user's home
    pre_migration_ls_resources_dir = os.path.join(os.path.dirname(__file__), "language_servers", "static", cls.__name__)
    if os.path.exists(pre_migration_ls_resources_dir):
        if os.path.exists(result):
            # Bug fix: the new location already exists, so discard the *old* (pre-migration)
            # resources. (Previously this removed `result`, i.e. the already-migrated
            # resources, contradicting the stated intent and re-triggering downloads.)
            shutil.rmtree(pre_migration_ls_resources_dir, ignore_errors=True)
        else:
            # move old resources to the new location
            shutil.move(pre_migration_ls_resources_dir, result)
    if mkdir:
        os.makedirs(result, exist_ok=True)
    return result
|
| 117 |
+
|
| 118 |
+
@classmethod
def create(
    cls,
    config: LanguageServerConfig,
    logger: LanguageServerLogger,
    repository_root_path: str,
    timeout: float | None = None,
    solidlsp_settings: SolidLSPSettings | None = None,
) -> "SolidLanguageServer":
    """
    Creates a language specific LanguageServer instance based on the given configuration, and appropriate settings for the programming language.

    If language is Java, then ensure that jdk-17.0.6 or higher is installed, `java` is in PATH, and JAVA_HOME is set to the installation directory.
    If language is JS/TS, then ensure that node (v18.16.0 or higher) is installed and in PATH.

    :param repository_root_path: The root path of the repository.
    :param config: The Multilspy configuration.
    :param logger: The logger to use.
    :param timeout: the timeout for requests to the language server. If None, no timeout will be used.
    :return LanguageServer: A language specific LanguageServer instance.
    :raises SolidLSPException: if the configured language is not supported.
    """
    import importlib

    if solidlsp_settings is None:
        solidlsp_settings = SolidLSPSettings()

    # Data-driven dispatch replacing a long, repetition-prone if/elif chain:
    # maps each supported language to (module path, class name) of its language
    # server implementation. Modules are imported lazily so that only the
    # dependencies of the requested language are ever loaded.
    registry: dict[Language, tuple[str, str]] = {
        Language.PYTHON: ("solidlsp.language_servers.pyright_server", "PyrightServer"),
        Language.PYTHON_JEDI: ("solidlsp.language_servers.jedi_server", "JediServer"),
        Language.JAVA: ("solidlsp.language_servers.eclipse_jdtls", "EclipseJDTLS"),
        Language.KOTLIN: ("solidlsp.language_servers.kotlin_language_server", "KotlinLanguageServer"),
        Language.RUST: ("solidlsp.language_servers.rust_analyzer", "RustAnalyzer"),
        Language.CSHARP: ("solidlsp.language_servers.csharp_language_server", "CSharpLanguageServer"),
        Language.CSHARP_OMNISHARP: ("solidlsp.language_servers.omnisharp", "OmniSharp"),
        Language.TYPESCRIPT: ("solidlsp.language_servers.typescript_language_server", "TypeScriptLanguageServer"),
        # VTS based Language Server implementation, need to experiment to see if it improves performance
        Language.TYPESCRIPT_VTS: ("solidlsp.language_servers.vts_language_server", "VtsLanguageServer"),
        Language.GO: ("solidlsp.language_servers.gopls", "Gopls"),
        Language.RUBY: ("solidlsp.language_servers.ruby_lsp", "RubyLsp"),
        Language.RUBY_SOLARGRAPH: ("solidlsp.language_servers.solargraph", "Solargraph"),
        Language.DART: ("solidlsp.language_servers.dart_language_server", "DartLanguageServer"),
        Language.CPP: ("solidlsp.language_servers.clangd_language_server", "ClangdLanguageServer"),
        Language.PHP: ("solidlsp.language_servers.intelephense", "Intelephense"),
        Language.R: ("solidlsp.language_servers.r_language_server", "RLanguageServer"),
        Language.CLOJURE: ("solidlsp.language_servers.clojure_lsp", "ClojureLSP"),
        Language.ELIXIR: ("solidlsp.language_servers.elixir_tools.elixir_tools", "ElixirTools"),
        Language.TERRAFORM: ("solidlsp.language_servers.terraform_ls", "TerraformLS"),
        Language.SWIFT: ("solidlsp.language_servers.sourcekit_lsp", "SourceKitLSP"),
        Language.BASH: ("solidlsp.language_servers.bash_language_server", "BashLanguageServer"),
        Language.ZIG: ("solidlsp.language_servers.zls", "ZigLanguageServer"),
        Language.NIX: ("solidlsp.language_servers.nixd_ls", "NixLanguageServer"),
        Language.LUA: ("solidlsp.language_servers.lua_ls", "LuaLanguageServer"),
        Language.ERLANG: ("solidlsp.language_servers.erlang_language_server", "ErlangLanguageServer"),
    }

    try:
        # Normalize to the enum first so string-valued configs also resolve,
        # mirroring the `==` comparisons of the previous implementation.
        module_name, class_name = registry[Language(config.code_language)]
    except (ValueError, KeyError):
        logger.log(f"Language {config.code_language} is not supported", logging.ERROR)
        raise SolidLSPException(f"Language {config.code_language} is not supported") from None

    ls_class = getattr(importlib.import_module(module_name), class_name)
    ls: SolidLanguageServer = ls_class(config, logger, repository_root_path, solidlsp_settings=solidlsp_settings)

    ls.set_request_timeout(timeout)
    return ls
|
| 279 |
+
|
| 280 |
+
def __init__(
    self,
    config: LanguageServerConfig,
    logger: LanguageServerLogger,
    repository_root_path: str,
    process_launch_info: ProcessLaunchInfo,
    language_id: str,
    solidlsp_settings: SolidLSPSettings,
):
    """
    Initializes a LanguageServer instance.

    Do not instantiate this class directly. Use `LanguageServer.create` method instead.

    :param config: The Multilspy configuration.
    :param logger: The logger to use.
    :param repository_root_path: The root path of the repository.
    :param process_launch_info: Each language server has a specific command used to start the server.
        This parameter is the command to launch the language server process.
        The command must pass appropriate flags to the binary, so that it runs in the stdio mode,
        as opposed to HTTP, TCP modes supported by some language servers.
    :param language_id: the LSP language identifier for the files this server handles.
    :param solidlsp_settings: global solidlsp settings (e.g. resource directories).
    """
    self._solidlsp_settings = solidlsp_settings
    self.logger = logger
    self.repository_root_path: str = repository_root_path
    self.logger.log(
        f"Creating language server instance for {repository_root_path=} with {language_id=} and process launch info: {process_launch_info}",
        logging.DEBUG,
    )

    self.language_id = language_id
    # Maps file URIs to their in-memory, ref-counted buffers (see open_file)
    self.open_file_buffers: dict[str, LSPFileBuffer] = {}
    self.language = Language(language_id)

    # load cache first to prevent any racing conditions due to asyncio stuff
    self._document_symbols_cache: dict[
        str, tuple[str, tuple[list[ls_types.UnifiedSymbolInformation], list[ls_types.UnifiedSymbolInformation]]]
    ] = {}
    """Maps file paths to a tuple of (file_content_hash, result_of_request_document_symbols)"""
    self._cache_lock = threading.Lock()
    self._cache_has_changed: bool = False
    self.load_cache()

    self.server_started = False
    self.completions_available = threading.Event()
    if config.trace_lsp_communication:
        # Trace every LSP message exchanged with the server, at the logger's current level.

        def logging_fn(source: str, target: str, msg: StringDict | str):
            self.logger.log(f"LSP: {source} -> {target}: {msg!s}", self.logger.logger.level)

    else:
        logging_fn = None

    # cmd is obtained from the child classes, which provide the language specific command to start the language server
    # LanguageServerHandler provides the functionality to start the language server and communicate with it
    self.logger.log(
        f"Creating language server instance with {language_id=} and process launch info: {process_launch_info}", logging.DEBUG
    )
    self.server = SolidLanguageServerHandler(
        process_launch_info,
        logger=logging_fn,
        start_independent_lsp_process=config.start_independent_lsp_process,
    )

    # Set up the pathspec matcher for the ignored paths
    # for all absolute paths in ignored_paths, convert them to relative paths
    processed_patterns = []
    for pattern in set(config.ignored_paths):
        # Normalize separators (pathspec expects forward slashes)
        pattern = pattern.replace(os.path.sep, "/")
        processed_patterns.append(pattern)
    self.logger.log(f"Processing {len(processed_patterns)} ignored paths from the config", logging.DEBUG)

    # Create a pathspec matcher from the processed patterns
    self._ignore_spec = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, processed_patterns)

    self._server_context = None
    # Timeout (seconds) applied to LSP requests; None means wait indefinitely
    self._request_timeout: float | None = None

    # Tracks whether we already waited once for the server to be ready for
    # cross-file reference requests (see _get_wait_time_for_cross_file_referencing)
    self._has_waited_for_cross_file_references = False
|
| 360 |
+
|
| 361 |
+
def _get_wait_time_for_cross_file_referencing(self) -> float:
    """Meant to be overridden by subclasses for LS that don't have a reliable "finished initializing" signal.

    LS may return incomplete results on calls to `request_references` (only references found in the same file),
    if the LS is not fully initialized yet.

    :return: the number of seconds to wait before issuing cross-file reference requests.
    """
    return 2
|
| 368 |
+
|
| 369 |
+
def set_request_timeout(self, timeout: float | None) -> None:
    """
    Configure the request timeout by delegating to the underlying server handler.

    :param timeout: the timeout, in seconds, for requests to the language server.
        A value of None disables the timeout.
    """
    self.server.set_request_timeout(timeout)
|
| 374 |
+
|
| 375 |
+
def get_ignore_spec(self) -> pathspec.PathSpec:
    """Returns the pathspec matcher for the paths that were configured to be ignored through
    the multilspy config.

    This is a subset of the full language-specific ignore spec that determines
    which files are relevant for the language server.

    This matcher is useful for operations outside of the language server,
    such as when searching for relevant non-language files in the project.
    """
    return self._ignore_spec
|
| 386 |
+
|
| 387 |
+
def is_ignored_path(self, relative_path: str, ignore_unsupported_files: bool = True) -> bool:
    """
    Determine whether a path should be ignored, based on its file type and the
    configured ignore patterns.

    :param relative_path: the path to check, relative to the repository root
    :param ignore_unsupported_files: whether files that are not supported source files should be ignored
    :return: True if the path should be ignored, False otherwise
    :raises FileNotFoundError: if the path does not exist on disk
    """
    absolute_path = os.path.join(self.repository_root_path, relative_path)
    if not os.path.exists(absolute_path):
        raise FileNotFoundError(f"File {absolute_path} not found, the ignore check cannot be performed")

    # Files whose names are not relevant for this language are ignored outright,
    # unless the caller explicitly opted out of that check.
    is_file = os.path.isfile(absolute_path)
    if is_file and ignore_unsupported_files:
        if not self.language.get_source_fn_matcher().is_relevant_filename(absolute_path):
            return True

    # A single directory component matching the language-specific "always ignore"
    # condition makes the whole path ignored. For files, the last component is the
    # file name itself and is therefore excluded from the directory check.
    components = Path(relative_path).parts
    if is_file:
        components = components[:-1]
    if any(part and self.is_ignored_dirname(part) for part in components):
        return True

    # Finally, consult the user-configured ignore patterns.
    return match_path(relative_path, self.get_ignore_spec(), root_path=self.repository_root_path)
|
| 422 |
+
|
| 423 |
+
def _shutdown(self, timeout: float = 5.0):
    """
    A robust shutdown process designed to terminate cleanly on all platforms, including Windows,
    by explicitly closing all I/O pipes.

    :param timeout: maximum number of seconds to wait for the process to exit
        after it has been asked to terminate.
    """
    if not self.server.is_running():
        self.logger.log("Server process not running, skipping shutdown.", logging.DEBUG)
        return

    self.logger.log(f"Initiating final robust shutdown with a {timeout}s timeout...", logging.INFO)
    process = self.server.process

    # --- Main Shutdown Logic ---
    # Stage 1: Graceful Termination Request
    # Send LSP shutdown and close stdin to signal no more input.
    try:
        self.logger.log("Sending LSP shutdown request...", logging.DEBUG)
        # Use a thread to timeout the LSP shutdown call since it can hang
        shutdown_thread = threading.Thread(target=self.server.shutdown)
        shutdown_thread.daemon = True
        shutdown_thread.start()
        shutdown_thread.join(timeout=2.0)  # 2 second timeout for LSP shutdown

        if shutdown_thread.is_alive():
            # The daemon thread is abandoned here; it cannot block interpreter exit.
            self.logger.log("LSP shutdown request timed out, proceeding to terminate...", logging.DEBUG)
        else:
            self.logger.log("LSP shutdown request completed.", logging.DEBUG)

        # NOTE(review): is_closing() suggests an asyncio-style transport rather than a
        # plain subprocess pipe — confirm against SolidLanguageServerHandler.
        if process.stdin and not process.stdin.is_closing():
            process.stdin.close()
        self.logger.log("Stage 1 shutdown complete.", logging.DEBUG)
    except Exception as e:
        self.logger.log(f"Exception during graceful shutdown: {e}", logging.DEBUG)
        # Ignore errors here, we are proceeding to terminate anyway.

    # Stage 2: Terminate and Wait for Process to Exit
    self.logger.log(f"Terminating process {process.pid}, current status: {process.poll()}", logging.DEBUG)
    process.terminate()

    # Stage 3: Wait for process termination with timeout
    try:
        self.logger.log(f"Waiting for process {process.pid} to terminate...", logging.DEBUG)
        exit_code = process.wait(timeout=timeout)
        self.logger.log(f"Language server process terminated successfully with exit code {exit_code}.", logging.INFO)
    except subprocess.TimeoutExpired:
        # If termination failed, forcefully kill the process
        self.logger.log(f"Process {process.pid} termination timed out, killing process forcefully...", logging.WARNING)
        process.kill()
        try:
            exit_code = process.wait(timeout=2.0)
            self.logger.log(f"Language server process killed successfully with exit code {exit_code}.", logging.INFO)
        except subprocess.TimeoutExpired:
            # Nothing more we can do; report and give up.
            self.logger.log(f"Process {process.pid} could not be killed within timeout.", logging.ERROR)
    except Exception as e:
        self.logger.log(f"Error during process shutdown: {e}", logging.ERROR)
|
| 478 |
+
|
| 479 |
+
@contextmanager
def start_server(self) -> Iterator["SolidLanguageServer"]:
    """Context manager that starts the language server on entry and stops it on exit.

    The server is stopped even if the body of the ``with`` block raises, so the
    underlying language server process is not leaked on error. (Previously the
    stop call was skipped when an exception propagated out of the block.)
    """
    self.start()
    try:
        yield self
    finally:
        self.stop()
|
| 484 |
+
|
| 485 |
+
def _start_server_process(self) -> None:
    """Launch the language-specific server process."""
    # The flag is set before launching; _start_server implementations may call
    # methods that check `server_started`.
    self.server_started = True
    self._start_server()
|
| 488 |
+
|
| 489 |
+
@abstractmethod
def _start_server(self):
    """Start the language server. Must be implemented by language-specific subclasses."""
    pass
|
| 492 |
+
|
| 493 |
+
@contextmanager
def open_file(self, relative_file_path: str) -> Iterator[LSPFileBuffer]:
    """
    Open a file in the Language Server. This is required before making any requests to the Language Server.

    Open calls are reference-counted: a file that is already open is not re-opened, and the
    `textDocument/didClose` notification is only sent once the last user exits the context.
    The ref-count bookkeeping now runs in ``finally`` blocks, so a raising ``with`` body can
    no longer leak the buffer or leave the file permanently open (previously the decrement
    and close were skipped on exception).

    :param relative_file_path: The relative path of the file to open.
    :raises SolidLSPException: if the language server has not been started.
    """
    if not self.server_started:
        self.logger.log(
            "open_file called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    absolute_file_path = str(PurePath(self.repository_root_path, relative_file_path))
    uri = pathlib.Path(absolute_file_path).as_uri()

    if uri in self.open_file_buffers:
        assert self.open_file_buffers[uri].uri == uri
        assert self.open_file_buffers[uri].ref_count >= 1

        self.open_file_buffers[uri].ref_count += 1
        try:
            yield self.open_file_buffers[uri]
        finally:
            # Decrement even if the body raised; the outermost opener handles the close.
            self.open_file_buffers[uri].ref_count -= 1
    else:
        contents = FileUtils.read_file(self.logger, absolute_file_path)

        version = 0
        self.open_file_buffers[uri] = LSPFileBuffer(uri, contents, version, self.language_id, 1)

        self.server.notify.did_open_text_document(
            {
                LSPConstants.TEXT_DOCUMENT: {
                    LSPConstants.URI: uri,
                    LSPConstants.LANGUAGE_ID: self.language_id,
                    LSPConstants.VERSION: 0,
                    LSPConstants.TEXT: contents,
                }
            }
        )
        try:
            yield self.open_file_buffers[uri]
        finally:
            self.open_file_buffers[uri].ref_count -= 1

            # Last user gone: tell the server the document is closed and drop the buffer.
            if self.open_file_buffers[uri].ref_count == 0:
                self.server.notify.did_close_text_document(
                    {
                        LSPConstants.TEXT_DOCUMENT: {
                            LSPConstants.URI: uri,
                        }
                    }
                )
                del self.open_file_buffers[uri]
|
| 545 |
+
|
| 546 |
+
def insert_text_at_position(self, relative_file_path: str, line: int, column: int, text_to_be_inserted: str) -> ls_types.Position:
    """
    Insert text at the given line and column in the given file and return
    the updated cursor position after inserting the text.

    The file must already have been opened via :meth:`open_file`.

    :param relative_file_path: The relative path of the file to open.
    :param line: The line number at which text should be inserted.
    :param column: The column number at which text should be inserted.
    :param text_to_be_inserted: The text to insert.
    """
    if not self.server_started:
        self.logger.log(
            "insert_text_at_position called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    absolute_file_path = str(PurePath(self.repository_root_path, relative_file_path))
    uri = pathlib.Path(absolute_file_path).as_uri()

    # Ensure the file is open
    assert uri in self.open_file_buffers

    file_buffer = self.open_file_buffers[uri]
    # Each change notification must carry a strictly increasing document version
    file_buffer.version += 1

    new_contents, new_l, new_c = TextUtils.insert_text_at_position(file_buffer.contents, line, column, text_to_be_inserted)
    file_buffer.contents = new_contents
    self.server.notify.did_change_text_document(
        {
            LSPConstants.TEXT_DOCUMENT: {
                LSPConstants.VERSION: file_buffer.version,
                LSPConstants.URI: file_buffer.uri,
            },
            LSPConstants.CONTENT_CHANGES: [
                {
                    # A zero-width range (start == end) denotes a pure insertion
                    LSPConstants.RANGE: {
                        "start": {"line": line, "character": column},
                        "end": {"line": line, "character": column},
                    },
                    "text": text_to_be_inserted,
                }
            ],
        }
    )
    return ls_types.Position(line=new_l, character=new_c)
|
| 592 |
+
|
| 593 |
+
def delete_text_between_positions(
    self,
    relative_file_path: str,
    start: ls_types.Position,
    end: ls_types.Position,
) -> str:
    """
    Delete text between the given start and end positions in the given file and return the deleted text.

    Updates the in-memory buffer, bumps its version and notifies the language server
    of the change.

    :param relative_file_path: The relative path of the (already opened) file.
    :param start: The position at which the deletion starts.
    :param end: The position at which the deletion ends.
    :return: The text that was deleted.
    """
    if not self.server_started:
        # Fixed log message: it previously said "insert_text_at_position" (copy-paste error)
        self.logger.log(
            "delete_text_between_positions called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    absolute_file_path = str(PurePath(self.repository_root_path, relative_file_path))
    uri = pathlib.Path(absolute_file_path).as_uri()

    # Ensure the file is open
    assert uri in self.open_file_buffers

    file_buffer = self.open_file_buffers[uri]
    file_buffer.version += 1
    new_contents, deleted_text = TextUtils.delete_text_between_positions(
        file_buffer.contents, start_line=start["line"], start_col=start["character"], end_line=end["line"], end_col=end["character"]
    )
    file_buffer.contents = new_contents
    # An empty "text" with a non-empty range is how LSP expresses a deletion
    self.server.notify.did_change_text_document(
        {
            LSPConstants.TEXT_DOCUMENT: {
                LSPConstants.VERSION: file_buffer.version,
                LSPConstants.URI: file_buffer.uri,
            },
            LSPConstants.CONTENT_CHANGES: [{LSPConstants.RANGE: {"start": start, "end": end}, "text": ""}],
        }
    )
    return deleted_text
|
| 631 |
+
|
| 632 |
+
def _send_definition_request(self, definition_params: DefinitionParams) -> Definition | list[LocationLink] | None:
    """Send a raw textDocument/definition request and return the server's unprocessed response.

    Kept as a separate method, presumably so subclasses can override the raw call for
    language servers that need special handling (cf. the comment on _send_references_request).
    """
    return self.server.send.definition(definition_params)
|
| 634 |
+
|
| 635 |
+
def request_definition(self, relative_file_path: str, line: int, column: int) -> list[ls_types.Location]:
    """
    Raise a [textDocument/definition](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_definition) request to the Language Server
    for the symbol at the given line and column in the given file. Wait for the response and return the result.

    Handles the three response shapes allowed by the protocol: a list of Location/LocationLink,
    a single Location dict, or None (no definition found).

    :param relative_file_path: The relative path of the file that has the symbol for which definition should be looked up
    :param line: The line number of the symbol
    :param column: The column number of the symbol

    :return List[multilspy_types.Location]: A list of locations where the symbol is defined
    """
    if not self.server_started:
        self.logger.log(
            "request_definition called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    if not self._has_waited_for_cross_file_references:
        # Some LS require waiting for a while before they can return cross-file definitions.
        # This is a workaround for such LS that don't have a reliable "finished initializing" signal.
        sleep(self._get_wait_time_for_cross_file_referencing())
        self._has_waited_for_cross_file_references = True

    with self.open_file(relative_file_path):
        # sending request to the language server and waiting for response
        definition_params = cast(
            DefinitionParams,
            {
                LSPConstants.TEXT_DOCUMENT: {
                    LSPConstants.URI: pathlib.Path(str(PurePath(self.repository_root_path, relative_file_path))).as_uri()
                },
                LSPConstants.POSITION: {
                    LSPConstants.LINE: line,
                    LSPConstants.CHARACTER: column,
                },
            },
        )
        response = self._send_definition_request(definition_params)

    ret: list[ls_types.Location] = []
    if isinstance(response, list):
        # response is either of type Location[] or LocationLink[]
        for item in response:
            assert isinstance(item, dict)
            if LSPConstants.URI in item and LSPConstants.RANGE in item:
                # Plain Location: enrich with absolute and repo-relative paths
                new_item: ls_types.Location = {}
                new_item.update(item)
                new_item["absolutePath"] = PathUtils.uri_to_path(new_item["uri"])
                new_item["relativePath"] = PathUtils.get_relative_path(new_item["absolutePath"], self.repository_root_path)
                ret.append(ls_types.Location(new_item))
            elif LSPConstants.TARGET_URI in item and LSPConstants.TARGET_RANGE in item and LSPConstants.TARGET_SELECTION_RANGE in item:
                # LocationLink: map target fields onto the unified Location shape;
                # the selection range (the symbol name itself) is used as the range
                new_item: ls_types.Location = {}
                new_item["uri"] = item[LSPConstants.TARGET_URI]
                new_item["absolutePath"] = PathUtils.uri_to_path(new_item["uri"])
                new_item["relativePath"] = PathUtils.get_relative_path(new_item["absolutePath"], self.repository_root_path)
                new_item["range"] = item[LSPConstants.TARGET_SELECTION_RANGE]
                ret.append(ls_types.Location(**new_item))
            else:
                assert False, f"Unexpected response from Language Server: {item}"
    elif isinstance(response, dict):
        # response is of type Location
        assert LSPConstants.URI in response
        assert LSPConstants.RANGE in response

        new_item: ls_types.Location = {}
        new_item.update(response)
        new_item["absolutePath"] = PathUtils.uri_to_path(new_item["uri"])
        new_item["relativePath"] = PathUtils.get_relative_path(new_item["absolutePath"], self.repository_root_path)
        ret.append(ls_types.Location(**new_item))
    elif response is None:
        # Some language servers return None when they cannot find a definition
        # This is expected for certain symbol types like generics or types with incomplete information
        self.logger.log(
            f"Language server returned None for definition request at {relative_file_path}:{line}:{column}",
            logging.WARNING,
        )
    else:
        assert False, f"Unexpected response from Language Server: {response}"

    return ret
|
| 716 |
+
|
| 717 |
+
# Some LS cause problems with this, so the call is isolated from the rest to allow overriding in subclasses
def _send_references_request(self, relative_file_path: str, line: int, column: int) -> list[lsp_types.Location] | None:
    """Send a raw textDocument/references request for the symbol at (line, column).

    The declaration itself is excluded from the results (includeDeclaration=False).
    """
    document_uri = PathUtils.path_to_uri(os.path.join(self.repository_root_path, relative_file_path))
    request_params = {
        "textDocument": {"uri": document_uri},
        "position": {"line": line, "character": column},
        "context": {"includeDeclaration": False},
    }
    return self.server.send.references(request_params)
|
| 726 |
+
|
| 727 |
+
def request_references(self, relative_file_path: str, line: int, column: int) -> list[ls_types.Location]:
    """
    Raise a [textDocument/references](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_references) request to the Language Server
    to find references to the symbol at the given line and column in the given file. Wait for the response and return the result.
    Filters out references located in ignored directories.

    :param relative_file_path: The relative path of the file that has the symbol for which references should be looked up
    :param line: The line number of the symbol
    :param column: The column number of the symbol

    :return: A list of locations where the symbol is referenced (excluding ignored directories)
    """
    if not self.server_started:
        self.logger.log(
            "request_references called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    if not self._has_waited_for_cross_file_references:
        # Some LS require waiting for a while before they can return cross-file references.
        # This is a workaround for such LS that don't have a reliable "finished initializing" signal.
        sleep(self._get_wait_time_for_cross_file_referencing())
        self._has_waited_for_cross_file_references = True

    with self.open_file(relative_file_path):
        try:
            response = self._send_references_request(relative_file_path, line=line, column=column)
        except Exception as e:
            # Catch LSP internal error (-32603) and raise a more informative exception
            if isinstance(e, LSPError) and getattr(e, "code", None) == -32603:
                raise RuntimeError(
                    f"LSP internal error (-32603) when requesting references for {relative_file_path}:{line}:{column}. "
                    "This often occurs when requesting references for a symbol not referenced in the expected way. "
                ) from e
            raise
        if response is None:
            return []

        ret: list[ls_types.Location] = []
        assert isinstance(response, list), f"Unexpected response from Language Server (expected list, got {type(response)}): {response}"
        for item in response:
            assert isinstance(item, dict), f"Unexpected response from Language Server (expected dict, got {type(item)}): {item}"
            assert LSPConstants.URI in item
            assert LSPConstants.RANGE in item

            abs_path = PathUtils.uri_to_path(item[LSPConstants.URI])
            # Drop references that resolve outside the repository (e.g. in installed packages)
            if not Path(abs_path).is_relative_to(self.repository_root_path):
                self.logger.log(
                    "Found a reference in a path outside the repository, probably the LS is parsing things in installed packages or in the standardlib! "
                    f"Path: {abs_path}. This is a bug but we currently simply skip these references.",
                    logging.WARNING,
                )
                continue

            rel_path = Path(abs_path).relative_to(self.repository_root_path)
            # Respect the ignore configuration (e.g. .gitignore-derived rules)
            if self.is_ignored_path(str(rel_path)):
                self.logger.log(f"Ignoring reference in {rel_path} since it should be ignored", logging.DEBUG)
                continue

            new_item: ls_types.Location = {}
            new_item.update(item)
            new_item["absolutePath"] = str(abs_path)
            new_item["relativePath"] = str(rel_path)
            ret.append(ls_types.Location(**new_item))

    return ret
|
| 794 |
+
|
| 795 |
+
def request_text_document_diagnostics(self, relative_file_path: str) -> list[ls_types.Diagnostic]:
    """
    Raise a [textDocument/diagnostic](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_diagnostic) request to the Language Server
    to find diagnostics for the given file. Wait for the response and return the result.

    :param relative_file_path: The relative path of the file to retrieve diagnostics for

    :return: A list of diagnostics for the file
    :raises SolidLSPException: if the language server has not been started yet
    """
    if not self.server_started:
        self.logger.log(
            "request_text_document_diagnostics called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    with self.open_file(relative_file_path):
        response = self.server.send.text_document_diagnostic(
            {
                LSPConstants.TEXT_DOCUMENT: {
                    LSPConstants.URI: pathlib.Path(str(PurePath(self.repository_root_path, relative_file_path))).as_uri()
                }
            }
        )

    if response is None:
        return []

    # Fixed assertion message: it previously said "expected list" although a dict is checked here
    assert isinstance(response, dict), f"Unexpected response from Language Server (expected dict, got {type(response)}): {response}"
    # The URI is the same for every diagnostic of this file; compute it once outside the loop
    uri = pathlib.Path(str(PurePath(self.repository_root_path, relative_file_path))).as_uri()
    ret: list[ls_types.Diagnostic] = []
    for item in response["items"]:
        new_item: ls_types.Diagnostic = {
            "uri": uri,
            "severity": item["severity"],
            "message": item["message"],
            "range": item["range"],
            # "code" is optional per the LSP 3.17 spec; some servers omit it,
            # so use .get to avoid a KeyError (previously item["code"])
            "code": item.get("code"),
        }
        ret.append(ls_types.Diagnostic(new_item))

    return ret
|
| 836 |
+
|
| 837 |
+
def retrieve_full_file_content(self, file_path: str) -> str:
    """
    Retrieve the full content of the given file.

    :param file_path: path to the file; may be absolute (it is then converted to a
        path relative to the repository root) or already relative.
    """
    rel_path = os.path.relpath(file_path, self.repository_root_path) if os.path.isabs(file_path) else file_path
    with self.open_file(rel_path) as file_data:
        return file_data.contents
|
| 845 |
+
|
| 846 |
+
def retrieve_content_around_line(
    self, relative_file_path: str, line: int, context_lines_before: int = 0, context_lines_after: int = 0
) -> MatchedConsecutiveLines:
    """
    Retrieve the content of the given file around the given line.

    :param relative_file_path: The relative path of the file to retrieve the content from
    :param line: The line number to retrieve the content around
    :param context_lines_before: The number of lines to retrieve before the given line
    :param context_lines_after: The number of lines to retrieve after the given line

    :return MatchedConsecutiveLines: A container with the desired lines.
    """
    # Open the file (through the buffer layer) and delegate slicing to MatchedConsecutiveLines
    with self.open_file(relative_file_path) as file_data:
        return MatchedConsecutiveLines.from_file_contents(
            file_data.contents,
            line=line,
            context_lines_before=context_lines_before,
            context_lines_after=context_lines_after,
            source_file_path=relative_file_path,
        )
|
| 868 |
+
|
| 869 |
+
def request_completions(
    self, relative_file_path: str, line: int, column: int, allow_incomplete: bool = False
) -> list[ls_types.CompletionItem]:
    """
    Raise a [textDocument/completion](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion) request to the Language Server
    to find completions at the given line and column in the given file. Wait for the response and return the result.

    Retries up to 30 times while the server reports the completion list as incomplete.
    Keyword completions are filtered out, and duplicate items are removed before returning.

    :param relative_file_path: The relative path of the file that has the symbol for which completions should be looked up
    :param line: The line number of the symbol
    :param column: The column number of the symbol
    :param allow_incomplete: if True, return items even when the server still reports the list as incomplete

    :return List[multilspy_types.CompletionItem]: A list of completions
    """
    with self.open_file(relative_file_path):
        open_file_buffer = self.open_file_buffers[pathlib.Path(os.path.join(self.repository_root_path, relative_file_path)).as_uri()]
        completion_params: LSPTypes.CompletionParams = {
            "position": {"line": line, "character": column},
            "textDocument": {"uri": open_file_buffer.uri},
            "context": {"triggerKind": LSPTypes.CompletionTriggerKind.Invoked},
        }
        response: list[LSPTypes.CompletionItem] | LSPTypes.CompletionList | None = None

        # Re-request while the server reports the list as incomplete (bounded by 30 retries).
        # A bare list response is normalized to the CompletionList shape so the loop
        # condition can uniformly read "isIncomplete".
        num_retries = 0
        while response is None or (response["isIncomplete"] and num_retries < 30):
            self.completions_available.wait()
            response: list[LSPTypes.CompletionItem] | LSPTypes.CompletionList | None = self.server.send.completion(completion_params)
            if isinstance(response, list):
                response = {"items": response, "isIncomplete": False}
            num_retries += 1

        # TODO: Understand how to appropriately handle `isIncomplete`
        if response is None or (response["isIncomplete"] and not (allow_incomplete)):
            return []

        if "items" in response:
            response = response["items"]

        response = cast(list[LSPTypes.CompletionItem], response)

        # TODO: Handle the case when the completion is a keyword
        items = [item for item in response if item["kind"] != LSPTypes.CompletionItemKind.Keyword]

        completions_list: list[ls_types.CompletionItem] = []

        # Normalize the various LSP completion shapes (label / insertText / textEdit)
        # into the unified CompletionItem with a single "completionText" field
        for item in items:
            assert "insertText" in item or "textEdit" in item
            assert "kind" in item
            completion_item = {}
            if "detail" in item:
                completion_item["detail"] = item["detail"]

            if "label" in item:
                completion_item["completionText"] = item["label"]
                completion_item["kind"] = item["kind"]
            elif "insertText" in item:
                completion_item["completionText"] = item["insertText"]
                completion_item["kind"] = item["kind"]
            elif "textEdit" in item and "newText" in item["textEdit"]:
                completion_item["completionText"] = item["textEdit"]["newText"]
                completion_item["kind"] = item["kind"]
            elif "textEdit" in item and "range" in item["textEdit"]:
                # Only zero-width edits at the requested cursor position are supported here
                new_dot_lineno, new_dot_colno = (
                    completion_params["position"]["line"],
                    completion_params["position"]["character"],
                )
                assert all(
                    (
                        item["textEdit"]["range"]["start"]["line"] == new_dot_lineno,
                        item["textEdit"]["range"]["start"]["character"] == new_dot_colno,
                        item["textEdit"]["range"]["start"]["line"] == item["textEdit"]["range"]["end"]["line"],
                        item["textEdit"]["range"]["start"]["character"] == item["textEdit"]["range"]["end"]["character"],
                    )
                )

                completion_item["completionText"] = item["textEdit"]["newText"]
                completion_item["kind"] = item["kind"]
            elif "textEdit" in item and "insert" in item["textEdit"]:
                # InsertReplaceEdit is not supported
                assert False
            else:
                assert False

            completion_item = ls_types.CompletionItem(**completion_item)
            completions_list.append(completion_item)

        # Deduplicate via a canonical JSON representation of each item
        return [json.loads(json_repr) for json_repr in set(json.dumps(item, sort_keys=True) for item in completions_list)]
|
| 954 |
+
|
| 955 |
+
def request_document_symbols(
    self, relative_file_path: str, include_body: bool = False
) -> tuple[list[ls_types.UnifiedSymbolInformation], list[ls_types.UnifiedSymbolInformation]]:
    """
    Raise a [textDocument/documentSymbol](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_documentSymbol) request to the Language Server
    to find symbols in the given file. Wait for the response and return the result.

    Results are cached (keyed by path and ``include_body``); a cache entry is reused only
    while the file's content hash is unchanged.

    :param relative_file_path: The relative path of the file that has the symbols
    :param include_body: whether to include the body of the symbols in the result.
    :return: A list of symbols in the file, and a list of root symbols that represent the tree structure of the symbols.
        All symbols will have a location, a children, and a parent attribute,
        where the parent attribute is None for root symbols.
        Note that this is slightly different from the call to request_full_symbol_tree,
        where the parent attribute will be the file symbol which in turn may have a package symbol as parent.
        If you need a symbol tree that contains file symbols as well, you should use `request_full_symbol_tree` instead.
    """
    # TODO: it's kinda dumb to not use the cache if include_body is False after include_body was True once
    # Should be fixed in the future, it's a small performance optimization
    cache_key = f"{relative_file_path}-{include_body}"
    with self.open_file(relative_file_path) as file_data:
        # Cache lookup: reuse the previous result only if the file content is unchanged
        with self._cache_lock:
            file_hash_and_result = self._document_symbols_cache.get(cache_key)
            if file_hash_and_result is not None:
                file_hash, result = file_hash_and_result
                if file_hash == file_data.content_hash:
                    self.logger.log(f"Returning cached document symbols for {relative_file_path}", logging.DEBUG)
                    return result
                else:
                    self.logger.log(f"Content for {relative_file_path} has changed. Will overwrite in-memory cache", logging.DEBUG)
            else:
                self.logger.log(f"No cache hit for symbols with {include_body=} in {relative_file_path}", logging.DEBUG)

        self.logger.log(f"Requesting document symbols for {relative_file_path} from the Language Server", logging.DEBUG)
        response = self.server.send.document_symbol(
            {"textDocument": {"uri": pathlib.Path(os.path.join(self.repository_root_path, relative_file_path)).as_uri()}}
        )
        if response is None:
            self.logger.log(
                f"Received None response from the Language Server for document symbols in {relative_file_path}. "
                f"This means the language server can't understand this file (possibly due to syntax errors). It may also be due to a bug or misconfiguration of the LS. "
                f"Returning empty list",
                logging.WARNING,
            )
            return [], []
        assert isinstance(response, list), f"Unexpected response from Language Server: {response}"
        self.logger.log(
            f"Received {len(response)} document symbols for {relative_file_path} from the Language Server",
            logging.DEBUG,
        )

        def turn_item_into_symbol_with_children(item: GenericDocumentSymbol):
            # Mutates `item` in place to satisfy the UnifiedSymbolInformation contract:
            # fills in a location (with absolute/relative paths), optionally the body,
            # a selectionRange, and sets the parent back-reference on each child.
            item = cast(ls_types.UnifiedSymbolInformation, item)
            absolute_path = os.path.join(self.repository_root_path, relative_file_path)

            # handle missing entries in location
            if "location" not in item:
                uri = pathlib.Path(absolute_path).as_uri()
                assert "range" in item
                tree_location = ls_types.Location(
                    uri=uri,
                    range=item["range"],
                    absolutePath=absolute_path,
                    relativePath=relative_file_path,
                )
                item["location"] = tree_location
            location = item["location"]
            if "absolutePath" not in location:
                location["absolutePath"] = absolute_path
            if "relativePath" not in location:
                location["relativePath"] = relative_file_path
            if include_body:
                item["body"] = self.retrieve_symbol_body(item)
            # handle missing selectionRange
            if "selectionRange" not in item:
                if "range" in item:
                    item["selectionRange"] = item["range"]
                else:
                    item["selectionRange"] = item["location"]["range"]
            children = item.get(LSPConstants.CHILDREN, [])
            for child in children:
                child["parent"] = item
            item[LSPConstants.CHILDREN] = children

        flat_all_symbol_list: list[ls_types.UnifiedSymbolInformation] = []
        root_nodes: list[ls_types.UnifiedSymbolInformation] = []
        for root_item in response:
            if "range" not in root_item and "location" not in root_item:
                # NOTE(review): this branch is a deliberate no-op (`...`); it appears to
                # tolerate File/Module roots without a range — confirm intent.
                if root_item["kind"] in [SymbolKind.File, SymbolKind.Module]:
                    ...

            # mutation is more convenient than creating a new dict,
            # so we cast and rename the var after the mutating call to turn_item_into_symbol_with_children
            # which turned and item into a "symbol"
            turn_item_into_symbol_with_children(root_item)
            root_symbol = cast(ls_types.UnifiedSymbolInformation, root_item)
            root_symbol["parent"] = None

            root_nodes.append(root_symbol)
            assert isinstance(root_symbol, dict)
            assert LSPConstants.NAME in root_symbol
            assert LSPConstants.KIND in root_symbol

            if LSPConstants.CHILDREN in root_symbol:
                # TODO: l_tree should be a list of TreeRepr. Define the following function to return TreeRepr as well

                def visit_tree_nodes_and_build_tree_repr(node: GenericDocumentSymbol) -> list[ls_types.UnifiedSymbolInformation]:
                    # Depth-first traversal that normalizes every node and returns the flat list
                    node = cast(ls_types.UnifiedSymbolInformation, node)
                    l: list[ls_types.UnifiedSymbolInformation] = []
                    turn_item_into_symbol_with_children(node)
                    assert LSPConstants.CHILDREN in node
                    children = node[LSPConstants.CHILDREN]
                    l.append(node)
                    for child in children:
                        l.extend(visit_tree_nodes_and_build_tree_repr(child))
                    return l

                flat_all_symbol_list.extend(visit_tree_nodes_and_build_tree_repr(root_symbol))
            else:
                flat_all_symbol_list.append(ls_types.UnifiedSymbolInformation(**root_symbol))

        result = flat_all_symbol_list, root_nodes
        self.logger.log(f"Caching document symbols for {relative_file_path}", logging.DEBUG)
        with self._cache_lock:
            self._document_symbols_cache[cache_key] = (file_data.content_hash, result)
            self._cache_has_changed = True
        return result
|
| 1081 |
+
|
| 1082 |
+
def request_full_symbol_tree(
    self, within_relative_path: str | None = None, include_body: bool = False
) -> list[ls_types.UnifiedSymbolInformation]:
    """
    Will go through all files in the project or within a relative path and build a tree of symbols.
    Note: this may be slow the first time it is called, especially if `within_relative_path` is not used to restrict the search.

    For each file, a symbol of kind File (2) will be created. For directories, a symbol of kind Package (4) will be created.
    All symbols will have a children attribute, thereby representing the tree structure of all symbols in the project
    that are within the repository.
    All symbols except the root packages will have a parent attribute.
    Will ignore directories starting with '.', language-specific defaults
    and user-configured directories (e.g. from .gitignore).

    :param within_relative_path: pass a relative path to only consider symbols within this path.
        If a file is passed, only the symbols within this file will be considered.
        If a directory is passed, all files within this directory will be considered.
    :param include_body: whether to include the body of the symbols in the result.

    :return: A list of root symbols representing the top-level packages/modules in the project.
    """
    if within_relative_path is not None:
        within_abs_path = os.path.join(self.repository_root_path, within_relative_path)
        if not os.path.exists(within_abs_path):
            raise FileNotFoundError(f"File or directory not found: {within_abs_path}")
        if os.path.isfile(within_abs_path):
            # Single-file case: delegate directly to request_document_symbols
            if self.is_ignored_path(within_relative_path):
                self.logger.log(
                    f"You passed a file explicitly, but it is ignored. This is probably an error. File: {within_relative_path}",
                    logging.ERROR,
                )
                return []
            else:
                _, root_nodes = self.request_document_symbols(within_relative_path, include_body=include_body)
                return root_nodes

    # Helper function to recursively process directories
    def process_directory(rel_dir_path: str) -> list[ls_types.UnifiedSymbolInformation]:
        abs_dir_path = self.repository_root_path if rel_dir_path == "." else os.path.join(self.repository_root_path, rel_dir_path)
        abs_dir_path = os.path.realpath(abs_dir_path)

        if self.is_ignored_path(str(Path(abs_dir_path).relative_to(self.repository_root_path))):
            self.logger.log(f"Skipping directory: {rel_dir_path}\n(because it should be ignored)", logging.DEBUG)
            return []

        result = []
        try:
            contained_dir_or_file_names = os.listdir(abs_dir_path)
        except OSError:
            # e.g. permission denied or the directory vanished; treat as empty
            return []

        # Create package symbol for directory
        package_symbol = ls_types.UnifiedSymbolInformation(  # type: ignore
            name=os.path.basename(abs_dir_path),
            kind=ls_types.SymbolKind.Package,
            location=ls_types.Location(
                uri=str(pathlib.Path(abs_dir_path).as_uri()),
                range={"start": {"line": 0, "character": 0}, "end": {"line": 0, "character": 0}},
                absolutePath=str(abs_dir_path),
                relativePath=str(Path(abs_dir_path).resolve().relative_to(self.repository_root_path)),
            ),
            children=[],
        )
        result.append(package_symbol)

        for contained_dir_or_file_name in contained_dir_or_file_names:
            contained_dir_or_file_abs_path = os.path.join(abs_dir_path, contained_dir_or_file_name)
            contained_dir_or_file_rel_path = str(Path(contained_dir_or_file_abs_path).resolve().relative_to(self.repository_root_path))
            if self.is_ignored_path(contained_dir_or_file_rel_path):
                self.logger.log(f"Skipping item: {contained_dir_or_file_rel_path}\n(because it should be ignored)", logging.DEBUG)
                continue

            if os.path.isdir(contained_dir_or_file_abs_path):
                # Recurse into subdirectories; returned roots become children of this package
                child_symbols = process_directory(contained_dir_or_file_rel_path)
                package_symbol["children"].extend(child_symbols)
                for child in child_symbols:
                    child["parent"] = package_symbol

            elif os.path.isfile(contained_dir_or_file_abs_path):
                _, file_root_nodes = self.request_document_symbols(contained_dir_or_file_rel_path, include_body=include_body)

                # Create file symbol, link with children
                file_rel_path = str(Path(contained_dir_or_file_abs_path).resolve().relative_to(self.repository_root_path))
                with self.open_file(file_rel_path) as file_data:
                    fileRange = self._get_range_from_file_content(file_data.contents)
                file_symbol = ls_types.UnifiedSymbolInformation(  # type: ignore
                    name=os.path.splitext(contained_dir_or_file_name)[0],
                    kind=ls_types.SymbolKind.File,
                    range=fileRange,
                    selectionRange=fileRange,
                    location=ls_types.Location(
                        uri=str(pathlib.Path(contained_dir_or_file_abs_path).as_uri()),
                        range=fileRange,
                        absolutePath=str(contained_dir_or_file_abs_path),
                        relativePath=str(Path(contained_dir_or_file_abs_path).resolve().relative_to(self.repository_root_path)),
                    ),
                    children=file_root_nodes,
                    parent=package_symbol,
                )
                for child in file_root_nodes:
                    child["parent"] = file_symbol

                # Link file symbol with package
                package_symbol["children"].append(file_symbol)

                # TODO: Not sure if this is actually still needed given recent changes to relative path handling
                def fix_relative_path(nodes: list[ls_types.UnifiedSymbolInformation]):
                    # Recursively rewrite absolute "relativePath" entries to be repo-relative
                    for node in nodes:
                        if "location" in node and "relativePath" in node["location"]:
                            path = Path(node["location"]["relativePath"])
                            if path.is_absolute():
                                try:
                                    path = path.relative_to(self.repository_root_path)
                                    node["location"]["relativePath"] = str(path)
                                except Exception:
                                    # best-effort: leave the path untouched if it is not under the repo root
                                    pass
                        if "children" in node:
                            fix_relative_path(node["children"])

                fix_relative_path(file_root_nodes)

        return result

    # Start from the root or the specified directory
    start_rel_path = within_relative_path or "."
    return process_directory(start_rel_path)
|
| 1208 |
+
|
| 1209 |
+
@staticmethod
def _get_range_from_file_content(file_content: str) -> ls_types.Range:
    """
    Compute a Range covering the entire given file content.
    """
    # End position points past the last line's final character
    # (line index == number of lines, character == length of last line).
    content_lines = file_content.split("\n")
    start_pos = ls_types.Position(line=0, character=0)
    end_pos = ls_types.Position(line=len(content_lines), character=len(content_lines[-1]))
    return ls_types.Range(start=start_pos, end=end_pos)
|
| 1218 |
+
|
| 1219 |
+
def request_dir_overview(self, relative_dir_path: str) -> dict[str, list[UnifiedSymbolInformation]]:
    """
    :return: A mapping of all relative paths analyzed to lists of top-level symbols in the corresponding file.
    """
    symbol_tree = self.request_full_symbol_tree(relative_dir_path)
    overview: dict[str, list[UnifiedSymbolInformation]] = defaultdict(list)

    def collect(node: ls_types.UnifiedSymbolInformation) -> None:
        # A file symbol's direct children are the file's top-level symbols.
        if node["kind"] == ls_types.SymbolKind.File:
            for top_level in node["children"]:
                rel = Path(top_level["location"]["absolutePath"]).resolve().relative_to(self.repository_root_path)
                overview[str(rel)].append(top_level)
        # Recurse into children (packages/directories, and nested structure).
        for child in node["children"]:
            collect(child)

    for root in symbol_tree:
        collect(root)
    return overview
|
| 1242 |
+
|
| 1243 |
+
def request_document_overview(self, relative_file_path: str) -> list[UnifiedSymbolInformation]:
    """
    :return: the top-level symbols in the given file.
    """
    # Only the root-level symbols matter here; the flat symbol list is discarded.
    _all_symbols, root_symbols = self.request_document_symbols(relative_file_path)
    return root_symbols
|
| 1249 |
+
|
| 1250 |
+
def request_overview(self, within_relative_path: str) -> dict[str, list[UnifiedSymbolInformation]]:
    """
    An overview of all symbols in the given file or directory.

    :param within_relative_path: the relative path to the file or directory to get the overview of.
    :return: A mapping of all relative paths analyzed to lists of top-level symbols in the corresponding file.
    :raises FileNotFoundError: if the path does not exist within the repository.
    """
    target = (Path(self.repository_root_path) / within_relative_path).resolve()
    if not target.exists():
        raise FileNotFoundError(f"File or directory not found: {target}")
    # A file yields a single-entry mapping; a directory is handled recursively.
    if target.is_file():
        return {within_relative_path: self.request_document_overview(within_relative_path)}
    return self.request_dir_overview(within_relative_path)
|
| 1266 |
+
|
| 1267 |
+
def request_hover(self, relative_file_path: str, line: int, column: int) -> ls_types.Hover | None:
|
| 1268 |
+
"""
|
| 1269 |
+
Raise a [textDocument/hover](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_hover) request to the Language Server
|
| 1270 |
+
to find the hover information at the given line and column in the given file. Wait for the response and return the result.
|
| 1271 |
+
|
| 1272 |
+
:param relative_file_path: The relative path of the file that has the hover information
|
| 1273 |
+
:param line: The line number of the symbol
|
| 1274 |
+
:param column: The column number of the symbol
|
| 1275 |
+
|
| 1276 |
+
:return None
|
| 1277 |
+
"""
|
| 1278 |
+
with self.open_file(relative_file_path):
|
| 1279 |
+
response = self.server.send.hover(
|
| 1280 |
+
{
|
| 1281 |
+
"textDocument": {"uri": pathlib.Path(os.path.join(self.repository_root_path, relative_file_path)).as_uri()},
|
| 1282 |
+
"position": {
|
| 1283 |
+
"line": line,
|
| 1284 |
+
"character": column,
|
| 1285 |
+
},
|
| 1286 |
+
}
|
| 1287 |
+
)
|
| 1288 |
+
|
| 1289 |
+
if response is None:
|
| 1290 |
+
return None
|
| 1291 |
+
|
| 1292 |
+
assert isinstance(response, dict)
|
| 1293 |
+
|
| 1294 |
+
return ls_types.Hover(**response)
|
| 1295 |
+
|
| 1296 |
+
def retrieve_symbol_body(self, symbol: ls_types.UnifiedSymbolInformation | LSPTypes.DocumentSymbol | LSPTypes.SymbolInformation) -> str:
    """
    Load the body of the given symbol. If the body is already contained in the symbol, just return it.
    """
    cached_body = symbol.get("body", None)
    if cached_body:
        return cached_body

    assert "location" in symbol
    assert "relativePath" in symbol["location"]
    rng = symbol["location"]["range"]
    start_line = rng["start"]["line"]
    end_line = rng["end"]["line"]
    file_lines = self.retrieve_full_file_content(symbol["location"]["relativePath"]).split("\n")
    body = "\n".join(file_lines[start_line : end_line + 1])
    # Strip the leading indentation: the slice only affects the start of the
    # joined string, i.e. the first line of the body.
    return body[rng["start"]["character"] :]
|
| 1316 |
+
|
| 1317 |
+
def request_referencing_symbols(
    self,
    relative_file_path: str,
    line: int,
    column: int,
    include_imports: bool = True,
    include_self: bool = False,
    include_body: bool = False,
    include_file_symbols: bool = False,
) -> list[ReferenceInSymbol]:
    """
    Finds all symbols that reference the symbol at the given location.
    This is similar to request_references but filters to only include symbols
    (functions, methods, classes, etc.) that reference the target symbol.

    :param relative_file_path: The relative path to the file.
    :param line: The 0-indexed line number.
    :param column: The 0-indexed column number.
    :param include_imports: whether to also include imports as references.
        Unfortunately, the LSP does not have an import type, so the references corresponding to imports
        will not be easily distinguishable from definitions.
    :param include_self: whether to include the references that is the "input symbol" itself.
        Only has an effect if the relative_file_path, line and column point to a symbol, for example a definition.
    :param include_body: whether to include the body of the symbols in the result.
    :param include_file_symbols: whether to include references that are file symbols. This
        is often a fallback mechanism for when the reference cannot be resolved to a symbol.
    :return: List of objects containing the symbol and the location of the reference.
    """
    if not self.server_started:
        self.logger.log(
            "request_referencing_symbols called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    # First, get all references to the symbol
    references = self.request_references(relative_file_path, line, column)
    if not references:
        return []

    # For each reference, find the containing symbol
    result = []
    # Set when a reference turns out to be the queried symbol itself; later used
    # to heuristically detect imports of that symbol (same name and kind).
    incoming_symbol = None
    for ref in references:
        ref_path = ref["relativePath"]
        ref_line = ref["range"]["start"]["line"]
        ref_col = ref["range"]["start"]["character"]

        with self.open_file(ref_path) as file_data:
            # Get the containing symbol for this reference
            containing_symbol = self.request_containing_symbol(ref_path, ref_line, ref_col, include_body=include_body)
            if containing_symbol is None:
                # TODO: HORRIBLE HACK! I don't know how to do it better for now...
                # THIS IS BOUND TO BREAK IN MANY CASES! IT IS ALSO SPECIFIC TO PYTHON!
                # Background:
                # When a variable is used to change something, like
                #
                # instance = MyClass()
                # instance.status = "new status"
                #
                # we can't find the containing symbol for the reference to `status`
                # since there is no container on the line of the reference
                # The hack is to try to find a variable symbol in the containing module
                # by using the text of the reference to find the variable name (In a very heuristic way)
                # and then look for a symbol with that name and kind Variable
                ref_text = file_data.contents.split("\n")[ref_line]
                if "." in ref_text:
                    containing_symbol_name = ref_text.split(".")[0]
                    all_symbols, _ = self.request_document_symbols(ref_path)
                    for symbol in all_symbols:
                        if symbol["name"] == containing_symbol_name and symbol["kind"] == ls_types.SymbolKind.Variable:
                            # Reuse the variable symbol's metadata but anchor it at the
                            # reference location rather than the variable's definition.
                            containing_symbol = copy(symbol)
                            containing_symbol["location"] = ref
                            containing_symbol["range"] = ref["range"]
                            break

            # We failed retrieving the symbol, falling back to creating a file symbol
            if containing_symbol is None and include_file_symbols:
                self.logger.log(
                    f"Could not find containing symbol for {ref_path}:{ref_line}:{ref_col}. Returning file symbol instead",
                    logging.WARNING,
                )
                fileRange = self._get_range_from_file_content(file_data.contents)
                location = ls_types.Location(
                    uri=str(pathlib.Path(os.path.join(self.repository_root_path, ref_path)).as_uri()),
                    range=fileRange,
                    absolutePath=str(os.path.join(self.repository_root_path, ref_path)),
                    relativePath=ref_path,
                )
                name = os.path.splitext(os.path.basename(ref_path))[0]

                if include_body:
                    body = self.retrieve_full_file_content(ref_path)
                else:
                    body = ""

                containing_symbol = ls_types.UnifiedSymbolInformation(
                    kind=ls_types.SymbolKind.File,
                    range=fileRange,
                    selectionRange=fileRange,
                    location=location,
                    name=name,
                    children=[],
                    body=body,
                )
            # Drop unresolved references, and file-symbol fallbacks unless requested.
            if containing_symbol is None or (not include_file_symbols and containing_symbol["kind"] == ls_types.SymbolKind.File):
                continue

            assert "location" in containing_symbol
            assert "selectionRange" in containing_symbol

            # Checking for self-reference
            if (
                containing_symbol["location"]["relativePath"] == relative_file_path
                and containing_symbol["selectionRange"]["start"]["line"] == ref_line
                and containing_symbol["selectionRange"]["start"]["character"] == ref_col
            ):
                incoming_symbol = containing_symbol
                if include_self:
                    result.append(ReferenceInSymbol(symbol=containing_symbol, line=ref_line, character=ref_col))
                    continue
                self.logger.log(f"Found self-reference for {incoming_symbol['name']}, skipping it since {include_self=}", logging.DEBUG)
                continue

            # checking whether reference is an import
            # This is neither really safe nor elegant, but if we don't do it,
            # there is no way to distinguish between definitions and imports as import is not a symbol-type
            # and we get the type referenced symbol resulting from imports...
            if (
                not include_imports
                and incoming_symbol is not None
                and containing_symbol["name"] == incoming_symbol["name"]
                and containing_symbol["kind"] == incoming_symbol["kind"]
            ):
                self.logger.log(
                    f"Found import of referenced symbol {incoming_symbol['name']}"
                    f"in {containing_symbol['location']['relativePath']}, skipping",
                    logging.DEBUG,
                )
                continue

            result.append(ReferenceInSymbol(symbol=containing_symbol, line=ref_line, character=ref_col))

    return result
|
| 1461 |
+
|
| 1462 |
+
def request_containing_symbol(
    self,
    relative_file_path: str,
    line: int,
    column: int | None = None,
    strict: bool = False,
    include_body: bool = False,
) -> ls_types.UnifiedSymbolInformation | None:
    """
    Finds the first symbol containing the position for the given file.
    For Python, container symbols are considered to be those with kinds corresponding to
    functions, methods, or classes (typically: Function (12), Method (6), Class (5)).

    The method operates as follows:
    - Request the document symbols for the file.
    - Filter symbols to those that start at or before the given line.
    - From these, first look for symbols whose range contains the (line, column).
    - If one or more symbols contain the position, return the one with the greatest starting position
      (i.e. the innermost container).
    - If none (strictly) contain the position, return the symbol with the greatest starting position
      among those above the given line.
    - If no container candidates are found, return None.

    :param relative_file_path: The relative path to the Python file.
    :param line: The 0-indexed line number.
    :param column: The 0-indexed column (also called character). If not passed, the lookup will be based
        only on the line.
    :param strict: If True, the position must be strictly within the range of the symbol.
        Setting to True is useful for example for finding the parent of a symbol, as with strict=False,
        and the line pointing to a symbol itself, the containing symbol will be the symbol itself
        (and not the parent).
    :param include_body: Whether to include the body of the symbol in the result.
    :return: The container symbol (if found) or None.
    """
    # checking if the line is empty, unfortunately ugly and duplicating code, but I don't want to refactor
    with self.open_file(relative_file_path):
        absolute_file_path = str(PurePath(self.repository_root_path, relative_file_path))
        content = FileUtils.read_file(self.logger, absolute_file_path)
        if content.split("\n")[line].strip() == "":
            self.logger.log(
                f"Passing empty lines to request_container_symbol is currently not supported, {relative_file_path=}, {line=}",
                logging.ERROR,
            )
            return None

    symbols, _ = self.request_document_symbols(relative_file_path)

    # make jedi and pyright api compatible
    # the former has no location, the later has no range
    # we will just always add location of the desired format to all symbols
    for symbol in symbols:
        if "location" not in symbol:
            range = symbol["range"]
            location = ls_types.Location(
                # Fix: build a well-formed file URI. The previous f"file:/{...}" produced a
                # malformed URI (single slash, no percent-encoding), inconsistent with the
                # else-branch below which uses Path(...).as_uri().
                uri=Path(absolute_file_path).as_uri(),
                range=range,
                absolutePath=absolute_file_path,
                relativePath=relative_file_path,
            )
            symbol["location"] = location
        else:
            location = symbol["location"]
            assert "range" in location
            location["absolutePath"] = absolute_file_path
            location["relativePath"] = relative_file_path
            location["uri"] = Path(absolute_file_path).as_uri()

    # Allowed container kinds, currently only for Python
    container_symbol_kinds = {ls_types.SymbolKind.Method, ls_types.SymbolKind.Function, ls_types.SymbolKind.Class}

    def is_position_in_range(line: int, range_d: ls_types.Range) -> bool:
        # strict mode excludes the symbol's own start position, so a lookup on a
        # definition line yields the parent rather than the symbol itself.
        start = range_d["start"]
        end = range_d["end"]

        column_condition = True
        if strict:
            line_condition = end["line"] >= line > start["line"]
            if column is not None and line == start["line"]:
                column_condition = column > start["character"]
        else:
            line_condition = end["line"] >= line >= start["line"]
            if column is not None and line == start["line"]:
                column_condition = column >= start["character"]
        return line_condition and column_condition

    # Only consider containers that are not one-liners (otherwise we may get imports)
    candidate_containers = [
        s
        for s in symbols
        if s["kind"] in container_symbol_kinds and s["location"]["range"]["start"]["line"] != s["location"]["range"]["end"]["line"]
    ]
    var_containers = [s for s in symbols if s["kind"] == ls_types.SymbolKind.Variable]
    candidate_containers.extend(var_containers)

    if not candidate_containers:
        return None

    # From the candidates, find those whose range contains the given position.
    containing_symbols = []
    for symbol in candidate_containers:
        s_range = symbol["location"]["range"]
        if not is_position_in_range(line, s_range):
            continue
        containing_symbols.append(symbol)

    if containing_symbols:
        # Return the one with the greatest starting position (i.e. the innermost container).
        containing_symbol = max(containing_symbols, key=lambda s: s["location"]["range"]["start"]["line"])
        if include_body:
            containing_symbol["body"] = self.retrieve_symbol_body(containing_symbol)
        return containing_symbol
    else:
        return None
|
| 1575 |
+
|
| 1576 |
+
def request_container_of_symbol(
    self, symbol: ls_types.UnifiedSymbolInformation, include_body: bool = False
) -> ls_types.UnifiedSymbolInformation | None:
    """
    Finds the container of the given symbol if there is one. If the parent attribute is present,
    the parent is returned without further searching.

    :param symbol: The symbol to find the container of.
    :param include_body: whether to include the body of the symbol in the result.
    :return: The container of the given symbol or None if no container is found.
    """
    if "parent" in symbol:
        return symbol["parent"]
    assert "location" in symbol, f"Symbol {symbol} has no location and no parent attribute"
    start = symbol["location"]["range"]["start"]
    # strict=True so that a lookup at the symbol's own position yields its
    # enclosing container rather than the symbol itself.
    return self.request_containing_symbol(
        symbol["location"]["relativePath"],
        start["line"],
        start["character"],
        strict=True,
        include_body=include_body,
    )
|
| 1597 |
+
|
| 1598 |
+
def request_defining_symbol(
    self,
    relative_file_path: str,
    line: int,
    column: int,
    include_body: bool = False,
) -> ls_types.UnifiedSymbolInformation | None:
    """
    Finds the symbol that defines the symbol at the given location.

    First resolves the definition of the symbol at the given position, then
    retrieves the full symbol information for that definition.

    :param relative_file_path: The relative path to the file.
    :param line: The 0-indexed line number.
    :param column: The 0-indexed column number.
    :param include_body: whether to include the body of the symbol in the result.
    :return: The symbol information for the definition, or None if not found.
    :raises SolidLSPException: if the Language Server has not been started.
    """
    if not self.server_started:
        self.logger.log(
            "request_defining_symbol called before Language Server started",
            logging.ERROR,
        )
        raise SolidLSPException("Language Server not started")

    definitions = self.request_definition(relative_file_path, line, column)
    if not definitions:
        return None

    # Only the first reported definition location is considered.
    target = definitions[0]
    target_start = target["range"]["start"]
    # Resolve the symbol at (or containing) the definition location.
    return self.request_containing_symbol(
        target["relativePath"],
        target_start["line"],
        target_start["character"],
        strict=False,
        include_body=include_body,
    )
|
| 1639 |
+
|
| 1640 |
+
@property
def cache_path(self) -> Path:
    """
    The path to the cache file for the document symbols.
    """
    # Cache lives under the project's data dir, namespaced by language so that
    # different language servers on the same repo do not collide.
    base = Path(self.repository_root_path) / self._solidlsp_settings.project_data_relative_path
    return base / self.CACHE_FOLDER_NAME / self.language_id / "document_symbols_cache_v23-06-25.pkl"
|
| 1652 |
+
|
| 1653 |
+
def save_cache(self):
    """
    Persist the in-memory document symbols cache to ``self.cache_path``.

    No-op when the cache is unchanged since the last save. Serialization
    failures are logged (not raised) and leave the dirty flag set.
    """
    with self._cache_lock:
        if not self._cache_has_changed:
            self.logger.log("No changes to document symbols cache, skipping save", logging.DEBUG)
            return

        self.logger.log(f"Saving updated document symbols cache to {self.cache_path}", logging.INFO)
        self.cache_path.parent.mkdir(parents=True, exist_ok=True)
        try:
            with open(self.cache_path, "wb") as f:
                pickle.dump(self._document_symbols_cache, f)
            # Only cleared after a successful dump, so a failed save is retried
            # on the next call.
            self._cache_has_changed = False
        except Exception as e:
            self.logger.log(
                f"Failed to save document symbols cache to {self.cache_path}: {e}. "
                "Note: this may have resulted in a corrupted cache file.",
                logging.ERROR,
            )
|
| 1671 |
+
|
| 1672 |
+
def load_cache(self):
    """
    Load the document symbols cache from ``self.cache_path`` if it exists.

    A missing file is a silent no-op; a corrupt or unreadable file is logged
    and skipped, leaving the in-memory cache as-is.
    """
    if not self.cache_path.exists():
        return

    with self._cache_lock:
        self.logger.log(f"Loading document symbols cache from {self.cache_path}", logging.INFO)
        try:
            with open(self.cache_path, "rb") as f:
                self._document_symbols_cache = pickle.load(f)
            self.logger.log(f"Loaded {len(self._document_symbols_cache)} document symbols from cache.", logging.INFO)
        except Exception as e:
            # cache often becomes corrupt, so just skip loading it
            self.logger.log(
                f"Failed to load document symbols cache from {self.cache_path}: {e}. Possible cause: the cache file is corrupted. "
                "Check for any errors related to saving the cache in the logs.",
                logging.ERROR,
            )
|
| 1689 |
+
|
| 1690 |
+
def request_workspace_symbol(self, query: str) -> list[ls_types.UnifiedSymbolInformation] | None:
    """
    Send a [workspace/symbol](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_symbol)
    request to the Language Server to find symbols across the whole workspace and return the result.

    :param query: The query string to filter symbols by
    :return: A list of matching symbols, or None if the server returned no result
    """
    response = self.server.send.workspace_symbol({"query": query})
    if response is None:
        return None
    assert isinstance(response, list)

    symbols: list[ls_types.UnifiedSymbolInformation] = []
    for raw_item in response:
        assert isinstance(raw_item, dict)
        # Sanity-check the fields required by the LSP SymbolInformation shape.
        assert LSPConstants.NAME in raw_item
        assert LSPConstants.KIND in raw_item
        assert LSPConstants.LOCATION in raw_item
        symbols.append(ls_types.UnifiedSymbolInformation(**raw_item))
    return symbols
|
| 1716 |
+
|
| 1717 |
+
def start(self) -> "SolidLanguageServer":
    """
    Starts the language server process and connects to it. Call shutdown when ready.

    :return: self for method chaining
    """
    ls = self.language_server
    self.logger.log(
        f"Starting language server with language {ls.language} for {ls.repository_root_path}",
        logging.INFO,
    )
    # Retain the server context handle on the instance for later teardown.
    self._server_context = self._start_server_process()
    return self
|
| 1729 |
+
|
| 1730 |
+
def stop(self, shutdown_timeout: float = 2.0) -> None:
    """
    Stop the language server by delegating to ``_shutdown``.

    :param shutdown_timeout: maximum time (in seconds) to wait for the shutdown to complete
    """
    self._shutdown(timeout=shutdown_timeout)
|
| 1732 |
+
|
| 1733 |
+
@property
def language_server(self) -> Self:
    # Returns self — presumably exists so callers can access a uniform
    # ``language_server`` attribute on wrapper and server objects alike;
    # confirm against the calling code.
    return self
|
| 1736 |
+
|
| 1737 |
+
def is_running(self) -> bool:
    """
    :return: whether the underlying server handler reports the server as running.
    """
    return self.server.is_running()
|
projects/ui/serena-new/src/solidlsp/ls_config.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Configuration objects for language servers
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import fnmatch
|
| 6 |
+
from collections.abc import Iterable
|
| 7 |
+
from dataclasses import dataclass, field
|
| 8 |
+
from enum import Enum
|
| 9 |
+
from typing import Self
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class FilenameMatcher:
    def __init__(self, *patterns: str) -> None:
        """
        :param patterns: fnmatch-compatible patterns
        """
        self.patterns = patterns

    def is_relevant_filename(self, fn: str) -> bool:
        """Return True if the filename matches at least one of the configured patterns."""
        return any(fnmatch.fnmatch(fn, pattern) for pattern in self.patterns)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class Language(str, Enum):
|
| 27 |
+
"""
|
| 28 |
+
Possible languages with Multilspy.
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
CSHARP = "csharp"
|
| 32 |
+
PYTHON = "python"
|
| 33 |
+
RUST = "rust"
|
| 34 |
+
JAVA = "java"
|
| 35 |
+
KOTLIN = "kotlin"
|
| 36 |
+
TYPESCRIPT = "typescript"
|
| 37 |
+
GO = "go"
|
| 38 |
+
RUBY = "ruby"
|
| 39 |
+
DART = "dart"
|
| 40 |
+
CPP = "cpp"
|
| 41 |
+
PHP = "php"
|
| 42 |
+
R = "r"
|
| 43 |
+
CLOJURE = "clojure"
|
| 44 |
+
ELIXIR = "elixir"
|
| 45 |
+
TERRAFORM = "terraform"
|
| 46 |
+
SWIFT = "swift"
|
| 47 |
+
BASH = "bash"
|
| 48 |
+
ZIG = "zig"
|
| 49 |
+
LUA = "lua"
|
| 50 |
+
NIX = "nix"
|
| 51 |
+
ERLANG = "erlang"
|
| 52 |
+
# Experimental or deprecated Language Servers
|
| 53 |
+
TYPESCRIPT_VTS = "typescript_vts"
|
| 54 |
+
"""Use the typescript language server through the natively bundled vscode extension via https://github.com/yioneko/vtsls"""
|
| 55 |
+
PYTHON_JEDI = "python_jedi"
|
| 56 |
+
"""Jedi language server for Python (instead of pyright, which is the default)"""
|
| 57 |
+
CSHARP_OMNISHARP = "csharp_omnisharp"
|
| 58 |
+
"""OmniSharp language server for C# (instead of the default csharp-ls by microsoft).
|
| 59 |
+
Currently has problems with finding references, and generally seems less stable and performant.
|
| 60 |
+
"""
|
| 61 |
+
RUBY_SOLARGRAPH = "ruby_solargraph"
|
| 62 |
+
"""Solargraph language server for Ruby (legacy, experimental).
|
| 63 |
+
Use Language.RUBY (ruby-lsp) for better performance and modern LSP features.
|
| 64 |
+
"""
|
| 65 |
+
|
| 66 |
+
@classmethod
|
| 67 |
+
def iter_all(cls, include_experimental: bool = False) -> Iterable[Self]:
|
| 68 |
+
for lang in cls:
|
| 69 |
+
if include_experimental or not lang.is_experimental():
|
| 70 |
+
yield lang
|
| 71 |
+
|
| 72 |
+
def is_experimental(self) -> bool:
|
| 73 |
+
"""
|
| 74 |
+
Check if the language server is experimental or deprecated.
|
| 75 |
+
"""
|
| 76 |
+
return self in {self.TYPESCRIPT_VTS, self.PYTHON_JEDI, self.CSHARP_OMNISHARP, self.RUBY_SOLARGRAPH}
|
| 77 |
+
|
| 78 |
+
def __str__(self) -> str:
|
| 79 |
+
return self.value
|
| 80 |
+
|
| 81 |
+
def get_source_fn_matcher(self) -> FilenameMatcher:
    """
    Return a FilenameMatcher holding the glob patterns of source files for this language.

    :return: a matcher covering the file extensions handled by this language's server.
    :raises ValueError: if no patterns are registered for this member.
    """
    match self:
        case self.PYTHON | self.PYTHON_JEDI:
            return FilenameMatcher("*.py", "*.pyi")
        case self.JAVA:
            return FilenameMatcher("*.java")
        case self.TYPESCRIPT | self.TYPESCRIPT_VTS:
            # see https://github.com/oraios/serena/issues/204
            # generates *.ts/*.js plus the c/m-prefixed and x-suffixed variants
            # (e.g. *.tsx, *.cts, *.mjs, ...)
            path_patterns = []
            for prefix in ["c", "m", ""]:
                for postfix in ["x", ""]:
                    for base_pattern in ["ts", "js"]:
                        path_patterns.append(f"*.{prefix}{base_pattern}{postfix}")
            return FilenameMatcher(*path_patterns)
        case self.CSHARP | self.CSHARP_OMNISHARP:
            return FilenameMatcher("*.cs")
        case self.RUST:
            return FilenameMatcher("*.rs")
        case self.GO:
            return FilenameMatcher("*.go")
        case self.RUBY:
            # ruby-lsp also handles ERB templates
            return FilenameMatcher("*.rb", "*.erb")
        case self.RUBY_SOLARGRAPH:
            return FilenameMatcher("*.rb")
        case self.CPP:
            return FilenameMatcher("*.cpp", "*.h", "*.hpp", "*.c", "*.hxx", "*.cc", "*.cxx")
        case self.KOTLIN:
            return FilenameMatcher("*.kt", "*.kts")
        case self.DART:
            return FilenameMatcher("*.dart")
        case self.PHP:
            return FilenameMatcher("*.php")
        case self.R:
            return FilenameMatcher("*.R", "*.r", "*.Rmd", "*.Rnw")
        case self.CLOJURE:
            return FilenameMatcher("*.clj", "*.cljs", "*.cljc", "*.edn")  # codespell:ignore edn
        case self.ELIXIR:
            return FilenameMatcher("*.ex", "*.exs")
        case self.TERRAFORM:
            return FilenameMatcher("*.tf", "*.tfvars", "*.tfstate")
        case self.SWIFT:
            return FilenameMatcher("*.swift")
        case self.BASH:
            return FilenameMatcher("*.sh", "*.bash")
        case self.ZIG:
            return FilenameMatcher("*.zig", "*.zon")
        case self.LUA:
            return FilenameMatcher("*.lua")
        case self.NIX:
            return FilenameMatcher("*.nix")
        case self.ERLANG:
            return FilenameMatcher("*.erl", "*.hrl", "*.escript", "*.config", "*.app", "*.app.src")
        case _:
            raise ValueError(f"Unhandled language: {self}")
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
@dataclass
class LanguageServerConfig:
    """
    Configuration parameters for running a language server for a single code language.
    """

    code_language: Language
    # presumably enables logging of the raw LSP JSON-RPC traffic -- confirm with usage
    trace_lsp_communication: bool = False
    # If True, the language server is launched in its own process group so that signals
    # (SIGINT/SIGTERM) sent to the current process are not also delivered to it.
    start_independent_lsp_process: bool = True
    ignored_paths: list[str] = field(default_factory=list)
    """Paths, dirs or glob-like patterns. The matching will follow the same logic as for .gitignore entries"""

    @classmethod
    def from_dict(cls, env: dict) -> "LanguageServerConfig":
        """
        Create a LanguageServerConfig instance from a dictionary, silently ignoring
        keys that do not correspond to fields of this dataclass.

        (Fixed stale docstring: it previously referred to "MultilspyConfig".)
        """
        import inspect

        accepted_params = inspect.signature(cls).parameters
        return cls(**{k: v for k, v in env.items() if k in accepted_params})
|
projects/ui/serena-new/src/solidlsp/ls_exceptions.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module contains the exceptions raised by the framework.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class SolidLSPException(Exception):
|
| 7 |
+
def __init__(self, message: str, cause: Exception | None = None):
|
| 8 |
+
"""
|
| 9 |
+
Initializes the exception with the given message.
|
| 10 |
+
|
| 11 |
+
:param message: the message describing the exception
|
| 12 |
+
:param cause: the original exception that caused this exception, if any.
|
| 13 |
+
For exceptions raised during request handling, this is typically
|
| 14 |
+
* an LSPError for errors returned by the LSP server
|
| 15 |
+
* LanguageServerTerminatedException for errors due to the language server having terminated.
|
| 16 |
+
"""
|
| 17 |
+
self.cause = cause
|
| 18 |
+
super().__init__(message)
|
| 19 |
+
|
| 20 |
+
def is_language_server_terminated(self):
|
| 21 |
+
"""
|
| 22 |
+
:return: True if the exception is caused by the language server having terminated as indicated
|
| 23 |
+
by the causing exception being an instance of LanguageServerTerminatedException.
|
| 24 |
+
"""
|
| 25 |
+
from .ls_handler import LanguageServerTerminatedException
|
| 26 |
+
|
| 27 |
+
return isinstance(self.cause, LanguageServerTerminatedException)
|
| 28 |
+
|
| 29 |
+
def __str__(self) -> str:
|
| 30 |
+
"""
|
| 31 |
+
Returns a string representation of the exception.
|
| 32 |
+
"""
|
| 33 |
+
s = super().__str__()
|
| 34 |
+
if self.cause:
|
| 35 |
+
if "\n" in s:
|
| 36 |
+
s += "\n"
|
| 37 |
+
else:
|
| 38 |
+
s += " "
|
| 39 |
+
s += f"(caused by {self.cause})"
|
| 40 |
+
return s
|
projects/ui/serena-new/src/solidlsp/ls_handler.py
ADDED
|
@@ -0,0 +1,581 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import json
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import platform
|
| 6 |
+
import subprocess
|
| 7 |
+
import threading
|
| 8 |
+
import time
|
| 9 |
+
from collections.abc import Callable
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
from queue import Empty, Queue
|
| 12 |
+
from typing import Any
|
| 13 |
+
|
| 14 |
+
import psutil
|
| 15 |
+
from sensai.util.string import ToStringMixin
|
| 16 |
+
|
| 17 |
+
from solidlsp.ls_exceptions import SolidLSPException
|
| 18 |
+
from solidlsp.ls_request import LanguageServerRequest
|
| 19 |
+
from solidlsp.lsp_protocol_handler.lsp_requests import LspNotification
|
| 20 |
+
from solidlsp.lsp_protocol_handler.lsp_types import ErrorCodes
|
| 21 |
+
from solidlsp.lsp_protocol_handler.server import (
|
| 22 |
+
ENCODING,
|
| 23 |
+
LSPError,
|
| 24 |
+
MessageType,
|
| 25 |
+
PayloadLike,
|
| 26 |
+
ProcessLaunchInfo,
|
| 27 |
+
StringDict,
|
| 28 |
+
content_length,
|
| 29 |
+
create_message,
|
| 30 |
+
make_error_response,
|
| 31 |
+
make_notification,
|
| 32 |
+
make_request,
|
| 33 |
+
make_response,
|
| 34 |
+
)
|
| 35 |
+
from solidlsp.util.subprocess_util import subprocess_kwargs
|
| 36 |
+
|
| 37 |
+
log = logging.getLogger(__name__)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class LanguageServerTerminatedException(Exception):
|
| 41 |
+
"""
|
| 42 |
+
Exception raised when the language server process has terminated unexpectedly.
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
def __init__(self, message: str, cause: Exception | None = None) -> None:
|
| 46 |
+
super().__init__(message)
|
| 47 |
+
self.message = message
|
| 48 |
+
self.cause = cause
|
| 49 |
+
|
| 50 |
+
def __str__(self) -> str:
|
| 51 |
+
return f"LanguageServerTerminatedException: {self.message}" + (f"; Cause: {self.cause}" if self.cause else "")
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class Request(ToStringMixin):
    """Tracks one outstanding LSP request; a waiter blocks on it until its result or error arrives."""

    @dataclass
    class Result:
        payload: PayloadLike | None = None
        error: Exception | None = None

        def is_error(self) -> bool:
            return self.error is not None

    def __init__(self, request_id: int, method: str) -> None:
        self._request_id = request_id
        self._method = method
        self._status = "pending"
        # carries exactly one Result instance from the reader thread to the waiter
        self._result_queue: Queue = Queue()

    def _tostring_includes(self) -> list[str]:
        return ["_request_id", "_status", "_method"]

    def on_result(self, params: PayloadLike) -> None:
        """Mark the request completed and hand the payload to the waiting thread."""
        self._status = "completed"
        self._result_queue.put(Request.Result(payload=params))

    def on_error(self, err: Exception) -> None:
        """
        :param err: the error that occurred while processing the request (typically an LSPError
            for errors returned by the LS or LanguageServerTerminatedException if the error
            is due to the language server process terminating unexpectedly).
        """
        self._status = "error"
        self._result_queue.put(Request.Result(error=err))

    def get_result(self, timeout: float | None = None) -> Result:
        """Block until a result or error is available.

        :raises TimeoutError: if a timeout was given and elapsed before a result arrived.
        """
        try:
            return self._result_queue.get(timeout=timeout)
        except Empty as e:
            if timeout is None:
                raise
            raise TimeoutError(f"Request timed out ({timeout=})") from e
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class SolidLanguageServerHandler:
|
| 96 |
+
"""
|
| 97 |
+
This class provides the implementation of Python client for the Language Server Protocol.
|
| 98 |
+
A class that launches the language server and communicates with it
|
| 99 |
+
using the Language Server Protocol (LSP).
|
| 100 |
+
|
| 101 |
+
It provides methods for sending requests, responses, and notifications to the server
|
| 102 |
+
and for registering handlers for requests and notifications from the server.
|
| 103 |
+
|
| 104 |
+
Uses JSON-RPC 2.0 for communication with the server over stdin/stdout.
|
| 105 |
+
|
| 106 |
+
Attributes:
|
| 107 |
+
send: A LspRequest object that can be used to send requests to the server and
|
| 108 |
+
await for the responses.
|
| 109 |
+
notify: A LspNotification object that can be used to send notifications to the server.
|
| 110 |
+
cmd: A string that represents the command to launch the language server process.
|
| 111 |
+
process: A subprocess.Popen object that represents the language server process.
|
| 112 |
+
request_id: An integer that represents the next available request id for the client.
|
| 113 |
+
_pending_requests: A dictionary that maps request ids to Request objects that
|
| 114 |
+
store the results or errors of the requests.
|
| 115 |
+
on_request_handlers: A dictionary that maps method names to callback functions
|
| 116 |
+
that handle requests from the server.
|
| 117 |
+
on_notification_handlers: A dictionary that maps method names to callback functions
|
| 118 |
+
that handle notifications from the server.
|
| 119 |
+
logger: An optional function that takes two strings (source and destination) and
|
| 120 |
+
a payload dictionary, and logs the communication between the client and the server.
|
| 121 |
+
tasks: A dictionary that maps task ids to asyncio.Task objects that represent
|
| 122 |
+
the asynchronous tasks created by the handler.
|
| 123 |
+
task_counter: An integer that represents the next available task id for the handler.
|
| 124 |
+
loop: An asyncio.AbstractEventLoop object that represents the event loop used by the handler.
|
| 125 |
+
start_independent_lsp_process: An optional boolean flag that indicates whether to start the
|
| 126 |
+
language server process in an independent process group. Default is `True`. Setting it to
|
| 127 |
+
`False` means that the language server process will be in the same process group as the
|
| 128 |
+
the current process, and any SIGINT and SIGTERM signals will be sent to both processes.
|
| 129 |
+
|
| 130 |
+
"""
|
| 131 |
+
|
| 132 |
+
def __init__(
    self,
    process_launch_info: ProcessLaunchInfo,
    logger: Callable[[str, str, StringDict | str], None] | None = None,
    start_independent_lsp_process=True,
    request_timeout: float | None = None,
) -> None:
    """
    :param process_launch_info: command, environment and working directory used to launch
        the language server process.
    :param logger: optional callback ``(source, destination, payload)`` used to log the
        communication between client and server.
    :param start_independent_lsp_process: whether to start the language server in its own
        process group (so SIGINT/SIGTERM sent to this process are not also delivered to it).
    :param request_timeout: timeout, in seconds, applied to every request sent to the server
        (None means wait indefinitely).
    """
    # objects for issuing LSP requests (blocking) and notifications (fire-and-forget)
    self.send = LanguageServerRequest(self)
    self.notify = LspNotification(self.send_notification)

    self.process_launch_info = process_launch_info
    self.process: subprocess.Popen | None = None
    # set by shutdown(); suppresses "terminated unexpectedly" handling in the reader threads
    self._is_shutting_down = False

    # next JSON-RPC request id; incremented under _request_id_lock in send_request
    self.request_id = 1
    # request id -> Request awaiting its response; guarded by _response_handlers_lock
    self._pending_requests: dict[Any, Request] = {}
    # method name -> callback for requests/notifications arriving FROM the server
    self.on_request_handlers = {}
    self.on_notification_handlers = {}
    self.logger = logger
    self.tasks = {}
    self.task_counter = 0
    self.loop = None
    self.start_independent_lsp_process = start_independent_lsp_process
    self._request_timeout = request_timeout

    # Add thread locks for shared resources to prevent race conditions
    self._stdin_lock = threading.Lock()
    self._request_id_lock = threading.Lock()
    self._response_handlers_lock = threading.Lock()
    self._tasks_lock = threading.Lock()
|
| 163 |
+
def set_request_timeout(self, timeout: float | None) -> None:
|
| 164 |
+
"""
|
| 165 |
+
:param timeout: the timeout, in seconds, for all requests sent to the language server.
|
| 166 |
+
"""
|
| 167 |
+
self._request_timeout = timeout
|
| 168 |
+
|
| 169 |
+
def is_running(self) -> bool:
    """
    Check whether the language server process is currently running.

    Uses poll() instead of reading ``returncode`` directly: ``returncode`` is only
    updated once the process has been polled/waited on, so a process that exited
    on its own would otherwise still be reported as running.
    """
    return self.process is not None and self.process.poll() is None
|
| 174 |
+
|
| 175 |
+
def start(self) -> None:
    """
    Starts the language server process and creates a task to continuously read from its stdout to handle communications
    from the server to the client
    """
    # run the language server with the current environment augmented by launch-specific variables
    child_proc_env = os.environ.copy()
    child_proc_env.update(self.process_launch_info.env)

    cmd = self.process_launch_info.cmd
    is_windows = platform.system() == "Windows"
    if not isinstance(cmd, str) and not is_windows:
        # Since we are using the shell, we need to convert the command list to a single string
        # on Linux/macOS
        # NOTE(review): a plain " ".join does not quote arguments containing spaces or shell
        # metacharacters -- confirm that callers never pass such arguments (shlex.join would
        # be the safe alternative)
        cmd = " ".join(cmd)
    log.info("Starting language server process via command: %s", self.process_launch_info.cmd)
    kwargs = subprocess_kwargs()
    # optionally detach the language server into its own session/process group
    kwargs["start_new_session"] = self.start_independent_lsp_process
    self.process = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=child_proc_env,
        cwd=self.process_launch_info.cwd,
        shell=True,
        **kwargs,
    )

    # Check if process terminated immediately
    # NOTE(review): returncode is only set after poll()/wait(), so right after Popen this
    # is almost always None even if the process died -- confirm whether poll() was intended
    if self.process.returncode is not None:
        log.error("Language server has already terminated/could not be started")
        # Process has already terminated
        stderr_data = self.process.stderr.read()
        error_message = stderr_data.decode("utf-8", errors="replace")
        raise RuntimeError(f"Process terminated immediately with code {self.process.returncode}. Error: {error_message}")

    # start threads to read stdout and stderr of the process
    threading.Thread(
        target=self._read_ls_process_stdout,
        name="LSP-stdout-reader",
        daemon=True,
    ).start()
    threading.Thread(
        target=self._read_ls_process_stderr,
        name="LSP-stderr-reader",
        daemon=True,
    ).start()
|
| 222 |
+
|
| 223 |
+
def stop(self) -> None:
    """
    Detach the language server process from this handler and clean it up
    (close its pipes and terminate it if it is still running).
    """
    process, self.process = self.process, None
    if process is not None:
        self._cleanup_process(process)
|
| 231 |
+
|
| 232 |
+
def _cleanup_process(self, process):
    """Clean up a process: close stdin, terminate/kill process, close stdout/stderr.

    :param process: the subprocess.Popen object of the language server to clean up.
    """
    # Close stdin first to prevent deadlocks
    # See: https://bugs.python.org/issue35539
    self._safely_close_pipe(process.stdin)

    # Terminate/kill the process if it's still running
    # NOTE(review): returncode is only updated after poll()/wait(); a process that exited
    # on its own may still be signalled here, which is harmless but redundant -- confirm
    if process.returncode is None:
        self._terminate_or_kill_process(process)

    # Close stdout and stderr pipes after process has exited
    # This is essential to prevent "I/O operation on closed pipe" errors and
    # "Event loop is closed" errors during garbage collection
    # See: https://bugs.python.org/issue41320 and https://github.com/python/cpython/issues/88050
    self._safely_close_pipe(process.stdout)
    self._safely_close_pipe(process.stderr)
|
| 248 |
+
|
| 249 |
+
def _safely_close_pipe(self, pipe):
    """Close the given pipe if it is set, swallowing any error raised while closing."""
    if not pipe:
        return
    try:
        pipe.close()
    except Exception:
        pass
|
| 256 |
+
|
| 257 |
+
def _terminate_or_kill_process(self, process):
    """Try to terminate the process gracefully, then forcefully if necessary.

    Sends terminate to the whole process tree first; if the process has not exited
    after a grace period, sends kill to the tree as a last resort. (Previously only
    terminate was sent, despite stop()'s documented contract of killing if necessary.)

    :param process: the subprocess.Popen object to shut down.
    """
    # First try to terminate the process tree gracefully
    self._signal_process_tree(process, terminate=True)
    try:
        process.wait(timeout=5.0)
    except subprocess.TimeoutExpired:
        # Graceful termination did not complete within the grace period -- force-kill
        self._signal_process_tree(process, terminate=False)
        try:
            process.wait(timeout=5.0)
        except subprocess.TimeoutExpired:
            log.error("Language server process could not be killed")
|
| 261 |
+
|
| 262 |
+
def _signal_process_tree(self, process, terminate=True):
    """Send a signal (terminate or kill) to the process and all its children, best-effort.

    All signalling is best-effort: processes may vanish or be inaccessible at any point,
    so every failure is swallowed. (The previous exception tuples
    ``(psutil.NoSuchProcess, psutil.AccessDenied, Exception)`` were redundant --
    ``Exception`` already subsumes the psutil-specific types.)

    :param process: the subprocess.Popen object whose tree should be signalled.
    :param terminate: if True send terminate (graceful), otherwise kill (forceful).
    """
    signal_method = "terminate" if terminate else "kill"

    # Resolve the process via psutil so its children can be reached as well
    parent = None
    try:
        parent = psutil.Process(process.pid)
    except Exception:
        pass

    if parent is not None and parent.is_running():
        # Signal children first, then the parent
        for proc in [*parent.children(recursive=True), parent]:
            try:
                getattr(proc, signal_method)()
            except Exception:
                pass
    else:
        # Fall back to direct process signaling
        try:
            getattr(process, signal_method)()
        except Exception:
            pass
|
| 293 |
+
|
| 294 |
+
def shutdown(self) -> None:
    """
    Perform the shutdown sequence for the client, including sending the shutdown request to the server and notifying it of exit
    """
    # mark shutdown so the reader threads do not treat the subsequent EOF as an error
    self._is_shutting_down = True
    self._log("Sending shutdown request to server")
    self.send.shutdown()
    self._log("Received shutdown response from server")
    self._log("Sending exit notification to server")
    self.notify.exit()
    self._log("Sent exit notification to server")
|
| 305 |
+
|
| 306 |
+
def _log(self, message: str | StringDict) -> None:
    """Forward a log message to the configured logger callback, if one is set."""
    if self.logger is None:
        return
    self.logger("client", "logger", message)
|
| 312 |
+
|
| 313 |
+
@staticmethod
def _read_bytes_from_process(process, stream, num_bytes):
    """Read exactly num_bytes from the given stream of the process.

    Retries (with a short sleep) while the process is alive but no data is available.

    :param process: the subprocess.Popen object, polled to detect termination.
    :param stream: the binary stream to read from (the process's stdout).
    :param num_bytes: the exact number of bytes to read.
    :raises LanguageServerTerminatedException: if the process terminates before the
        full payload could be read.
    """
    data = b""
    while len(data) < num_bytes:
        chunk = stream.read(num_bytes - len(data))
        if not chunk:
            if process.poll() is not None:
                # report how many of the expected bytes arrived before termination
                # (the two values were previously interpolated in swapped order)
                raise LanguageServerTerminatedException(
                    f"Process terminated while trying to read response (read {len(data)} of {num_bytes} bytes before termination)"
                )
            # Process still running but no data available yet, retry after a short delay
            time.sleep(0.01)
            continue
        data += chunk
    return data
|
| 329 |
+
|
| 330 |
+
def _read_ls_process_stdout(self) -> None:
    """
    Continuously read from the language server process stdout and handle the messages
    invoking the registered response and notification handlers
    """
    exception: Exception | None = None
    try:
        while self.process and self.process.stdout:
            if self.process.poll() is not None:  # process has terminated
                break
            # read one header line; a message starts with a Content-Length header
            line = self.process.stdout.readline()
            if not line:
                continue
            try:
                num_bytes = content_length(line)
            except ValueError:
                # malformed header line; skip it
                continue
            if num_bytes is None:
                # not a Content-Length line; keep scanning for the next message
                continue
            # consume the remaining header lines up to the blank separator line
            while line and line.strip():
                line = self.process.stdout.readline()
                if not line:
                    continue
            # read exactly Content-Length bytes of the JSON-RPC message body
            body = self._read_bytes_from_process(self.process, self.process.stdout, num_bytes)

            self._handle_body(body)
    except LanguageServerTerminatedException as e:
        exception = e
    except (BrokenPipeError, ConnectionResetError) as e:
        exception = LanguageServerTerminatedException("Language server process terminated while reading stdout", cause=e)
    except Exception as e:
        exception = LanguageServerTerminatedException("Unexpected error while reading stdout from language server process", cause=e)
    log.info("Language server stdout reader thread has terminated")
    if not self._is_shutting_down:
        # the reader only ends outside of shutdown if something went wrong:
        # fail all requests that are still waiting for a response
        if exception is None:
            exception = LanguageServerTerminatedException("Language server stdout read process terminated unexpectedly")
        log.error(str(exception))
        self._cancel_pending_requests(exception)
|
| 368 |
+
|
| 369 |
+
def _read_ls_process_stderr(self) -> None:
    """
    Continuously read from the language server process stderr and log the messages
    """
    try:
        while self.process and self.process.stderr:
            # stop once the process has terminated
            if self.process.poll() is not None:
                break
            raw_line = self.process.stderr.readline()
            if not raw_line:
                continue
            text = raw_line.decode(ENCODING, errors="replace")
            lowered = text.lower()
            # heuristically classify the line as an error or an informational message
            looks_like_error = "error" in lowered or "exception" in lowered or text.startswith("E[")
            log.log(logging.ERROR if looks_like_error else logging.INFO, text)
    except Exception as e:
        log.error("Error while reading stderr from language server process: %s", e, exc_info=e)
    if self._is_shutting_down:
        log.info("Language server stderr reader thread has terminated")
    else:
        log.error("Language server stderr reader thread terminated unexpectedly")
|
| 394 |
+
|
| 395 |
+
def _handle_body(self, body: bytes) -> None:
    """
    Parse the body text received from the language server process and invoke the appropriate handler
    """
    try:
        self._receive_payload(json.loads(body))
    except json.JSONDecodeError as ex:
        self._log(f"malformed JSON: {ex}")
    except (OSError, UnicodeDecodeError) as ex:
        # both decoding-level failures are reported the same way
        self._log(f"malformed {ENCODING}: {ex}")
|
| 407 |
+
|
| 408 |
+
def _receive_payload(self, payload: StringDict) -> None:
    """
    Determine if the payload received from server is for a request, response, or notification and invoke the appropriate handler
    """
    if self.logger:
        self.logger("server", "client", payload)
    try:
        has_method = "method" in payload
        has_id = "id" in payload
        if has_method and has_id:
            self._request_handler(payload)
        elif has_method:
            self._notification_handler(payload)
        elif has_id:
            self._response_handler(payload)
        else:
            self._log(f"Unknown payload type: {payload}")
    except Exception as err:
        self._log(f"Error handling server payload: {err}")
|
| 426 |
+
|
| 427 |
+
def send_notification(self, method: str, params: dict | None = None) -> None:
    """
    Send notification pertaining to the given method to the server with the given parameters
    """
    payload = make_notification(method, params)
    self._send_payload(payload)
|
| 432 |
+
|
| 433 |
+
def send_response(self, request_id: Any, params: PayloadLike) -> None:
    """
    Send response to the given request id to the server with the given parameters
    """
    payload = make_response(request_id, params)
    self._send_payload(payload)
|
| 438 |
+
|
| 439 |
+
def send_error_response(self, request_id: Any, err: LSPError) -> None:
    """
    Send error response to the given request id to the server with the given error
    """
    # (removed a copy-pasted comment about tasks/task_counter locking that did not
    # apply here -- no shared task state is touched in this method)
    self._send_payload(make_error_response(request_id, err))
|
| 445 |
+
|
| 446 |
+
def _cancel_pending_requests(self, exception: Exception) -> None:
    """
    Cancel all pending requests by setting their results to an error
    """
    with self._response_handlers_lock:
        pending = list(self._pending_requests.values())
        log.info("Cancelling %d pending language server requests", len(pending))
        for pending_request in pending:
            log.info("Cancelling %s", pending_request)
            pending_request.on_error(exception)
        self._pending_requests.clear()
|
| 456 |
+
|
| 457 |
+
def send_request(self, method: str, params: dict | None = None) -> PayloadLike:
    """
    Send a request to the server, register the request id, and block until the response arrives.

    :param method: the LSP method name.
    :param params: the request parameters.
    :return: the result payload returned by the server.
    :raises TimeoutError: if no response arrives within the configured request timeout.
    :raises SolidLSPException: if the server returned an error or the request failed.
    """
    with self._request_id_lock:
        request_id = self.request_id
        self.request_id += 1

    request = Request(request_id=request_id, method=method)
    log.debug("Starting: %s", request)

    with self._response_handlers_lock:
        self._pending_requests[request_id] = request

    self._send_payload(make_request(method, request_id, params))

    self._log(f"Waiting for response to request {method} with params:\n{params}")
    try:
        result = request.get_result(timeout=self._request_timeout)
    except TimeoutError:
        # Remove the stale entry so the pending-request dict does not grow without
        # bound on repeated timeouts (previously the entry was leaked)
        with self._response_handlers_lock:
            self._pending_requests.pop(request_id, None)
        raise
    log.debug("Completed: %s", request)

    self._log("Processing result")
    if result.is_error():
        raise SolidLSPException(f"Error processing request {method} with params:\n{params}", cause=result.error) from result.error

    self._log(f"Returning non-error result, which is:\n{result.payload}")
    return result.payload
|
| 483 |
+
|
| 484 |
+
def _send_payload(self, payload: StringDict) -> None:
    """
    Send the payload to the server by writing to its stdin asynchronously.

    :param payload: the complete JSON-RPC payload dict (request, response or notification).
    """
    # silently drop the payload if the process or its stdin is gone (e.g. during shutdown)
    if not self.process or not self.process.stdin:
        return
    self._log(payload)
    msg = create_message(payload)

    # Use lock to prevent concurrent writes to stdin that cause buffer corruption
    with self._stdin_lock:
        try:
            self.process.stdin.writelines(msg)
            self.process.stdin.flush()
        except (BrokenPipeError, ConnectionResetError, OSError) as e:
            # Log the error but don't raise to prevent cascading failures
            if self.logger:
                self.logger("client", "logger", f"Failed to write to stdin: {e}")
            return
|
| 503 |
+
|
| 504 |
+
def on_request(self, method: str, cb) -> None:
    """
    Register the callback function to handle requests from the server to the client for the given method
    (replacing any previously registered callback for that method).
    """
    self.on_request_handlers[method] = cb
|
| 509 |
+
|
| 510 |
+
def on_notification(self, method: str, cb) -> None:
    """
    Register the callback function to handle notifications from the server to the client for the given method
    (replacing any previously registered callback for that method).
    """
    self.on_notification_handlers[method] = cb
|
| 515 |
+
|
| 516 |
+
def _response_handler(self, response: StringDict) -> None:
|
| 517 |
+
"""
|
| 518 |
+
Handle the response received from the server for a request, using the id to determine the request
|
| 519 |
+
"""
|
| 520 |
+
with self._response_handlers_lock:
|
| 521 |
+
request = self._pending_requests.pop(response["id"])
|
| 522 |
+
|
| 523 |
+
if "result" in response and "error" not in response:
|
| 524 |
+
request.on_result(response["result"])
|
| 525 |
+
elif "result" not in response and "error" in response:
|
| 526 |
+
request.on_error(LSPError.from_lsp(response["error"]))
|
| 527 |
+
else:
|
| 528 |
+
request.on_error(LSPError(ErrorCodes.InvalidRequest, ""))
|
| 529 |
+
|
| 530 |
+
def _request_handler(self, response: StringDict) -> None:
|
| 531 |
+
"""
|
| 532 |
+
Handle the request received from the server: call the appropriate callback function and return the result
|
| 533 |
+
"""
|
| 534 |
+
method = response.get("method", "")
|
| 535 |
+
params = response.get("params")
|
| 536 |
+
request_id = response.get("id")
|
| 537 |
+
handler = self.on_request_handlers.get(method)
|
| 538 |
+
if not handler:
|
| 539 |
+
self.send_error_response(
|
| 540 |
+
request_id,
|
| 541 |
+
LSPError(
|
| 542 |
+
ErrorCodes.MethodNotFound,
|
| 543 |
+
f"method '{method}' not handled on client.",
|
| 544 |
+
),
|
| 545 |
+
)
|
| 546 |
+
return
|
| 547 |
+
try:
|
| 548 |
+
self.send_response(request_id, handler(params))
|
| 549 |
+
except LSPError as ex:
|
| 550 |
+
self.send_error_response(request_id, ex)
|
| 551 |
+
except Exception as ex:
|
| 552 |
+
self.send_error_response(request_id, LSPError(ErrorCodes.InternalError, str(ex)))
|
| 553 |
+
|
| 554 |
+
def _notification_handler(self, response: StringDict) -> None:
|
| 555 |
+
"""
|
| 556 |
+
Handle the notification received from the server: call the appropriate callback function
|
| 557 |
+
"""
|
| 558 |
+
method = response.get("method", "")
|
| 559 |
+
params = response.get("params")
|
| 560 |
+
handler = self.on_notification_handlers.get(method)
|
| 561 |
+
if not handler:
|
| 562 |
+
self._log(f"unhandled {method}")
|
| 563 |
+
return
|
| 564 |
+
try:
|
| 565 |
+
handler(params)
|
| 566 |
+
except asyncio.CancelledError:
|
| 567 |
+
return
|
| 568 |
+
except Exception as ex:
|
| 569 |
+
if (not self._is_shutting_down) and self.logger:
|
| 570 |
+
self.logger(
|
| 571 |
+
"client",
|
| 572 |
+
"logger",
|
| 573 |
+
str(
|
| 574 |
+
{
|
| 575 |
+
"type": MessageType.error,
|
| 576 |
+
"message": str(ex),
|
| 577 |
+
"method": method,
|
| 578 |
+
"params": params,
|
| 579 |
+
}
|
| 580 |
+
),
|
| 581 |
+
)
|
projects/ui/serena-new/src/solidlsp/ls_logger.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Multilspy logger module.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import inspect
|
| 6 |
+
import logging
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class LogLine(BaseModel):
    """
    Represents a line in the Multilspy log
    """

    # Timestamp of the log entry, formatted "%Y-%m-%d %H:%M:%S".
    time: str
    # Log level name (e.g. "INFO"), as produced by logging.getLevelName.
    level: str
    # Basename of the source file the log call originated from.
    caller_file: str
    # Name of the function that issued the log call.
    caller_name: str
    # Line number of the log call within caller_file.
    caller_line: int
    # The sanitized log message itself (quotes/newlines normalized by the logger).
    message: str
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class LanguageServerLogger:
    """
    Logger wrapping the standard ``logging`` module for solidlsp.

    Messages can optionally be emitted as JSON-serialized :class:`LogLine`
    records that carry caller information (file, function, line).
    """

    def __init__(self, json_format: bool = False, log_level: int = logging.INFO) -> None:
        """
        :param json_format: if True, emit each message as a JSON-serialized LogLine;
            otherwise log the plain (sanitized) message text.
        :param log_level: log level for the underlying "solidlsp" logger.
        """
        self.logger = logging.getLogger("solidlsp")
        self.logger.setLevel(log_level)
        self.json_format = json_format

    def log(self, debug_message: str, level: int, sanitized_error_message: str = "", stacklevel: int = 2) -> None:
        """
        Log the debug and sanitized messages using the logger.

        Single quotes are replaced by double quotes and newlines by spaces so
        that each message occupies a single log line.

        :param debug_message: the message to log
        :param level: a ``logging`` level (e.g. ``logging.INFO``)
        :param sanitized_error_message: optional sanitized variant; currently only
            normalized, not emitted separately
        :param stacklevel: passed to ``Logger.log`` so caller attribution skips wrappers
        """
        debug_message = debug_message.replace("'", '"').replace("\n", " ")
        sanitized_error_message = sanitized_error_message.replace("'", '"').replace("\n", " ")

        if self.json_format:
            # Caller details are only used by the structured LogLine, so the
            # (relatively expensive) stack inspection is done only in this branch.
            curframe = inspect.currentframe()
            calframe = inspect.getouterframes(curframe, 2)
            # Normalize separators so the basename is extracted correctly on
            # Windows paths (backslashes) as well as POSIX paths.
            caller_file = calframe[1][1].replace("\\", "/").split("/")[-1]
            caller_line = calframe[1][2]
            caller_name = calframe[1][3]

            # Construct the debug log line
            debug_log_line = LogLine(
                time=str(datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
                level=logging.getLevelName(level),
                caller_file=caller_file,
                caller_name=caller_name,
                caller_line=caller_line,
                message=debug_message,
            )

            self.logger.log(
                level=level,
                msg=debug_log_line.json(),
                stacklevel=stacklevel,
            )
        else:
            self.logger.log(level, debug_message, stacklevel=stacklevel)
|
projects/ui/serena-new/src/solidlsp/ls_request.py
ADDED
|
@@ -0,0 +1,383 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, Any, Union
|
| 2 |
+
|
| 3 |
+
from solidlsp.lsp_protocol_handler import lsp_types
|
| 4 |
+
|
| 5 |
+
if TYPE_CHECKING:
|
| 6 |
+
from .ls_handler import SolidLanguageServerHandler
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class LanguageServerRequest:
    """
    Typed convenience wrappers around ``SolidLanguageServerHandler.send_request``:
    one method per LSP request, with parameter and result types from ``lsp_types``.
    Method docstrings are taken from the LSP specification.
    """

    def __init__(self, handler: "SolidLanguageServerHandler"):
        # The handler performs the actual JSON-RPC communication with the server.
        self.handler = handler

    def _send_request(self, method: str, params: Any | None = None) -> Any:
        # Single dispatch point: every public request method funnels through here.
        return self.handler.send_request(method, params)

    def implementation(self, params: lsp_types.ImplementationParams) -> Union["lsp_types.Definition", list["lsp_types.LocationLink"], None]:
        """A request to resolve the implementation locations of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPositionParams]
        (#TextDocumentPositionParams) the response is of type {@link Definition} or a
        Thenable that resolves to such.
        """
        return self._send_request("textDocument/implementation", params)

    def type_definition(
        self, params: lsp_types.TypeDefinitionParams
    ) -> Union["lsp_types.Definition", list["lsp_types.LocationLink"], None]:
        """A request to resolve the type definition locations of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPositionParams]
        (#TextDocumentPositionParams) the response is of type {@link Definition} or a
        Thenable that resolves to such.
        """
        return self._send_request("textDocument/typeDefinition", params)

    def document_color(self, params: lsp_types.DocumentColorParams) -> list["lsp_types.ColorInformation"]:
        """A request to list all color symbols found in a given text document. The request's
        parameter is of type {@link DocumentColorParams} the
        response is of type {@link ColorInformation ColorInformation[]} or a Thenable
        that resolves to such.
        """
        return self._send_request("textDocument/documentColor", params)

    def color_presentation(self, params: lsp_types.ColorPresentationParams) -> list["lsp_types.ColorPresentation"]:
        """A request to list all presentation for a color. The request's
        parameter is of type {@link ColorPresentationParams} the
        response is of type {@link ColorInformation ColorInformation[]} or a Thenable
        that resolves to such.
        """
        return self._send_request("textDocument/colorPresentation", params)

    def folding_range(self, params: lsp_types.FoldingRangeParams) -> list["lsp_types.FoldingRange"] | None:
        """A request to provide folding ranges in a document. The request's
        parameter is of type {@link FoldingRangeParams}, the
        response is of type {@link FoldingRangeList} or a Thenable
        that resolves to such.
        """
        return self._send_request("textDocument/foldingRange", params)

    def declaration(self, params: lsp_types.DeclarationParams) -> Union["lsp_types.Declaration", list["lsp_types.LocationLink"], None]:
        """A request to resolve the type definition locations of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPositionParams]
        (#TextDocumentPositionParams) the response is of type {@link Declaration}
        or a typed array of {@link DeclarationLink} or a Thenable that resolves
        to such.
        """
        return self._send_request("textDocument/declaration", params)

    def selection_range(self, params: lsp_types.SelectionRangeParams) -> list["lsp_types.SelectionRange"] | None:
        """A request to provide selection ranges in a document. The request's
        parameter is of type {@link SelectionRangeParams}, the
        response is of type {@link SelectionRange SelectionRange[]} or a Thenable
        that resolves to such.
        """
        return self._send_request("textDocument/selectionRange", params)

    def prepare_call_hierarchy(self, params: lsp_types.CallHierarchyPrepareParams) -> list["lsp_types.CallHierarchyItem"] | None:
        """A request to result a `CallHierarchyItem` in a document at a given position.
        Can be used as an input to an incoming or outgoing call hierarchy.

        @since 3.16.0
        """
        return self._send_request("textDocument/prepareCallHierarchy", params)

    def incoming_calls(self, params: lsp_types.CallHierarchyIncomingCallsParams) -> list["lsp_types.CallHierarchyIncomingCall"] | None:
        """A request to resolve the incoming calls for a given `CallHierarchyItem`.

        @since 3.16.0
        """
        return self._send_request("callHierarchy/incomingCalls", params)

    def outgoing_calls(self, params: lsp_types.CallHierarchyOutgoingCallsParams) -> list["lsp_types.CallHierarchyOutgoingCall"] | None:
        """A request to resolve the outgoing calls for a given `CallHierarchyItem`.

        @since 3.16.0
        """
        return self._send_request("callHierarchy/outgoingCalls", params)

    def semantic_tokens_full(self, params: lsp_types.SemanticTokensParams) -> Union["lsp_types.SemanticTokens", None]:
        """@since 3.16.0"""
        return self._send_request("textDocument/semanticTokens/full", params)

    def semantic_tokens_delta(
        self, params: lsp_types.SemanticTokensDeltaParams
    ) -> Union["lsp_types.SemanticTokens", "lsp_types.SemanticTokensDelta", None]:
        """@since 3.16.0"""
        return self._send_request("textDocument/semanticTokens/full/delta", params)

    def semantic_tokens_range(self, params: lsp_types.SemanticTokensRangeParams) -> Union["lsp_types.SemanticTokens", None]:
        """@since 3.16.0"""
        return self._send_request("textDocument/semanticTokens/range", params)

    def linked_editing_range(self, params: lsp_types.LinkedEditingRangeParams) -> Union["lsp_types.LinkedEditingRanges", None]:
        """A request to provide ranges that can be edited together.

        @since 3.16.0
        """
        return self._send_request("textDocument/linkedEditingRange", params)

    def will_create_files(self, params: lsp_types.CreateFilesParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """The will create files request is sent from the client to the server before files are actually
        created as long as the creation is triggered from within the client.

        @since 3.16.0
        """
        return self._send_request("workspace/willCreateFiles", params)

    def will_rename_files(self, params: lsp_types.RenameFilesParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """The will rename files request is sent from the client to the server before files are actually
        renamed as long as the rename is triggered from within the client.

        @since 3.16.0
        """
        return self._send_request("workspace/willRenameFiles", params)

    def will_delete_files(self, params: lsp_types.DeleteFilesParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """The did delete files notification is sent from the client to the server when
        files were deleted from within the client.

        @since 3.16.0
        """
        return self._send_request("workspace/willDeleteFiles", params)

    def moniker(self, params: lsp_types.MonikerParams) -> list["lsp_types.Moniker"] | None:
        """A request to get the moniker of a symbol at a given text document position.
        The request parameter is of type {@link TextDocumentPositionParams}.
        The response is of type {@link Moniker Moniker[]} or `null`.
        """
        return self._send_request("textDocument/moniker", params)

    def prepare_type_hierarchy(self, params: lsp_types.TypeHierarchyPrepareParams) -> list["lsp_types.TypeHierarchyItem"] | None:
        """A request to result a `TypeHierarchyItem` in a document at a given position.
        Can be used as an input to a subtypes or supertypes type hierarchy.

        @since 3.17.0
        """
        return self._send_request("textDocument/prepareTypeHierarchy", params)

    def type_hierarchy_supertypes(self, params: lsp_types.TypeHierarchySupertypesParams) -> list["lsp_types.TypeHierarchyItem"] | None:
        """A request to resolve the supertypes for a given `TypeHierarchyItem`.

        @since 3.17.0
        """
        return self._send_request("typeHierarchy/supertypes", params)

    def type_hierarchy_subtypes(self, params: lsp_types.TypeHierarchySubtypesParams) -> list["lsp_types.TypeHierarchyItem"] | None:
        """A request to resolve the subtypes for a given `TypeHierarchyItem`.

        @since 3.17.0
        """
        return self._send_request("typeHierarchy/subtypes", params)

    def inline_value(self, params: lsp_types.InlineValueParams) -> list["lsp_types.InlineValue"] | None:
        """A request to provide inline values in a document. The request's parameter is of
        type {@link InlineValueParams}, the response is of type
        {@link InlineValue InlineValue[]} or a Thenable that resolves to such.

        @since 3.17.0
        """
        return self._send_request("textDocument/inlineValue", params)

    def inlay_hint(self, params: lsp_types.InlayHintParams) -> list["lsp_types.InlayHint"] | None:
        """A request to provide inlay hints in a document. The request's parameter is of
        type {@link InlayHintsParams}, the response is of type
        {@link InlayHint InlayHint[]} or a Thenable that resolves to such.

        @since 3.17.0
        """
        return self._send_request("textDocument/inlayHint", params)

    def resolve_inlay_hint(self, params: lsp_types.InlayHint) -> "lsp_types.InlayHint":
        """A request to resolve additional properties for an inlay hint.
        The request's parameter is of type {@link InlayHint}, the response is
        of type {@link InlayHint} or a Thenable that resolves to such.

        @since 3.17.0
        """
        return self._send_request("inlayHint/resolve", params)

    def text_document_diagnostic(self, params: lsp_types.DocumentDiagnosticParams) -> "lsp_types.DocumentDiagnosticReport":
        """The document diagnostic request definition.

        @since 3.17.0
        """
        return self._send_request("textDocument/diagnostic", params)

    def workspace_diagnostic(self, params: lsp_types.WorkspaceDiagnosticParams) -> "lsp_types.WorkspaceDiagnosticReport":
        """The workspace diagnostic request definition.

        @since 3.17.0
        """
        return self._send_request("workspace/diagnostic", params)

    def initialize(self, params: lsp_types.InitializeParams) -> "lsp_types.InitializeResult":
        """The initialize request is sent from the client to the server.
        It is sent once as the request after starting up the server.
        The requests parameter is of type {@link InitializeParams}
        the response if of type {@link InitializeResult} of a Thenable that
        resolves to such.
        """
        return self._send_request("initialize", params)

    def shutdown(self) -> None:
        """A shutdown request is sent from the client to the server.
        It is sent once when the client decides to shutdown the
        server. The only notification that is sent after a shutdown request
        is the exit event.
        """
        return self._send_request("shutdown")

    def will_save_wait_until(self, params: lsp_types.WillSaveTextDocumentParams) -> list["lsp_types.TextEdit"] | None:
        """A document will save request is sent from the client to the server before
        the document is actually saved. The request can return an array of TextEdits
        which will be applied to the text document before it is saved. Please note that
        clients might drop results if computing the text edits took too long or if a
        server constantly fails on this request. This is done to keep the save fast and
        reliable.
        """
        return self._send_request("textDocument/willSaveWaitUntil", params)

    def completion(self, params: lsp_types.CompletionParams) -> Union[list["lsp_types.CompletionItem"], "lsp_types.CompletionList", None]:
        """Request to request completion at a given text document position. The request's
        parameter is of type {@link TextDocumentPosition} the response
        is of type {@link CompletionItem CompletionItem[]} or {@link CompletionList}
        or a Thenable that resolves to such.

        The request can delay the computation of the {@link CompletionItem.detail `detail`}
        and {@link CompletionItem.documentation `documentation`} properties to the `completionItem/resolve`
        request. However, properties that are needed for the initial sorting and filtering, like `sortText`,
        `filterText`, `insertText`, and `textEdit`, must not be changed during resolve.
        """
        return self._send_request("textDocument/completion", params)

    def resolve_completion_item(self, params: lsp_types.CompletionItem) -> "lsp_types.CompletionItem":
        """Request to resolve additional information for a given completion item.The request's
        parameter is of type {@link CompletionItem} the response
        is of type {@link CompletionItem} or a Thenable that resolves to such.
        """
        return self._send_request("completionItem/resolve", params)

    def hover(self, params: lsp_types.HoverParams) -> Union["lsp_types.Hover", None]:
        """Request to request hover information at a given text document position. The request's
        parameter is of type {@link TextDocumentPosition} the response is of
        type {@link Hover} or a Thenable that resolves to such.
        """
        return self._send_request("textDocument/hover", params)

    def signature_help(self, params: lsp_types.SignatureHelpParams) -> Union["lsp_types.SignatureHelp", None]:
        """Request signature help at a given text document position."""
        return self._send_request("textDocument/signatureHelp", params)

    def definition(self, params: lsp_types.DefinitionParams) -> Union["lsp_types.Definition", list["lsp_types.LocationLink"], None]:
        """A request to resolve the definition location of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPosition]
        (#TextDocumentPosition) the response is of either type {@link Definition}
        or a typed array of {@link DefinitionLink} or a Thenable that resolves
        to such.
        """
        return self._send_request("textDocument/definition", params)

    def references(self, params: lsp_types.ReferenceParams) -> list["lsp_types.Location"] | None:
        """A request to resolve project-wide references for the symbol denoted
        by the given text document position. The request's parameter is of
        type {@link ReferenceParams} the response is of type
        {@link Location Location[]} or a Thenable that resolves to such.
        """
        return self._send_request("textDocument/references", params)

    def document_highlight(self, params: lsp_types.DocumentHighlightParams) -> list["lsp_types.DocumentHighlight"] | None:
        """Request to resolve a {@link DocumentHighlight} for a given
        text document position. The request's parameter is of type [TextDocumentPosition]
        (#TextDocumentPosition) the request response is of type [DocumentHighlight[]]
        (#DocumentHighlight) or a Thenable that resolves to such.
        """
        return self._send_request("textDocument/documentHighlight", params)

    def document_symbol(
        self, params: lsp_types.DocumentSymbolParams
    ) -> list["lsp_types.SymbolInformation"] | list["lsp_types.DocumentSymbol"] | None:
        """A request to list all symbols found in a given text document. The request's
        parameter is of type {@link TextDocumentIdentifier} the
        response is of type {@link SymbolInformation SymbolInformation[]} or a Thenable
        that resolves to such.
        """
        return self._send_request("textDocument/documentSymbol", params)

    def code_action(self, params: lsp_types.CodeActionParams) -> list[Union["lsp_types.Command", "lsp_types.CodeAction"]] | None:
        """A request to provide commands for the given text document and range."""
        return self._send_request("textDocument/codeAction", params)

    def resolve_code_action(self, params: lsp_types.CodeAction) -> "lsp_types.CodeAction":
        """Request to resolve additional information for a given code action.The request's
        parameter is of type {@link CodeAction} the response
        is of type {@link CodeAction} or a Thenable that resolves to such.
        """
        return self._send_request("codeAction/resolve", params)

    def workspace_symbol(
        self, params: lsp_types.WorkspaceSymbolParams
    ) -> list["lsp_types.SymbolInformation"] | list["lsp_types.WorkspaceSymbol"] | None:
        """A request to list project-wide symbols matching the query string given
        by the {@link WorkspaceSymbolParams}. The response is
        of type {@link SymbolInformation SymbolInformation[]} or a Thenable that
        resolves to such.

        @since 3.17.0 - support for WorkspaceSymbol in the returned data. Clients
        need to advertise support for WorkspaceSymbols via the client capability
        `workspace.symbol.resolveSupport`.
        """
        return self._send_request("workspace/symbol", params)

    def resolve_workspace_symbol(self, params: lsp_types.WorkspaceSymbol) -> "lsp_types.WorkspaceSymbol":
        """A request to resolve the range inside the workspace
        symbol's location.

        @since 3.17.0
        """
        return self._send_request("workspaceSymbol/resolve", params)

    def code_lens(self, params: lsp_types.CodeLensParams) -> list["lsp_types.CodeLens"] | None:
        """A request to provide code lens for the given text document."""
        return self._send_request("textDocument/codeLens", params)

    def resolve_code_lens(self, params: lsp_types.CodeLens) -> "lsp_types.CodeLens":
        """A request to resolve a command for a given code lens."""
        return self._send_request("codeLens/resolve", params)

    def document_link(self, params: lsp_types.DocumentLinkParams) -> list["lsp_types.DocumentLink"] | None:
        """A request to provide document links"""
        return self._send_request("textDocument/documentLink", params)

    def resolve_document_link(self, params: lsp_types.DocumentLink) -> "lsp_types.DocumentLink":
        """Request to resolve additional information for a given document link. The request's
        parameter is of type {@link DocumentLink} the response
        is of type {@link DocumentLink} or a Thenable that resolves to such.
        """
        return self._send_request("documentLink/resolve", params)

    def formatting(self, params: lsp_types.DocumentFormattingParams) -> list["lsp_types.TextEdit"] | None:
        """A request to to format a whole document."""
        return self._send_request("textDocument/formatting", params)

    def range_formatting(self, params: lsp_types.DocumentRangeFormattingParams) -> list["lsp_types.TextEdit"] | None:
        """A request to to format a range in a document."""
        return self._send_request("textDocument/rangeFormatting", params)

    def on_type_formatting(self, params: lsp_types.DocumentOnTypeFormattingParams) -> list["lsp_types.TextEdit"] | None:
        """A request to format a document on type."""
        return self._send_request("textDocument/onTypeFormatting", params)

    def rename(self, params: lsp_types.RenameParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """A request to rename a symbol."""
        return self._send_request("textDocument/rename", params)

    def prepare_rename(self, params: lsp_types.PrepareRenameParams) -> Union["lsp_types.PrepareRenameResult", None]:
        """A request to test and perform the setup necessary for a rename.

        @since 3.16 - support for default behavior
        """
        return self._send_request("textDocument/prepareRename", params)

    def execute_command(self, params: lsp_types.ExecuteCommandParams) -> Union["lsp_types.LSPAny", None]:
        """A request send from the client to the server to execute a command. The request might return
        a workspace edit which the client will apply to the workspace.
        """
        return self._send_request("workspace/executeCommand", params)
|
projects/ui/serena-new/src/solidlsp/ls_types.py
ADDED
|
@@ -0,0 +1,343 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Defines wrapper objects around the types returned by LSP to ensure decoupling between LSP versions and multilspy
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
from enum import Enum, IntEnum
|
| 8 |
+
from typing import NotRequired, Union
|
| 9 |
+
|
| 10 |
+
from typing_extensions import TypedDict
|
| 11 |
+
|
| 12 |
+
URI = str
|
| 13 |
+
DocumentUri = str
|
| 14 |
+
Uint = int
|
| 15 |
+
RegExp = str
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class Position(TypedDict):
    """Position in a text document, expressed as zero-based line and character offset.

    The unit of the ``character`` offset depends on the position encoding
    negotiated between client and server (UTF-16 by default; configurable via
    the ``general.positionEncodings`` client capability, @since 3.17.0).
    Positions are line-end-character agnostic: a position can never denote a
    location inside a line terminator such as ``\\r\\n``.
    """

    line: Uint
    """ Line position in a document (zero-based).

    If a line number is greater than the number of lines in a document, it defaults back to the number of lines in the document.
    If a line number is negative, it defaults to 0. """
    character: Uint
    """ Character offset on a line in a document (zero-based).

    The meaning of this offset is determined by the negotiated
    `PositionEncodingKind`.

    If the character value is greater than the line length it defaults back to the
    line length. """
| 62 |
+
|
| 63 |
+
class Range(TypedDict):
    """A range in a text document expressed as (zero-based) start and end positions.

    To specify a range that contains a line *including* its line ending
    character(s), use an end position denoting the start of the next line,
    e.g. ``{"start": {"line": 5, "character": 23}, "end": {"line": 6, "character": 0}}``.
    """

    start: Position
    """ The range's start position. """
    end: Position
    """ The range's end position. """
| 82 |
+
|
| 83 |
+
class Location(TypedDict):
    """Represents a location inside a resource, such as a line
    inside a text file.
    """

    uri: DocumentUri
    range: Range
    # The two path fields below are not part of the LSP Location type; they
    # appear to be Serena additions derived from `uri`.
    absolutePath: str  # absolute filesystem path of the resource
    relativePath: str | None  # presumably relative to the project root; None when no relative path exists (e.g. different drive) — confirm with producer
| 93 |
+
|
| 94 |
+
class CompletionItemKind(IntEnum):
    """The kind of a completion entry.

    Integer values follow the LSP ``CompletionItemKind`` enumeration.
    """

    Text = 1
    Method = 2
    Function = 3
    Constructor = 4
    Field = 5
    Variable = 6
    Class = 7
    Interface = 8
    Module = 9
    Property = 10
    Unit = 11
    Value = 12
    Enum = 13
    Keyword = 14
    Snippet = 15
    Color = 16
    File = 17
    Reference = 18
    Folder = 19
    EnumMember = 20
    Constant = 21
    Struct = 22
    Event = 23
    Operator = 24
    TypeParameter = 25
| 123 |
+
|
| 124 |
+
class CompletionItem(TypedDict):
    """A completion item represents a text snippet that is
    proposed to complete text that is being typed.
    """

    completionText: str
    """ The completionText of this completion item.

    The completionText property is also by default the text that
    is inserted when selecting this completion."""

    kind: CompletionItemKind
    """ The kind of this completion item. Based of the kind
    an icon is chosen by the editor. """

    detail: NotRequired[str]
    """ A human-readable string with additional information
    about this item, like type or symbol information. """
| 143 |
+
|
| 144 |
+
class SymbolKind(IntEnum):
    """A symbol kind.

    Integer values follow the LSP ``SymbolKind`` enumeration.
    """

    File = 1
    Module = 2
    Namespace = 3
    Package = 4
    Class = 5
    Method = 6
    Property = 7
    Field = 8
    Constructor = 9
    Enum = 10
    Interface = 11
    Function = 12
    Variable = 13
    Constant = 14
    String = 15
    Number = 16
    Boolean = 17
    Array = 18
    Object = 19
    Key = 20
    Null = 21
    EnumMember = 22
    Struct = 23
    Event = 24
    Operator = 25
    TypeParameter = 26
| 174 |
+
|
| 175 |
+
class SymbolTag(IntEnum):
    """Symbol tags are extra annotations that tweak the rendering of a symbol.

    @since 3.16
    """

    Deprecated = 1
    """ Render a symbol as obsolete, usually using a strike-out. """
| 184 |
+
|
| 185 |
+
class UnifiedSymbolInformation(TypedDict):
    """Represents information about programming constructs like variables, classes,
    interfaces etc.

    Unifies the LSP ``SymbolInformation`` and ``DocumentSymbol`` shapes, with
    Serena-specific additions (``body``, ``children``, ``parent``).
    """

    deprecated: NotRequired[bool]
    """ Indicates if this symbol is deprecated.

    @deprecated Use tags instead """
    location: NotRequired[Location]
    """ The location of this symbol. The location's range is used by a tool
    to reveal the location in the editor. If the symbol is selected in the
    tool the range's start information is used to position the cursor. So
    the range usually spans more than the actual symbol's name and does
    normally include things like visibility modifiers.

    The range doesn't have to denote a node range in the sense of an abstract
    syntax tree. It can therefore not be used to re-construct a hierarchy of
    the symbols. """
    name: str
    """ The name of this symbol. """
    kind: SymbolKind
    """ The kind of this symbol. """
    tags: NotRequired[list[SymbolTag]]
    """ Tags for this symbol.

    @since 3.16.0 """
    containerName: NotRequired[str]
    """ The name of the symbol containing this symbol. This information is for
    user interface purposes (e.g. to render a qualifier in the user interface
    if necessary). It can't be used to re-infer a hierarchy for the document
    symbols.

    Note: within Serena, the parent attribute was added and should be used instead.
    Most LS don't provide containerName.
    """

    detail: NotRequired[str]
    """ More detail for this symbol, e.g the signature of a function. """

    range: NotRequired[Range]
    """ The range enclosing this symbol not including leading/trailing whitespace but everything else
    like comments. This information is typically used to determine if the clients cursor is
    inside the symbol to reveal in the symbol in the UI. """
    selectionRange: NotRequired[Range]
    """ The range that should be selected and revealed when this symbol is being picked, e.g the name of a function.
    Must be contained by the `range`. """

    body: NotRequired[str]
    """ The body of the symbol. """

    children: list[UnifiedSymbolInformation]
    """ The children of the symbol.
    Added to be compatible with `lsp_types.DocumentSymbol`,
    since it is sometimes useful to have the children of the symbol as a user-facing feature."""

    parent: NotRequired[UnifiedSymbolInformation | None]
    """The parent of the symbol, if there is any. Added with Serena, not part of the LSP.
    All symbols except the root packages will have a parent.
    """
| 246 |
+
|
| 247 |
+
class MarkupKind(Enum):
    """Describes the content type that a client supports in various
    result literals like `Hover`, `ParameterInfo` or `CompletionItem`.

    Please note that `MarkupKinds` must not start with a `$`. This kinds
    are reserved for internal usage.
    """

    PlainText = "plaintext"
    """ Plain text is supported as a content format """
    Markdown = "markdown"
    """ Markdown is supported as a content format """
| 260 |
+
|
| 261 |
+
class __MarkedString_Type_1(TypedDict):
    # Code-block variant of MarkedString: a language identifier plus a code snippet.
    language: str
    value: str


MarkedString = Union[str, "__MarkedString_Type_1"]
""" MarkedString can be used to render human readable text. It is either a markdown string
or a code-block that provides a language and a code snippet. The language identifier
is semantically equal to the optional language identifier in fenced code blocks in GitHub
issues. See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting

The pair of a language and a value is an equivalent to markdown:
```${language}
${value}
```

Note that markdown strings will be sanitized - that means html will be escaped.
@deprecated use MarkupContent instead. """
| 280 |
+
|
| 281 |
+
class MarkupContent(TypedDict):
    """A `MarkupContent` literal represents a string value which content is interpreted base on its
    kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds.

    If the kind is `markdown` then the value can contain fenced code blocks like in GitHub issues.
    See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting

    *Please Note* that clients might sanitize the returned markdown. A client could decide to
    remove HTML from the markdown to avoid script execution.
    """

    kind: MarkupKind
    """ The type of the Markup """
    value: str
    """ The content itself """
| 311 |
+
|
| 312 |
+
class Hover(TypedDict):
    """The result of a hover request."""

    # The MarkedString forms are deprecated in the LSP but kept here for
    # compatibility with older servers.
    contents: MarkupContent | MarkedString | list[MarkedString]
    """ The hover's content """
    range: NotRequired[Range]
    """ An optional range inside the text document that is used to
    visualize the hover, e.g. by changing the background color. """
| 321 |
+
|
| 322 |
+
class DiagnosticsSeverity(IntEnum):
    """Severity of a diagnostic; values follow the LSP ``DiagnosticSeverity`` enumeration."""

    ERROR = 1
    WARNING = 2
    INFORMATION = 3
    HINT = 4
| 328 |
+
|
| 329 |
+
class Diagnostic(TypedDict):
    """Diagnostic information for a text document."""

    # NOTE(review): `uri` is not part of the LSP Diagnostic type (diagnostics are
    # published per document) and `code` is optional in the spec but required
    # here — presumably deliberate Serena deviations; confirm against producers.
    uri: DocumentUri
    """ The URI of the text document to which the diagnostics apply. """
    range: Range
    """ The range of the text document to which the diagnostics apply. """
    severity: NotRequired[DiagnosticsSeverity]
    """ The severity of the diagnostic. """
    message: str
    """ The diagnostic message. """
    code: str
    """ The code of the diagnostic. """
    source: NotRequired[str]
    """ The source of the diagnostic, e.g. the name of the tool that produced it. """
projects/ui/serena-new/src/solidlsp/ls_utils.py
ADDED
|
@@ -0,0 +1,406 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This file contains various utility functions like I/O operations, handling paths, etc.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import gzip
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
import platform
|
| 9 |
+
import shutil
|
| 10 |
+
import subprocess
|
| 11 |
+
import uuid
|
| 12 |
+
import zipfile
|
| 13 |
+
from enum import Enum
|
| 14 |
+
from pathlib import Path, PurePath
|
| 15 |
+
|
| 16 |
+
import requests
|
| 17 |
+
|
| 18 |
+
from solidlsp.ls_exceptions import SolidLSPException
|
| 19 |
+
from solidlsp.ls_logger import LanguageServerLogger
|
| 20 |
+
from solidlsp.ls_types import UnifiedSymbolInformation
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class InvalidTextLocationError(Exception):
    """Raised when a (line, column) location does not exist in the given text."""

    pass
| 26 |
+
|
| 27 |
+
class TextUtils:
|
| 28 |
+
"""
|
| 29 |
+
Utilities for text operations.
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
@staticmethod
|
| 33 |
+
def get_line_col_from_index(text: str, index: int) -> tuple[int, int]:
|
| 34 |
+
"""
|
| 35 |
+
Returns the zero-indexed line and column number of the given index in the given text
|
| 36 |
+
"""
|
| 37 |
+
l = 0
|
| 38 |
+
c = 0
|
| 39 |
+
idx = 0
|
| 40 |
+
while idx < index:
|
| 41 |
+
if text[idx] == "\n":
|
| 42 |
+
l += 1
|
| 43 |
+
c = 0
|
| 44 |
+
else:
|
| 45 |
+
c += 1
|
| 46 |
+
idx += 1
|
| 47 |
+
|
| 48 |
+
return l, c
|
| 49 |
+
|
| 50 |
+
@staticmethod
|
| 51 |
+
def get_index_from_line_col(text: str, line: int, col: int) -> int:
|
| 52 |
+
"""
|
| 53 |
+
Returns the index of the given zero-indexed line and column number in the given text
|
| 54 |
+
"""
|
| 55 |
+
idx = 0
|
| 56 |
+
while line > 0:
|
| 57 |
+
if idx >= len(text):
|
| 58 |
+
raise InvalidTextLocationError
|
| 59 |
+
if text[idx] == "\n":
|
| 60 |
+
line -= 1
|
| 61 |
+
idx += 1
|
| 62 |
+
idx += col
|
| 63 |
+
return idx
|
| 64 |
+
|
| 65 |
+
@staticmethod
|
| 66 |
+
def _get_updated_position_from_line_and_column_and_edit(l: int, c: int, text_to_be_inserted: str) -> tuple[int, int]:
|
| 67 |
+
"""
|
| 68 |
+
Utility function to get the position of the cursor after inserting text at a given line and column.
|
| 69 |
+
"""
|
| 70 |
+
num_newlines_in_gen_text = text_to_be_inserted.count("\n")
|
| 71 |
+
if num_newlines_in_gen_text > 0:
|
| 72 |
+
l += num_newlines_in_gen_text
|
| 73 |
+
c = len(text_to_be_inserted.split("\n")[-1])
|
| 74 |
+
else:
|
| 75 |
+
c += len(text_to_be_inserted)
|
| 76 |
+
return (l, c)
|
| 77 |
+
|
| 78 |
+
@staticmethod
|
| 79 |
+
def delete_text_between_positions(text: str, start_line: int, start_col: int, end_line: int, end_col: int) -> tuple[str, str]:
|
| 80 |
+
"""
|
| 81 |
+
Deletes the text between the given start and end positions.
|
| 82 |
+
Returns the modified text and the deleted text.
|
| 83 |
+
"""
|
| 84 |
+
del_start_idx = TextUtils.get_index_from_line_col(text, start_line, start_col)
|
| 85 |
+
del_end_idx = TextUtils.get_index_from_line_col(text, end_line, end_col)
|
| 86 |
+
|
| 87 |
+
deleted_text = text[del_start_idx:del_end_idx]
|
| 88 |
+
new_text = text[:del_start_idx] + text[del_end_idx:]
|
| 89 |
+
return new_text, deleted_text
|
| 90 |
+
|
| 91 |
+
@staticmethod
|
| 92 |
+
def insert_text_at_position(text: str, line: int, col: int, text_to_be_inserted: str) -> tuple[str, int, int]:
|
| 93 |
+
"""
|
| 94 |
+
Inserts the given text at the given line and column.
|
| 95 |
+
Returns the modified text and the new line and column.
|
| 96 |
+
"""
|
| 97 |
+
try:
|
| 98 |
+
change_index = TextUtils.get_index_from_line_col(text, line, col)
|
| 99 |
+
except InvalidTextLocationError:
|
| 100 |
+
num_lines_in_text = text.count("\n") + 1
|
| 101 |
+
max_line = num_lines_in_text - 1
|
| 102 |
+
if line == max_line + 1 and col == 0: # trying to insert at new line after full text
|
| 103 |
+
# insert at end, adding missing newline
|
| 104 |
+
change_index = len(text)
|
| 105 |
+
text_to_be_inserted = "\n" + text_to_be_inserted
|
| 106 |
+
else:
|
| 107 |
+
raise
|
| 108 |
+
new_text = text[:change_index] + text_to_be_inserted + text[change_index:]
|
| 109 |
+
new_l, new_c = TextUtils._get_updated_position_from_line_and_column_and_edit(line, col, text_to_be_inserted)
|
| 110 |
+
return new_text, new_l, new_c
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class PathUtils:
|
| 114 |
+
"""
|
| 115 |
+
Utilities for platform-agnostic path operations.
|
| 116 |
+
"""
|
| 117 |
+
|
| 118 |
+
@staticmethod
|
| 119 |
+
def uri_to_path(uri: str) -> str:
|
| 120 |
+
"""
|
| 121 |
+
Converts a URI to a file path. Works on both Linux and Windows.
|
| 122 |
+
|
| 123 |
+
This method was obtained from https://stackoverflow.com/a/61922504
|
| 124 |
+
"""
|
| 125 |
+
try:
|
| 126 |
+
from urllib.parse import unquote, urlparse
|
| 127 |
+
from urllib.request import url2pathname
|
| 128 |
+
except ImportError:
|
| 129 |
+
# backwards compatibility
|
| 130 |
+
from urllib import unquote, url2pathname
|
| 131 |
+
|
| 132 |
+
from urlparse import urlparse
|
| 133 |
+
parsed = urlparse(uri)
|
| 134 |
+
host = f"{os.path.sep}{os.path.sep}{parsed.netloc}{os.path.sep}"
|
| 135 |
+
path = os.path.normpath(os.path.join(host, url2pathname(unquote(parsed.path))))
|
| 136 |
+
return path
|
| 137 |
+
|
| 138 |
+
@staticmethod
|
| 139 |
+
def path_to_uri(path: str) -> str:
|
| 140 |
+
"""
|
| 141 |
+
Converts a file path to a file URI (file:///...).
|
| 142 |
+
"""
|
| 143 |
+
return str(Path(path).absolute().as_uri())
|
| 144 |
+
|
| 145 |
+
@staticmethod
|
| 146 |
+
def is_glob_pattern(pattern: str) -> bool:
|
| 147 |
+
"""Check if a pattern contains glob-specific characters."""
|
| 148 |
+
return any(c in pattern for c in "*?[]!")
|
| 149 |
+
|
| 150 |
+
@staticmethod
|
| 151 |
+
def get_relative_path(path: str, base_path: str) -> str | None:
|
| 152 |
+
"""
|
| 153 |
+
Gets relative path if it's possible (paths should be on the same drive),
|
| 154 |
+
returns `None` otherwise.
|
| 155 |
+
"""
|
| 156 |
+
if PurePath(path).drive == PurePath(base_path).drive:
|
| 157 |
+
rel_path = str(PurePath(os.path.relpath(path, base_path)))
|
| 158 |
+
return rel_path
|
| 159 |
+
return None
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
class FileUtils:
    """
    Utility functions for file operations.
    """

    @staticmethod
    def read_file(logger: LanguageServerLogger, file_path: str) -> str:
        """
        Reads the file at the given path and returns the contents as a string.

        :raises SolidLSPException: if the file does not exist or cannot be read as UTF-8.
        """
        if not os.path.exists(file_path):
            logger.log(f"File read '{file_path}' failed: File does not exist.", logging.ERROR)
            raise SolidLSPException(f"File read '{file_path}' failed: File does not exist.")
        try:
            with open(file_path, encoding="utf-8") as inp_file:
                return inp_file.read()
        except Exception as exc:
            logger.log(f"File read '{file_path}' failed to read with encoding 'utf-8': {exc}", logging.ERROR)
            # Chain the cause (was `from None`) so the root cause stays visible,
            # consistent with download_and_extract_archive below.
            raise SolidLSPException("File read failed.") from exc

    @staticmethod
    def download_file(logger: LanguageServerLogger, url: str, target_path: str) -> None:
        """
        Downloads the file from the given URL to the given {target_path}

        :raises SolidLSPException: on a non-200 response or any transport error.
        """
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        try:
            response = requests.get(url, stream=True, timeout=60)
            if response.status_code != 200:
                logger.log(f"Error downloading file '{url}': {response.status_code} {response.text}", logging.ERROR)
                raise SolidLSPException("Error downloading file.")
            # NOTE(review): response.raw bypasses requests' content decoding; this
            # assumes the server does not transfer-encode (gzip) the payload — confirm.
            with open(target_path, "wb") as f:
                shutil.copyfileobj(response.raw, f)
        except Exception as exc:
            logger.log(f"Error downloading file '{url}': {exc}", logging.ERROR)
            raise SolidLSPException("Error downloading file.") from exc

    @staticmethod
    def download_and_extract_archive(logger: LanguageServerLogger, url: str, target_path: str, archive_type: str) -> None:
        """
        Downloads the archive from the given URL having format {archive_type} and extracts it to the given {target_path}

        Supported archive types: "tar", "gztar", "bztar", "xztar", "zip", "zip.gz", "gz".
        Temporary download files are always removed, even on failure.

        :raises SolidLSPException: on download failure, extraction failure, or unknown archive type.
        """
        try:
            tmp_files = []
            tmp_file_name = str(PurePath(os.path.expanduser("~"), "multilspy_tmp", uuid.uuid4().hex))
            tmp_files.append(tmp_file_name)
            os.makedirs(os.path.dirname(tmp_file_name), exist_ok=True)
            FileUtils.download_file(logger, url, tmp_file_name)
            if archive_type in ["tar", "gztar", "bztar", "xztar"]:
                os.makedirs(target_path, exist_ok=True)
                shutil.unpack_archive(tmp_file_name, target_path, archive_type)
            elif archive_type == "zip":
                os.makedirs(target_path, exist_ok=True)
                with zipfile.ZipFile(tmp_file_name, "r") as zip_ref:
                    for zip_info in zip_ref.infolist():
                        extracted_path = zip_ref.extract(zip_info, target_path)
                        ZIP_SYSTEM_UNIX = 3  # zip file created on Unix system
                        if zip_info.create_system != ZIP_SYSTEM_UNIX:
                            continue
                        # extractall() does not preserve permissions
                        # see. https://github.com/python/cpython/issues/59999
                        attrs = (zip_info.external_attr >> 16) & 0o777
                        if attrs:
                            os.chmod(extracted_path, attrs)
            elif archive_type == "zip.gz":
                os.makedirs(target_path, exist_ok=True)
                tmp_file_name_ungzipped = tmp_file_name + ".zip"
                tmp_files.append(tmp_file_name_ungzipped)
                with gzip.open(tmp_file_name, "rb") as f_in, open(tmp_file_name_ungzipped, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
                shutil.unpack_archive(tmp_file_name_ungzipped, target_path, "zip")
            elif archive_type == "gz":
                # Plain gz: target_path is the decompressed *file*, not a directory.
                with gzip.open(tmp_file_name, "rb") as f_in, open(target_path, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            else:
                logger.log(f"Unknown archive type '{archive_type}' for extraction", logging.ERROR)
                raise SolidLSPException(f"Unknown archive type '{archive_type}'")
        except Exception as exc:
            logger.log(f"Error extracting archive '{tmp_file_name}' obtained from '{url}': {exc}", logging.ERROR)
            raise SolidLSPException("Error extracting archive.") from exc
        finally:
            # Use a distinct loop variable so the error message above is not shadowed.
            for tmp_file in tmp_files:
                if os.path.exists(tmp_file):
                    Path(tmp_file).unlink()
| 247 |
+
|
| 248 |
+
class PlatformId(str, Enum):
    """
    multilspy supported platforms.

    Inherits from ``str``, so members compare equal to (and serialize as) their
    string values, e.g. ``PlatformId.WIN_x64 == "win-x64"``.
    """

    WIN_x86 = "win-x86"
    WIN_x64 = "win-x64"
    WIN_arm64 = "win-arm64"
    OSX = "osx"
    OSX_x64 = "osx-x64"
    OSX_arm64 = "osx-arm64"
    LINUX_x86 = "linux-x86"
    LINUX_x64 = "linux-x64"
    LINUX_arm64 = "linux-arm64"
    LINUX_MUSL_x64 = "linux-musl-x64"
    LINUX_MUSL_arm64 = "linux-musl-arm64"

    def is_windows(self) -> bool:
        """Return True if this platform id denotes a Windows platform."""
        return self.value.startswith("win")
| 268 |
+
|
| 269 |
+
class DotnetVersion(str, Enum):
    """
    multilspy supported dotnet versions.

    Values are the major-version strings reported by ``dotnet --list-runtimes``,
    plus the special value ``"mono"`` for a Mono runtime.
    """

    V4 = "4"
    V6 = "6"
    V7 = "7"
    V8 = "8"
    V9 = "9"
    VMONO = "mono"
| 281 |
+
|
| 282 |
+
class PlatformUtils:
    """
    This class provides utilities for platform detection and identification.
    """

    @classmethod
    def get_platform_id(cls) -> PlatformId:
        """
        Returns the platform id for the current system.

        :raises SolidLSPException: if the OS/architecture combination is not recognized.
        """
        system = platform.system()
        machine = platform.machine()
        bitness = platform.architecture()[0]
        if system == "Windows" and machine == "":
            # platform.machine() can be empty on some Windows setups; query Win32 directly.
            machine = cls._determine_windows_machine_type()
        system_map = {"Windows": "win", "Darwin": "osx", "Linux": "linux"}
        machine_map = {
            "AMD64": "x64",
            "x86_64": "x64",
            "i386": "x86",
            "i686": "x86",
            "aarch64": "arm64",
            "arm64": "arm64",
            "ARM64": "arm64",
        }
        if system in system_map and machine in machine_map:
            platform_id = system_map[system] + "-" + machine_map[machine]
            if system == "Linux" and bitness == "64bit":
                libc = platform.libc_ver()[0]
                # NOTE(review): on musl systems platform.libc_ver() may report an
                # empty libc name, which would yield an id like "linux-x64-" that
                # is not a valid PlatformId — confirm musl detection is intended.
                if libc != "glibc":
                    platform_id += "-" + libc
            return PlatformId(platform_id)
        else:
            raise SolidLSPException(f"Unknown platform: {system=}, {machine=}, {bitness=}")

    @staticmethod
    def _determine_windows_machine_type():
        """Query the native Win32 GetNativeSystemInfo() for the processor architecture name."""
        import ctypes
        from ctypes import wintypes

        # Mirrors the Win32 SYSTEM_INFO struct layout (with its anonymous union).
        class SYSTEM_INFO(ctypes.Structure):
            class _U(ctypes.Union):
                class _S(ctypes.Structure):
                    _fields_ = [("wProcessorArchitecture", wintypes.WORD), ("wReserved", wintypes.WORD)]

                _fields_ = [("dwOemId", wintypes.DWORD), ("s", _S)]
                _anonymous_ = ("s",)

            _fields_ = [
                ("u", _U),
                ("dwPageSize", wintypes.DWORD),
                ("lpMinimumApplicationAddress", wintypes.LPVOID),
                ("lpMaximumApplicationAddress", wintypes.LPVOID),
                ("dwActiveProcessorMask", wintypes.LPVOID),
                ("dwNumberOfProcessors", wintypes.DWORD),
                ("dwProcessorType", wintypes.DWORD),
                ("dwAllocationGranularity", wintypes.DWORD),
                ("wProcessorLevel", wintypes.WORD),
                ("wProcessorRevision", wintypes.WORD),
            ]
            _anonymous_ = ("u",)

        sys_info = SYSTEM_INFO()
        ctypes.windll.kernel32.GetNativeSystemInfo(ctypes.byref(sys_info))

        # PROCESSOR_ARCHITECTURE_* values -> names matching platform.machine() conventions.
        arch_map = {
            9: "AMD64",
            5: "ARM",
            12: "arm64",
            6: "Intel Itanium-based",
            0: "i386",
        }

        return arch_map.get(sys_info.wProcessorArchitecture, f"Unknown ({sys_info.wProcessorArchitecture})")

    @staticmethod
    def get_dotnet_version() -> DotnetVersion:
        """
        Returns the dotnet version for the current system (falling back to mono).

        :raises SolidLSPException: if neither a supported dotnet runtime nor mono is found.
        """
        try:
            result = subprocess.run(["dotnet", "--list-runtimes"], capture_output=True, check=True)
            available_version_cmd_output = []
            for line in result.stdout.decode("utf-8").split("\n"):
                if line.startswith("Microsoft.NETCore.App"):
                    version_cmd_output = line.split(" ")[1]
                    available_version_cmd_output.append(version_cmd_output)

            if not available_version_cmd_output:
                raise SolidLSPException("dotnet not found on the system")

            # Check for supported versions in order of preference (latest first).
            # Bug fix: iterate over the preferred majors in the outer loop so the
            # newest *installed* runtime wins; previously the first runtime in the
            # listing order won regardless of its version (e.g. "6.0, 9.0" -> V6).
            for preferred_major in ("9", "8", "7", "6", "4"):
                if any(v.startswith(preferred_major) for v in available_version_cmd_output):
                    return DotnetVersion(preferred_major)

            # If no supported version found, raise exception with all available versions
            raise SolidLSPException(
                f"No supported dotnet version found. Available versions: {', '.join(available_version_cmd_output)}. Supported versions: 4, 6, 7, 8, 9"
            )
        except (FileNotFoundError, subprocess.CalledProcessError):
            try:
                subprocess.run(["mono", "--version"], capture_output=True, check=True)
                return DotnetVersion.VMONO
            except (FileNotFoundError, subprocess.CalledProcessError):
                raise SolidLSPException("dotnet or mono not found on the system")
| 397 |
+
|
| 398 |
+
class SymbolUtils:
    """Helpers for querying trees of UnifiedSymbolInformation."""

    @staticmethod
    def symbol_tree_contains_name(roots: list[UnifiedSymbolInformation], name: str) -> bool:
        """Return True if any symbol in the given trees (roots or any descendant) bears ``name``."""
        return any(
            node["name"] == name or SymbolUtils.symbol_tree_contains_name(node["children"], name)
            for node in roots
        )
projects/ui/serena-new/src/solidlsp/settings.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Defines settings for Solid-LSP
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import pathlib
|
| 7 |
+
from dataclasses import dataclass
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@dataclass
class SolidLSPSettings:
    """Settings for Solid-LSP; directories are created eagerly on construction."""

    solidlsp_dir: str = str(pathlib.Path.home() / ".solidlsp")
    """
    Path to the directory in which to store global Solid-LSP data (which is not project-specific)
    """
    project_data_relative_path: str = ".solidlsp"
    """
    Relative path within each project directory where Solid-LSP can store project-specific data, e.g. cache files.
    For instance, if this is ".solidlsp" and the project is located at "/home/user/myproject",
    then Solid-LSP will store project-specific data in "/home/user/myproject/.solidlsp".
    """

    def __post_init__(self) -> None:
        # Creating the resources dir with exist_ok=True also creates solidlsp_dir
        # as its parent, so a single makedirs call suffices (previously two).
        os.makedirs(self.ls_resources_dir, exist_ok=True)

    @property
    def ls_resources_dir(self) -> str:
        """Directory for static language-server resources, below ``solidlsp_dir``."""
        return os.path.join(str(self.solidlsp_dir), "language_servers", "static")
|