sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
openai/openai-python:src/openai/types/graders/multi_grader_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Union
from typing_extensions import Literal, Required, TypeAlias, TypedDict
from .python_grader_param import PythonGraderParam
from .label_model_grader_param import LabelModelGraderParam
from .score_model_grader_param import ScoreModelGraderParam
from .string_check_grader_param import StringCheckGraderParam
from .text_similarity_grader_param import TextSimilarityGraderParam
__all__ = ["MultiGraderParam", "Graders"]
Graders: TypeAlias = Union[
StringCheckGraderParam, TextSimilarityGraderParam, PythonGraderParam, ScoreModelGraderParam, LabelModelGraderParam
]
class MultiGraderParam(TypedDict, total=False):
"""
A MultiGrader object combines the output of multiple graders to produce a single score.
"""
calculate_output: Required[str]
"""A formula to calculate the output based on grader results."""
graders: Required[Graders]
"""
A StringCheckGrader object that performs a string comparison between input and
reference using a specified operation.
"""
name: Required[str]
"""The name of the grader."""
type: Required[Literal["multi"]]
"""The object type, which is always `multi`."""
| {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/graders/multi_grader_param.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/graders/python_grader.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["PythonGrader"]
class PythonGrader(BaseModel):
"""A PythonGrader object that runs a python script on the input."""
name: str
"""The name of the grader."""
source: str
"""The source code of the python script."""
type: Literal["python"]
"""The object type, which is always `python`."""
image_tag: Optional[str] = None
"""The image tag to use for the python script."""
| {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/graders/python_grader.py",
"license": "Apache License 2.0",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/graders/python_grader_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, TypedDict
__all__ = ["PythonGraderParam"]
class PythonGraderParam(TypedDict, total=False):
"""A PythonGrader object that runs a python script on the input."""
name: Required[str]
"""The name of the grader."""
source: Required[str]
"""The source code of the python script."""
type: Required[Literal["python"]]
"""The object type, which is always `python`."""
image_tag: str
"""The image tag to use for the python script."""
| {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/graders/python_grader_param.py",
"license": "Apache License 2.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/graders/score_model_grader.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
from typing_extensions import Literal, TypeAlias
from ..._models import BaseModel
from .grader_inputs import GraderInputs
from ..shared.reasoning_effort import ReasoningEffort
from ..responses.response_input_text import ResponseInputText
from ..responses.response_input_audio import ResponseInputAudio
__all__ = [
"ScoreModelGrader",
"Input",
"InputContent",
"InputContentOutputText",
"InputContentInputImage",
"SamplingParams",
]
class InputContentOutputText(BaseModel):
"""A text output from the model."""
text: str
"""The text output from the model."""
type: Literal["output_text"]
"""The type of the output text. Always `output_text`."""
class InputContentInputImage(BaseModel):
"""An image input block used within EvalItem content arrays."""
image_url: str
"""The URL of the image input."""
type: Literal["input_image"]
"""The type of the image input. Always `input_image`."""
detail: Optional[str] = None
"""The detail level of the image to be sent to the model.
One of `high`, `low`, or `auto`. Defaults to `auto`.
"""
InputContent: TypeAlias = Union[
str, ResponseInputText, InputContentOutputText, InputContentInputImage, ResponseInputAudio, GraderInputs
]
class Input(BaseModel):
"""
A message input to the model with a role indicating instruction following
hierarchy. Instructions given with the `developer` or `system` role take
precedence over instructions given with the `user` role. Messages with the
`assistant` role are presumed to have been generated by the model in previous
interactions.
"""
content: InputContent
"""Inputs to the model - can contain template strings.
Supports text, output text, input images, and input audio, either as a single
item or an array of items.
"""
role: Literal["user", "assistant", "system", "developer"]
"""The role of the message input.
One of `user`, `assistant`, `system`, or `developer`.
"""
type: Optional[Literal["message"]] = None
"""The type of the message input. Always `message`."""
class SamplingParams(BaseModel):
"""The sampling parameters for the model."""
max_completions_tokens: Optional[int] = None
"""The maximum number of tokens the grader model may generate in its response."""
reasoning_effort: Optional[ReasoningEffort] = None
"""
Constrains effort on reasoning for
[reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
supported values are `none`, `minimal`, `low`, `medium`, `high`, and `xhigh`.
Reducing reasoning effort can result in faster responses and fewer tokens used
on reasoning in a response.
- `gpt-5.1` defaults to `none`, which does not perform reasoning. The supported
reasoning values for `gpt-5.1` are `none`, `low`, `medium`, and `high`. Tool
calls are supported for all reasoning values in gpt-5.1.
- All models before `gpt-5.1` default to `medium` reasoning effort, and do not
support `none`.
- The `gpt-5-pro` model defaults to (and only supports) `high` reasoning effort.
- `xhigh` is supported for all models after `gpt-5.1-codex-max`.
"""
seed: Optional[int] = None
"""A seed value to initialize the randomness, during sampling."""
temperature: Optional[float] = None
"""A higher temperature increases randomness in the outputs."""
top_p: Optional[float] = None
"""An alternative to temperature for nucleus sampling; 1.0 includes all tokens."""
class ScoreModelGrader(BaseModel):
"""A ScoreModelGrader object that uses a model to assign a score to the input."""
input: List[Input]
"""The input messages evaluated by the grader.
Supports text, output text, input image, and input audio content blocks, and may
include template strings.
"""
model: str
"""The model to use for the evaluation."""
name: str
"""The name of the grader."""
type: Literal["score_model"]
"""The object type, which is always `score_model`."""
range: Optional[List[float]] = None
"""The range of the score. Defaults to `[0, 1]`."""
sampling_params: Optional[SamplingParams] = None
"""The sampling parameters for the model."""
| {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/graders/score_model_grader.py",
"license": "Apache License 2.0",
"lines": 96,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
openai/openai-python:src/openai/types/graders/score_model_grader_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
from .grader_inputs_param import GraderInputsParam
from ..shared.reasoning_effort import ReasoningEffort
from ..responses.response_input_text_param import ResponseInputTextParam
from ..responses.response_input_audio_param import ResponseInputAudioParam
__all__ = [
"ScoreModelGraderParam",
"Input",
"InputContent",
"InputContentOutputText",
"InputContentInputImage",
"SamplingParams",
]
class InputContentOutputText(TypedDict, total=False):
"""A text output from the model."""
text: Required[str]
"""The text output from the model."""
type: Required[Literal["output_text"]]
"""The type of the output text. Always `output_text`."""
class InputContentInputImage(TypedDict, total=False):
"""An image input block used within EvalItem content arrays."""
image_url: Required[str]
"""The URL of the image input."""
type: Required[Literal["input_image"]]
"""The type of the image input. Always `input_image`."""
detail: str
"""The detail level of the image to be sent to the model.
One of `high`, `low`, or `auto`. Defaults to `auto`.
"""
InputContent: TypeAlias = Union[
str,
ResponseInputTextParam,
InputContentOutputText,
InputContentInputImage,
ResponseInputAudioParam,
GraderInputsParam,
]
class Input(TypedDict, total=False):
"""
A message input to the model with a role indicating instruction following
hierarchy. Instructions given with the `developer` or `system` role take
precedence over instructions given with the `user` role. Messages with the
`assistant` role are presumed to have been generated by the model in previous
interactions.
"""
content: Required[InputContent]
"""Inputs to the model - can contain template strings.
Supports text, output text, input images, and input audio, either as a single
item or an array of items.
"""
role: Required[Literal["user", "assistant", "system", "developer"]]
"""The role of the message input.
One of `user`, `assistant`, `system`, or `developer`.
"""
type: Literal["message"]
"""The type of the message input. Always `message`."""
class SamplingParams(TypedDict, total=False):
"""The sampling parameters for the model."""
max_completions_tokens: Optional[int]
"""The maximum number of tokens the grader model may generate in its response."""
reasoning_effort: Optional[ReasoningEffort]
"""
Constrains effort on reasoning for
[reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
supported values are `none`, `minimal`, `low`, `medium`, `high`, and `xhigh`.
Reducing reasoning effort can result in faster responses and fewer tokens used
on reasoning in a response.
- `gpt-5.1` defaults to `none`, which does not perform reasoning. The supported
reasoning values for `gpt-5.1` are `none`, `low`, `medium`, and `high`. Tool
calls are supported for all reasoning values in gpt-5.1.
- All models before `gpt-5.1` default to `medium` reasoning effort, and do not
support `none`.
- The `gpt-5-pro` model defaults to (and only supports) `high` reasoning effort.
- `xhigh` is supported for all models after `gpt-5.1-codex-max`.
"""
seed: Optional[int]
"""A seed value to initialize the randomness, during sampling."""
temperature: Optional[float]
"""A higher temperature increases randomness in the outputs."""
top_p: Optional[float]
"""An alternative to temperature for nucleus sampling; 1.0 includes all tokens."""
class ScoreModelGraderParam(TypedDict, total=False):
"""A ScoreModelGrader object that uses a model to assign a score to the input."""
input: Required[Iterable[Input]]
"""The input messages evaluated by the grader.
Supports text, output text, input image, and input audio content blocks, and may
include template strings.
"""
model: Required[str]
"""The model to use for the evaluation."""
name: Required[str]
"""The name of the grader."""
type: Required[Literal["score_model"]]
"""The object type, which is always `score_model`."""
range: Iterable[float]
"""The range of the score. Defaults to `[0, 1]`."""
sampling_params: SamplingParams
"""The sampling parameters for the model."""
| {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/graders/score_model_grader_param.py",
"license": "Apache License 2.0",
"lines": 101,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
openai/openai-python:tests/api_resources/fine_tuning/alpha/test_graders.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import os
from typing import Any, cast
import pytest
from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types.fine_tuning.alpha import (
GraderRunResponse,
GraderValidateResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
class TestGraders:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
@parametrize
def test_method_run(self, client: OpenAI) -> None:
grader = client.fine_tuning.alpha.graders.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
)
assert_matches_type(GraderRunResponse, grader, path=["response"])
@parametrize
def test_method_run_with_all_params(self, client: OpenAI) -> None:
grader = client.fine_tuning.alpha.graders.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
item={},
)
assert_matches_type(GraderRunResponse, grader, path=["response"])
@parametrize
def test_raw_response_run(self, client: OpenAI) -> None:
response = client.fine_tuning.alpha.graders.with_raw_response.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = response.parse()
assert_matches_type(GraderRunResponse, grader, path=["response"])
@parametrize
def test_streaming_response_run(self, client: OpenAI) -> None:
with client.fine_tuning.alpha.graders.with_streaming_response.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = response.parse()
assert_matches_type(GraderRunResponse, grader, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
def test_method_validate(self, client: OpenAI) -> None:
grader = client.fine_tuning.alpha.graders.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
)
assert_matches_type(GraderValidateResponse, grader, path=["response"])
@parametrize
def test_method_validate_with_all_params(self, client: OpenAI) -> None:
grader = client.fine_tuning.alpha.graders.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
)
assert_matches_type(GraderValidateResponse, grader, path=["response"])
@parametrize
def test_raw_response_validate(self, client: OpenAI) -> None:
response = client.fine_tuning.alpha.graders.with_raw_response.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = response.parse()
assert_matches_type(GraderValidateResponse, grader, path=["response"])
@parametrize
def test_streaming_response_validate(self, client: OpenAI) -> None:
with client.fine_tuning.alpha.graders.with_streaming_response.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = response.parse()
assert_matches_type(GraderValidateResponse, grader, path=["response"])
assert cast(Any, response.is_closed) is True
class TestAsyncGraders:
parametrize = pytest.mark.parametrize(
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
@parametrize
async def test_method_run(self, async_client: AsyncOpenAI) -> None:
grader = await async_client.fine_tuning.alpha.graders.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
)
assert_matches_type(GraderRunResponse, grader, path=["response"])
@parametrize
async def test_method_run_with_all_params(self, async_client: AsyncOpenAI) -> None:
grader = await async_client.fine_tuning.alpha.graders.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
item={},
)
assert_matches_type(GraderRunResponse, grader, path=["response"])
@parametrize
async def test_raw_response_run(self, async_client: AsyncOpenAI) -> None:
response = await async_client.fine_tuning.alpha.graders.with_raw_response.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = response.parse()
assert_matches_type(GraderRunResponse, grader, path=["response"])
@parametrize
async def test_streaming_response_run(self, async_client: AsyncOpenAI) -> None:
async with async_client.fine_tuning.alpha.graders.with_streaming_response.run(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
model_sample="model_sample",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = await response.parse()
assert_matches_type(GraderRunResponse, grader, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_method_validate(self, async_client: AsyncOpenAI) -> None:
grader = await async_client.fine_tuning.alpha.graders.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
)
assert_matches_type(GraderValidateResponse, grader, path=["response"])
@parametrize
async def test_method_validate_with_all_params(self, async_client: AsyncOpenAI) -> None:
grader = await async_client.fine_tuning.alpha.graders.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
)
assert_matches_type(GraderValidateResponse, grader, path=["response"])
@parametrize
async def test_raw_response_validate(self, async_client: AsyncOpenAI) -> None:
response = await async_client.fine_tuning.alpha.graders.with_raw_response.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = response.parse()
assert_matches_type(GraderValidateResponse, grader, path=["response"])
@parametrize
async def test_streaming_response_validate(self, async_client: AsyncOpenAI) -> None:
async with async_client.fine_tuning.alpha.graders.with_streaming_response.validate(
grader={
"input": "input",
"name": "name",
"operation": "eq",
"reference": "reference",
"type": "string_check",
},
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
grader = await response.parse()
assert_matches_type(GraderValidateResponse, grader, path=["response"])
assert cast(Any, response.is_closed) is True
| {
"repo_id": "openai/openai-python",
"file_path": "tests/api_resources/fine_tuning/alpha/test_graders.py",
"license": "Apache License 2.0",
"lines": 248,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:src/serena/project_server.py | import json
import logging
from typing import TYPE_CHECKING
import requests as requests_lib
from flask import Flask, request
from pydantic import BaseModel
from sensai.util.logging import LogTime
from serena.config.serena_config import LanguageBackend, SerenaConfig
from serena.jetbrains.jetbrains_plugin_client import JetBrainsPluginClient
if TYPE_CHECKING:
from serena.project import Project
log = logging.getLogger(__name__)
# disable Werkzeug's logging to avoid cluttering the output
logging.getLogger("werkzeug").setLevel(logging.WARNING)
class QueryProjectRequest(BaseModel):
"""
Request model for the /query_project endpoint, matching the interface of
:class:`~serena.tools.query_project_tools.QueryProjectTool`.
"""
project_name: str
tool_name: str
tool_params_json: str
class ProjectServer:
"""
A lightweight Flask server that exposes a SerenaAgent's project querying
capabilities via HTTP, using the LSP language server backend for symbolic retrieval.
Projects are loaded on demand when a query is made for them, and cached in memory for subsequent queries.
The server instantiates a :class:`SerenaAgent` with default options and
provides a ``/query_project`` endpoint whose interface matches
:class:`~serena.tools.query_project_tools.QueryProjectTool`.
"""
PORT = JetBrainsPluginClient.BASE_PORT - 1
def __init__(self) -> None:
from serena.agent import SerenaAgent
serena_config = SerenaConfig.from_config_file()
serena_config.gui_log_window = False
serena_config.web_dashboard = False
serena_config.language_backend = LanguageBackend.LSP
self._agent = SerenaAgent(serena_config=serena_config)
self._loaded_projects_by_name: dict[str, "Project"] = {}
# create the Flask application
self._app = Flask(__name__)
self._setup_routes()
def _setup_routes(self) -> None:
@self._app.route("/heartbeat", methods=["GET"])
def heartbeat() -> dict[str, str]:
return {"status": "alive"}
@self._app.route("/query_project", methods=["POST"])
def query_project() -> str:
query_request = QueryProjectRequest.model_validate(request.get_json())
return self._query_project(query_request)
def _get_project(self, project_name: str) -> "Project":
"""Gets the project with the given name, loading it if necessary."""
if project_name in self._loaded_projects_by_name:
return self._loaded_projects_by_name[project_name]
else:
serena_config = self._agent.serena_config
registered_project = serena_config.get_registered_project(project_name)
if registered_project is None:
raise ValueError(f"Project '{project_name}' is not registered with Serena.")
with LogTime(f"Loading project '{project_name}'"):
project = registered_project.get_project_instance(serena_config)
project.create_language_server_manager()
self._loaded_projects_by_name[project_name] = project
return project
def _query_project(self, req: QueryProjectRequest) -> str:
"""Handle a /query_project request by invoking the agent on the specified project and tool."""
project = self._get_project(req.project_name)
with self._agent.active_project_context(project):
tool = self._agent.get_tool_by_name(req.tool_name)
params = json.loads(req.tool_params_json)
return tool.apply_ex(**params)
def run(self, host: str = "127.0.0.1", port: int = PORT) -> int:
"""Run the server on the given host and port.
:param host: the host address to listen on.
:param port: the port to listen on.
:return: the port number the server is running on.
"""
from flask import cli
# suppress the default Flask startup banner
cli.show_server_banner = lambda *args, **kwargs: None
self._app.run(host=host, port=port, debug=False, use_reloader=False, threaded=True)
return port
class ProjectServerClient:
"""Client for interacting with a running :class:`ProjectServer`.
Upon instantiation, the client verifies that the server is reachable
by sending a heartbeat request. If the server is not running, a
:class:`ConnectionError` is raised.
"""
def __init__(self, host: str = "127.0.0.1", port: int = ProjectServer.PORT, timeout: int = 300) -> None:
"""
:param host: the host address of the project server.
:param port: the port of the project server.
:raises ConnectionError: if the project server is not reachable.
"""
self._base_url = f"http://{host}:{port}"
self._timeout = timeout
# verify that the server is running
try:
response = requests_lib.get(f"{self._base_url}/heartbeat", timeout=5)
response.raise_for_status()
except requests_lib.ConnectionError:
raise ConnectionError(f"ProjectServer is not reachable at {self._base_url}. Make sure the server is running.")
except requests_lib.RequestException as e:
raise ConnectionError(f"ProjectServer health check failed: {e}")
def query_project(self, project_name: str, tool_name: str, tool_params_json: str) -> str:
"""
Query a project by executing a Serena tool in its context.
The interface matches :meth:`QueryProjectTool.apply
<serena.tools.query_project_tools.QueryProjectTool.apply>`.
:param project_name: the name of the project to query.
:param tool_name: the name of the tool to execute. The tool must be read-only.
:param tool_params_json: the parameters to pass to the tool, encoded as a JSON string.
:return: the tool's result as a string.
"""
payload = QueryProjectRequest(
project_name=project_name,
tool_name=tool_name,
tool_params_json=tool_params_json,
).model_dump()
response = requests_lib.post(f"{self._base_url}/query_project", json=payload, timeout=self._timeout)
response.raise_for_status()
return response.text
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/project_server.py",
"license": "MIT License",
"lines": 123,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/serena/tools/query_project_tools.py | import json
from serena.config.serena_config import LanguageBackend
from serena.jetbrains.jetbrains_plugin_client import JetBrainsPluginClientManager
from serena.project_server import ProjectServerClient
from serena.tools import Tool, ToolMarkerDoesNotRequireActiveProject, ToolMarkerOptional
class ListQueryableProjectsTool(Tool, ToolMarkerOptional, ToolMarkerDoesNotRequireActiveProject):
"""
Tool for listing all projects that can be queried by the QueryProjectTool.
"""
def apply(self, symbol_access: bool = True) -> str:
"""
Lists available projects that can be queried with `query_project_tool`.
:param symbol_access: whether to return only projects for which symbol access is available. Default: true
"""
# determine relevant projects
registered_projects = self.agent.serena_config.projects
if symbol_access:
backend = self.agent.get_language_backend()
if backend.is_jetbrains():
# projects with open IDE instances can be queried
matched_clients = JetBrainsPluginClientManager().match_clients(registered_projects)
relevant_projects = [mc.registered_project for mc in matched_clients]
else:
# all projects can be queried via ProjectServer (which instantiates projects dynamically)
relevant_projects = registered_projects
else:
relevant_projects = registered_projects
# return project names, excluding the active project (if any)
project_names = [p.project_name for p in relevant_projects]
active_project = self.agent.get_active_project()
if active_project is not None:
project_names = [n for n in project_names if n != active_project.project_name]
return self._to_json(project_names)
class QueryProjectTool(Tool, ToolMarkerOptional, ToolMarkerDoesNotRequireActiveProject):
"""
Tool for querying external project information (i.e. information from projects other than the current one),
by executing a read-only tool.
"""
def apply(self, project_name: str, tool_name: str, tool_params_json: str) -> str:
"""
Queries a project by executing a read-only Serena tool. The tool will be executed in the context of the project.
Use this to query information from projects other than the activated project.
:param project_name: the name of the project to query
:param tool_name: the name of the tool to execute in the other project. The tool must be read-only.
:param tool_params_json: the parameters to pass to the tool, encoded as a JSON string
"""
tool = self.agent.get_tool_by_name(tool_name)
assert tool.is_active(), f"Tool {tool_name} is not active."
assert tool.is_readonly(), f"Tool {tool_name} is not read-only and cannot be executed in another project."
if self._is_project_server_required(tool):
client = ProjectServerClient()
return client.query_project(project_name, tool_name, tool_params_json)
else:
registered_project = self.agent.serena_config.get_registered_project(project_name)
assert registered_project is not None, f"Project {project_name} is not registered and cannot be queried."
project = registered_project.get_project_instance(self.agent.serena_config)
with tool.agent.active_project_context(project):
return tool.apply(**json.loads(tool_params_json)) # type: ignore
def _is_project_server_required(self, tool: Tool) -> bool:
match self.agent.get_language_backend():
case LanguageBackend.JETBRAINS:
return False
case LanguageBackend.LSP:
# Note: As long as only read-only tools are considered, only symbolic tools require the project server.
# But if we were to allow non-read-only tools, then tools using a CodeEditor also indirectly require language servers.
assert tool.is_readonly()
return tool.is_symbolic()
case _:
raise NotImplementedError
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/tools/query_project_tools.py",
"license": "MIT License",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/serena/util/dotnet.py | import logging
import platform
import re
import shutil
import subprocess
import urllib
from pathlib import Path
from serena.util.version import Version
from solidlsp.ls_exceptions import SolidLSPException
log = logging.getLogger(__name__)
class DotNETUtil:
    """Utility for locating a system-installed .NET runtime and validating its version."""

    def __init__(self, required_version: str, allow_higher_version: bool = True):
        """
        :param required_version: the required .NET runtime version specified as a string (e.g. "10.0" for .NET 10.0)
        :param allow_higher_version: whether to allow higher versions than the required version
        """
        self._system_dotnet = shutil.which("dotnet")
        self._required_version_str = required_version
        self._required_version_components = [int(c) for c in required_version.split(".")]
        self._allow_higher_version = allow_higher_version
        self._installed_versions = self._determine_installed_versions()

    def _determine_installed_versions(self) -> "list[Version]":
        """
        Query `dotnet --list-runtimes` and return the installed Microsoft.NETCore.App
        runtime versions; returns an empty list when `dotnet` is unavailable or the query fails.
        """
        if self._system_dotnet:
            try:
                result = subprocess.run([self._system_dotnet, "--list-runtimes"], capture_output=True, text=True, check=True)
                version_strings = re.findall(r"Microsoft.NETCore.App\s+([^\s]+)", result.stdout)
                log.info("Installed .NET runtime versions: %s", version_strings)
                return [Version(v) for v in version_strings]
            # Catch only the failures subprocess.run can raise here (non-zero exit,
            # missing/broken executable); a bare `except:` would also swallow
            # KeyboardInterrupt and SystemExit.
            except (subprocess.SubprocessError, OSError):
                log.warning("Failed to run 'dotnet --list-runtimes' to check .NET version; assuming no installed .NET versions")
                return []
        else:
            log.info("Found no `dotnet` on system PATH; assuming no installed .NET versions")
            return []

    def is_required_version_available(self) -> bool:
        """
        Checks whether an installed .NET runtime satisfies the version requirement
        given at construction time.

        :return: True if some installed runtime matches the required version exactly,
            or is at least the required version when `allow_higher_version` was set;
            False otherwise (including when no runtimes are installed).
        """
        required_version_str = ".".join(str(c) for c in self._required_version_components)
        for v in self._installed_versions:
            if self._allow_higher_version:
                if v.is_at_least(*self._required_version_components):
                    log.info(f"Found installed .NET runtime version {v} which satisfies requirement of {required_version_str} or higher")
                    return True
            else:
                if v.is_equal(*self._required_version_components):
                    log.info(f"Found installed .NET runtime version {v} which satisfies requirement of {required_version_str}")
                    return True
        return False

    def get_dotnet_path_or_raise(self) -> str:
        """
        Returns the path to the dotnet executable if the required .NET runtime version is available, otherwise raises an exception.

        :raises SolidLSPException: if no installed runtime satisfies the requirement.
        """
        if not self.is_required_version_available():
            raise SolidLSPException(
                f"Required .NET runtime version {self._required_version_str} not found "
                f"(installed versions: {self._installed_versions}). "
                "Please install the required .NET runtime version from https://dotnet.microsoft.com/en-us/download/dotnet "
                "and ensure that `dotnet` is on the system PATH."
            )
        # A satisfying version implies `dotnet` was found on the PATH in __init__.
        assert self._system_dotnet is not None
        return self._system_dotnet

    @staticmethod
    def install_dotnet_with_script(version: str, base_path: str) -> str:
        """
        Install .NET runtime using Microsoft's official installation script.

        NOTE: This method is unreliable and therefore currently unused. It is kept for reference.

        :param version: the version to install as a string (e.g. "10.0")
        :param base_path: base directory under which the runtime is installed and cached
        :return: the path to the dotnet executable.
        :raises SolidLSPException: if the download or install script fails.
        """
        # `import urllib` alone does not guarantee that the `urllib.request` submodule
        # is loaded; import it explicitly before calling urlretrieve.
        import urllib.request

        dotnet_dir = Path(base_path) / f"dotnet-runtime-{version}"
        # Determine binary name based on platform
        is_windows = platform.system().lower() == "windows"
        dotnet_exe = dotnet_dir / ("dotnet.exe" if is_windows else "dotnet")
        if dotnet_exe.exists():
            log.info(f"Using cached .NET {version} runtime from {dotnet_exe}")
            return str(dotnet_exe)
        # Download and run install script
        log.info(f"Installing .NET {version} runtime using official Microsoft install script...")
        dotnet_dir.mkdir(parents=True, exist_ok=True)
        try:
            if is_windows:
                # PowerShell script for Windows
                script_url = "https://dot.net/v1/dotnet-install.ps1"
                script_path = dotnet_dir / "dotnet-install.ps1"
                urllib.request.urlretrieve(script_url, script_path)
                cmd = [
                    "pwsh",
                    "-NoProfile",
                    "-ExecutionPolicy",
                    "Bypass",
                    "-File",
                    str(script_path),
                    "-Version",
                    version,
                    "-InstallDir",
                    str(dotnet_dir),
                    "-Runtime",
                    "dotnet",
                    "-NoPath",
                ]
            else:
                # Bash script for Linux/macOS
                script_url = "https://dot.net/v1/dotnet-install.sh"
                script_path = dotnet_dir / "dotnet-install.sh"
                urllib.request.urlretrieve(script_url, script_path)
                script_path.chmod(0o755)
                cmd = [
                    "bash",
                    str(script_path),
                    "--version",
                    version,
                    "--install-dir",
                    str(dotnet_dir),
                    "--runtime",
                    "dotnet",
                    "--no-path",
                ]
            # Run the install script
            log.info("Running .NET install script: %s", cmd)
            result = subprocess.run(cmd, capture_output=True, text=True, check=True)
            log.debug(f"Install script output: {result.stdout}")
            if not dotnet_exe.exists():
                raise SolidLSPException(f"dotnet executable not found at {dotnet_exe} after installation")
            log.info(f"Successfully installed .NET {version} runtime to {dotnet_exe}")
            return str(dotnet_exe)
        except subprocess.CalledProcessError as e:
            raise SolidLSPException(f"Failed to install .NET {version} runtime using install script: {e.stderr if e.stderr else e}") from e
        except Exception as e:
            message = f"Failed to install .NET {version} runtime: {e}"
            if is_windows and isinstance(e, FileNotFoundError):
                message += "; pwsh, i.e. PowerShell 7+, is required to install .NET runtime. Make sure pwsh is available on your system."
            raise SolidLSPException(message) from e
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/util/dotnet.py",
"license": "MIT License",
"lines": 135,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/hlsl_language_server.py | """
Shader language server using shader-language-server (antaalt/shader-sense).
Supports HLSL, GLSL, and WGSL shader file formats.
"""
import logging
import os
import pathlib
import shutil
from typing import Any, cast
from overrides import override
from solidlsp.ls import LanguageServerDependencyProvider, LanguageServerDependencyProviderSinglePath, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
from .common import RuntimeDependency, RuntimeDependencyCollection
log = logging.getLogger(__name__)
# GitHub release version to download when not installed locally
_DEFAULT_VERSION = "1.3.0"
_GITHUB_RELEASE_BASE = "https://github.com/antaalt/shader-sense/releases/download"
class HlslLanguageServer(SolidLanguageServer):
    """
    Shader language server using shader-language-server.
    Supports .hlsl, .hlsli, .fx, .fxh, .cginc, .compute, .shader, .glsl, .vert, .frag, .geom, .tesc, .tese, .comp, .wgsl files.
    """

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings) -> None:
        # Launch command is passed as None: the binary is resolved lazily by the
        # dependency provider created in _create_dependency_provider.
        super().__init__(config, repository_root_path, None, "hlsl", solidlsp_settings)

    @override
    def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
        return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)

    class DependencyProvider(LanguageServerDependencyProviderSinglePath):
        # Resolution order for the shader-language-server binary:
        # system PATH -> cached/downloaded pre-built GitHub release (Windows/Linux)
        # -> build from source via cargo (macOS, which has no pre-built binaries).

        def _get_or_install_core_dependency(self) -> str:
            # 1. Check PATH for system-installed binary
            system_binary = shutil.which("shader-language-server")
            if system_binary:
                log.info(f"Using system-installed shader-language-server at {system_binary}")
                return system_binary
            # 2. Try to download pre-built binary from GitHub releases
            version = self._custom_settings.get("version", _DEFAULT_VERSION)
            tag = f"v{version}"
            base_url = f"{_GITHUB_RELEASE_BASE}/{tag}"
            # macOS has no pre-built binaries; build from source via cargo install
            cargo_install_cmd = f"cargo install shader_language_server --version {version} --root ."
            deps = RuntimeDependencyCollection(
                [
                    RuntimeDependency(
                        id="shader-language-server",
                        description="shader-language-server for Windows (x64)",
                        url=f"{base_url}/shader-language-server-x86_64-pc-windows-msvc.zip",
                        platform_id="win-x64",
                        archive_type="zip",
                        binary_name="shader-language-server.exe",
                    ),
                    RuntimeDependency(
                        id="shader-language-server",
                        description="shader-language-server for Linux (x64)",
                        url=f"{base_url}/shader-language-server-x86_64-unknown-linux-gnu.zip",
                        platform_id="linux-x64",
                        archive_type="zip",
                        binary_name="shader-language-server",
                    ),
                    RuntimeDependency(
                        id="shader-language-server",
                        description="shader-language-server for Windows (ARM64)",
                        url=f"{base_url}/shader-language-server-aarch64-pc-windows-msvc.zip",
                        platform_id="win-arm64",
                        archive_type="zip",
                        binary_name="shader-language-server.exe",
                    ),
                    RuntimeDependency(
                        id="shader-language-server",
                        description="shader-language-server for macOS (x64) - built from source",
                        command=cargo_install_cmd,
                        platform_id="osx-x64",
                        binary_name="bin/shader-language-server",
                    ),
                    RuntimeDependency(
                        id="shader-language-server",
                        description="shader-language-server for macOS (ARM64) - built from source",
                        command=cargo_install_cmd,
                        platform_id="osx-arm64",
                        binary_name="bin/shader-language-server",
                    ),
                ]
            )
            try:
                dep = deps.get_single_dep_for_current_platform()
            except RuntimeError:
                # No matching entry for this platform; fall through to the
                # instructive FileNotFoundError below.
                dep = None
            if dep is None:
                raise FileNotFoundError(
                    "shader-language-server is not installed and no auto-install is available for your platform.\n"
                    "Please install it using one of the following methods:\n"
                    " cargo: cargo install shader_language_server\n"
                    " GitHub: Download from https://github.com/antaalt/shader-sense/releases\n"
                    "On macOS, install the Rust toolchain (https://rustup.rs) and Serena will build from source automatically.\n"
                    "See https://github.com/antaalt/shader-sense for more details."
                )
            install_dir = os.path.join(self._ls_resources_dir, "shader-language-server")
            executable_path = deps.binary_path(install_dir)
            if not os.path.exists(executable_path):
                log.info(f"shader-language-server not found. Downloading from {dep.url}")
                _ = deps.install(install_dir)
            if not os.path.exists(executable_path):
                raise FileNotFoundError(f"shader-language-server not found at {executable_path}")
            # Ensure the binary is executable; archives may not preserve the execute bit.
            os.chmod(executable_path, 0o755)
            return executable_path

        def _create_launch_command(self, core_path: str) -> list[str]:
            # The server communicates over stdio.
            return [core_path, "--stdio"]

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """Build the LSP `initialize` request parameters for the given repository root."""
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {"snippetSupport": True},
                    },
                    "definition": {"dynamicRegistration": True},
                    "hover": {
                        "dynamicRegistration": True,
                        "contentFormat": ["markdown", "plaintext"],
                    },
                    "signatureHelp": {
                        "dynamicRegistration": True,
                        "signatureInformation": {
                            "parameterInformation": {"labelOffsetSupport": True},
                        },
                    },
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "formatting": {"dynamicRegistration": True},
                    "publishDiagnostics": {"relatedInformation": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "configuration": True,
                },
            },
            "workspaceFolders": [{"uri": root_uri, "name": os.path.basename(repository_absolute_path)}],
        }
        return cast(InitializeParams, initialize_params)

    @override
    def _start_server(self) -> None:
        """Start the server process, register notification/request handlers, and run the initialize handshake."""

        def do_nothing(params: Any) -> None:
            return

        def on_log_message(params: Any) -> None:
            # Forward server-side log messages into our own logger.
            message = params.get("message", "") if isinstance(params, dict) else str(params)
            log.info(f"shader-language-server: {message}")

        def on_configuration_request(params: Any) -> list[dict]:
            """Respond to workspace/configuration requests.

            shader-language-server requests config with section 'shader-validator'.
            Return empty config to use defaults.
            """
            items = params.get("items", []) if isinstance(params, dict) else []
            # One (empty) config object per requested item, as the LSP spec requires.
            return [{}] * len(items)

        self.server.on_request("client/registerCapability", do_nothing)
        self.server.on_request("workspace/configuration", on_configuration_request)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        self.server.on_notification("window/logMessage", on_log_message)
        log.info("Starting shader-language-server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request")
        init_response = self.server.send.initialize(initialize_params)
        capabilities = init_response.get("capabilities", {})
        log.info(f"Initialize response capabilities: {list(capabilities.keys())}")
        # textDocumentSync is mandatory; symbol/definition support is merely warned about
        # because the server still works (degraded) without advertising them.
        assert "textDocumentSync" in capabilities, "shader-language-server must support textDocumentSync"
        if "documentSymbolProvider" not in capabilities:
            log.warning("shader-language-server does not advertise documentSymbolProvider")
        if "definitionProvider" not in capabilities:
            log.warning("shader-language-server does not advertise definitionProvider")
        self.server.notify.initialized({})

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        """Ignore Unity-specific directories that contain no user-authored shaders."""
        return super().is_ignored_dirname(dirname) or dirname in {"Library", "Temp", "Logs", "obj", "Packages"}
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/hlsl_language_server.py",
"license": "MIT License",
"lines": 187,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/ocaml_lsp_server.py | """
Provides OCaml and Reason specific instantiation of the SolidLanguageServer class.
Contains various configurations and settings specific to OCaml and Reason.
"""
import logging
import os
import pathlib
import platform
import re
import shutil
import stat
import subprocess
import threading
from typing import Any
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
from solidlsp.util.subprocess_util import subprocess_kwargs
log = logging.getLogger(__name__)
class OcamlLanguageServer(SolidLanguageServer):
    """
    Provides OCaml and Reason specific instantiation of the SolidLanguageServer class.
    Contains various configurations and settings specific to OCaml and Reason.
    """

    # Set in __init__ from the detected toolchain; used to gate cross-file features.
    _ocaml_version: tuple[int, int, int]
    _lsp_version: tuple[int, int, int]
    _index_built: bool

    # Minimum LSP version for reliable cross-file references
    MIN_LSP_VERSION_FOR_CROSS_FILE_REFS: tuple[int, int, int] = (1, 23, 0)

    @staticmethod
    def _ensure_opam_installed() -> None:
        """Ensure OPAM is installed and available.

        :raises RuntimeError: if `opam` is not on the PATH.
        """
        opam_path = shutil.which("opam")
        if opam_path is None:
            raise RuntimeError(
                "OPAM is not installed or not in PATH.\n"
                "Please install OPAM from: https://opam.ocaml.org/doc/Install.html\n\n"
                "Installation instructions:\n"
                " - macOS: brew install opam\n"
                " - Ubuntu/Debian: sudo apt install opam\n"
                " - Fedora: sudo dnf install opam\n"
                " - Windows: https://fdopen.github.io/opam-repository-mingw/installation/\n\n"
                "After installation, initialize OPAM with: opam init"
            )

    @staticmethod
    def _detect_ocaml_version(repository_root_path: str) -> tuple[int, int, int]:
        """
        Detect and return the OCaml version as a tuple (major, minor, patch).
        Also checks for version compatibility with ocaml-lsp-server.
        Raises RuntimeError if version cannot be determined.
        """
        try:
            result = subprocess.run(
                ["opam", "exec", "--", "ocaml", "-version"],
                check=True,
                capture_output=True,
                text=True,
                cwd=repository_root_path,
                **subprocess_kwargs(),
            )
            version_match = re.search(r"(\d+)\.(\d+)\.(\d+)", result.stdout)
            if version_match:
                major = int(version_match.group(1))
                minor = int(version_match.group(2))
                patch = int(version_match.group(3))
                version_tuple = (major, minor, patch)
                version_str = f"{major}.{minor}.{patch}"
                log.info(f"OCaml version: {version_str}")
                # OCaml 5.1.0 specifically is known-incompatible with ocaml-lsp-server.
                if version_tuple == (5, 1, 0):
                    raise RuntimeError(
                        f"OCaml {version_str} is incompatible with ocaml-lsp-server.\n"
                        "Please use OCaml < 5.1 or >= 5.1.1.\n"
                        "Consider creating a new opam switch:\n"
                        " opam switch create <name> ocaml-base-compiler.4.14.2"
                    )
                return version_tuple
            raise RuntimeError(
                f"Could not parse OCaml version from output: {result.stdout.strip()}\n"
                "Please ensure OCaml is properly installed: opam exec -- ocaml -version"
            )
        except subprocess.CalledProcessError as e:
            raise RuntimeError(
                f"Failed to detect OCaml version: {e.stderr}\n"
                "Please ensure OCaml is installed and opam is configured:\n"
                " opam switch show\n"
                " opam exec -- ocaml -version"
            ) from e
        except FileNotFoundError as e:
            raise RuntimeError(
                "OCaml not found. Please install OCaml via opam:\n"
                " opam switch create <name> ocaml-base-compiler.4.14.2\n"
                " eval $(opam env)"
            ) from e

    @staticmethod
    def _detect_lsp_version(repository_root_path: str) -> tuple[int, int, int]:
        """
        Detect and return the ocaml-lsp-server version as a tuple (major, minor, patch).
        Raises RuntimeError if version cannot be determined.
        """
        try:
            result = subprocess.run(
                ["opam", "list", "-i", "ocaml-lsp-server", "--columns=version", "--short"],
                check=True,
                capture_output=True,
                text=True,
                cwd=repository_root_path,
                **subprocess_kwargs(),
            )
            version_str = result.stdout.strip()
            version_match = re.search(r"(\d+)\.(\d+)\.(\d+)", version_str)
            if version_match:
                major = int(version_match.group(1))
                minor = int(version_match.group(2))
                patch = int(version_match.group(3))
                version_tuple = (major, minor, patch)
                log.info(f"ocaml-lsp-server version: {major}.{minor}.{patch}")
                return version_tuple
            raise RuntimeError(
                f"Could not parse ocaml-lsp-server version from output: {version_str}\n"
                "Please ensure ocaml-lsp-server is properly installed:\n"
                " opam list -i ocaml-lsp-server"
            )
        except subprocess.CalledProcessError as e:
            raise RuntimeError(
                f"Failed to detect ocaml-lsp-server version: {e.stderr}\n"
                "Please install ocaml-lsp-server:\n"
                " opam install ocaml-lsp-server"
            ) from e
        except FileNotFoundError as e:
            raise RuntimeError("opam not found. Please install opam:\n https://opam.ocaml.org/doc/Install.html") from e

    @staticmethod
    def _ensure_ocaml_lsp_installed(repository_root_path: str) -> str:
        """
        Ensure ocaml-lsp-server is installed and return the executable path.
        Raises RuntimeError with helpful message if not installed.
        """
        # Check if ocaml-lsp-server is installed
        try:
            result = subprocess.run(
                ["opam", "list", "-i", "ocaml-lsp-server"],
                check=False,
                capture_output=True,
                text=True,
                cwd=repository_root_path,
                **subprocess_kwargs(),
            )
            if "ocaml-lsp-server" not in result.stdout or "# No matches found" in result.stdout:
                raise RuntimeError(
                    "ocaml-lsp-server is not installed.\n\n"
                    "Please install it with:\n"
                    " opam install ocaml-lsp-server\n\n"
                    "Note: ocaml-lsp-server requires OCaml < 5.1 or >= 5.1.1 (OCaml 5.1.0 is not supported).\n"
                    "If you have OCaml 5.1.0, create a new opam switch with a compatible version:\n"
                    " opam switch create <name> ocaml-base-compiler.4.14.2\n"
                    " opam switch <name>\n"
                    " eval $(opam env)\n"
                    " opam install ocaml-lsp-server\n\n"
                    "For more information: https://github.com/ocaml/ocaml-lsp"
                )
            log.info("ocaml-lsp-server is installed")
        except subprocess.CalledProcessError as e:
            # Chain the cause (`from e`) for consistency with the other handlers in this class.
            raise RuntimeError(f"Failed to check ocaml-lsp-server installation: {e.stderr}") from e
        # Find the executable path
        try:
            if platform.system() == "Windows":
                result = subprocess.run(
                    ["opam", "exec", "--", "where", "ocamllsp"],
                    check=True,
                    capture_output=True,
                    text=True,
                    cwd=repository_root_path,
                    **subprocess_kwargs(),
                )
                # `where` can report multiple matches; take the first.
                executable_path = result.stdout.strip().split("\n")[0]
            else:
                result = subprocess.run(
                    ["opam", "exec", "--", "which", "ocamllsp"],
                    check=True,
                    capture_output=True,
                    text=True,
                    cwd=repository_root_path,
                    **subprocess_kwargs(),
                )
                executable_path = result.stdout.strip()
            if not os.path.exists(executable_path):
                raise RuntimeError(f"ocaml-lsp-server executable not found at {executable_path}")
            # Ensure the executable bit is set (no-op on Windows).
            if platform.system() != "Windows":
                os.chmod(executable_path, os.stat(executable_path).st_mode | stat.S_IEXEC)
            return executable_path
        except subprocess.CalledProcessError as e:
            raise RuntimeError(
                f"Failed to find ocaml-lsp-server executable.\n"
                f"Command failed: {e.cmd}\n"
                f"Return code: {e.returncode}\n"
                f"Stderr: {e.stderr}\n\n"
                "This usually means ocaml-lsp-server is not installed or not in PATH.\n"
                "Try:\n"
                " 1. Check opam switch: opam switch show\n"
                " 2. Install ocaml-lsp-server: opam install ocaml-lsp-server\n"
                " 3. Ensure opam env is activated: eval $(opam env)"
            ) from e

    @property
    def supports_cross_file_references(self) -> bool:
        """
        Check if this OCaml environment supports cross-file references.

        Cross-file references require OCaml >= 5.2 with project-wide occurrences
        AND ocaml-lsp-server >= 1.23.0 for reliable cross-file reference support.

        Full requirements:
        - OCaml 5.2+
        - ocaml-lsp-server >= 1.23.0 (earlier versions have unreliable cross-file refs)
        - merlin >= 5.1-502 (provides ocaml-index tool)
        - dune >= 3.16.0
        - Index built via `dune build @ocaml-index`
        - For best results: `dune build -w` running (enables dune RPC)

        Note: Even when this returns True, cross-file refs may not work in all
        cases. The LSP server needs dune's RPC server (via -w flag) to be fully
        aware of the index. Without watch mode, cross-file refs are best-effort.

        See: https://discuss.ocaml.org/t/ann-project-wide-occurrences-in-merlin-and-lsp/14847
        """
        ocaml_ok = self._ocaml_version >= (5, 2, 0)
        lsp_ok = self._lsp_version >= self.MIN_LSP_VERSION_FOR_CROSS_FILE_REFS
        return ocaml_ok and lsp_ok

    @staticmethod
    def _build_ocaml_index_static(repository_root_path: str) -> bool:
        """
        Build the OCaml index for project-wide occurrences.
        This enables cross-file reference finding on OCaml 5.2+.
        Must be called BEFORE starting the LSP server.
        Returns True if successful, False otherwise.
        """
        log.info("Building OCaml index for cross-file references (dune build @ocaml-index)...")
        try:
            result = subprocess.run(
                ["opam", "exec", "--", "dune", "build", "@ocaml-index"],
                cwd=repository_root_path,
                capture_output=True,
                text=True,
                timeout=120,
                check=False,
                **subprocess_kwargs(),
            )
            if result.returncode == 0:
                log.info("OCaml index built successfully")
                return True
            else:
                log.warning(f"Failed to build OCaml index: {result.stderr}")
                return False
        except subprocess.TimeoutExpired:
            log.warning("OCaml index build timed out after 120 seconds")
            return False
        except FileNotFoundError:
            log.warning("opam not found, cannot build OCaml index")
            return False
        except Exception as e:
            # Index building is best-effort: any failure only disables cross-file refs.
            log.warning(f"Error building OCaml index: {e}")
            return False

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates an OcamlLanguageServer instance.
        This class is not meant to be instantiated directly. Use SolidLanguageServer.create() instead.
        """
        # Ensure dependencies are available
        self._ensure_opam_installed()
        # Detect OCaml version for feature gating
        self._ocaml_version = self._detect_ocaml_version(repository_root_path)
        self._index_built = False
        # Verify ocaml-lsp-server is installed (we don't need the path, just validation)
        self._ensure_ocaml_lsp_installed(repository_root_path)
        # Detect LSP version for cross-file reference support
        self._lsp_version = self._detect_lsp_version(repository_root_path)
        # Build OCaml index BEFORE starting server (required for cross-file refs on OCaml 5.2+)
        if self._ocaml_version >= (5, 2, 0):
            self._index_built = self._build_ocaml_index_static(repository_root_path)
        # Use opam exec to run ocamllsp - this ensures correct opam environment
        # which is required for project-wide occurrences (cross-file references) to work
        ocaml_lsp_cmd = ["opam", "exec", "--", "ocamllsp", "--fallback-read-dot-merlin"]
        log.info(f"Using ocaml-lsp-server via: {' '.join(ocaml_lsp_cmd)}")
        super().__init__(
            config,
            repository_root_path,
            ProcessLaunchInfo(cmd=ocaml_lsp_cmd, cwd=repository_root_path),
            "ocaml",
            solidlsp_settings,
        )
        self.server_ready = threading.Event()
        self.completions_available = threading.Event()

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        """Define language-specific directories to ignore for OCaml projects."""
        return super().is_ignored_dirname(dirname) or dirname in ["_build", "_opam", ".opam"]

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the OCaml Language Server.
        Supports both OCaml and Reason.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "processId": os.getpid(),
            "clientInfo": {"name": "Serena", "version": "0.1.0"},
            "locale": "en",
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "capabilities": {
                "workspace": {
                    "workspaceFolders": True,
                    "configuration": True,
                },
                "textDocument": {
                    "synchronization": {
                        "dynamicRegistration": True,
                        "willSave": True,
                        "willSaveWaitUntil": True,
                        "didSave": True,
                    },
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                        },
                    },
                    "hover": {
                        "dynamicRegistration": True,
                        "contentFormat": ["markdown", "plaintext"],
                    },
                    "definition": {"dynamicRegistration": True, "linkSupport": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                    },
                    "formatting": {"dynamicRegistration": True},
                    "rename": {"dynamicRegistration": True, "prepareSupport": True},
                },
            },
            "trace": "verbose",
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore[return-value]

    def _start_server(self) -> None:
        """
        Starts the OCaml Language Server (supports both OCaml and Reason)
        """

        def register_capability_handler(params: Any) -> None:
            if "registrations" in params:
                for registration in params.get("registrations", []):
                    method = registration.get("method", "")
                    log.info(f"OCaml LSP registered capability: {method}")
            return

        def lang_status_handler(params: dict[str, Any]) -> None:
            if params.get("type") == "ServiceReady" and params.get("message") == "ServiceReady":
                self.server_ready.set()

        def do_nothing(params: Any) -> None:
            return

        def window_log_message(msg: dict[str, Any]) -> None:
            log.info(f"LSP: window/logMessage: {msg}")
            if "initialization done" in msg.get("message", "").lower():
                self.server_ready.set()

        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("language/status", lang_status_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting OCaml LSP server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        # Verify expected capabilities
        capabilities = init_response.get("capabilities", {})
        log.info(f"OCaml LSP capabilities: {list(capabilities.keys())}")
        text_doc_sync = capabilities.get("textDocumentSync")
        if isinstance(text_doc_sync, dict):
            assert text_doc_sync.get("change") == 2, "Expected incremental sync"
        assert "completionProvider" in capabilities, "Expected completion support"
        self.server.notify.initialized({})
        self.completions_available.set()
        self.server_ready.set()
        log.info("OCaml Language Server initialized successfully")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/ocaml_lsp_server.py",
"license": "MIT License",
"lines": 388,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/solidlsp/hlsl/test_hlsl_basic.py | """
Basic tests for HLSL language server integration (shader-language-server).
This module tests Language.HLSL using shader-language-server from antaalt/shader-sense.
Tests are skipped if the language server is not available.
"""
from typing import Any
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_exceptions import SolidLSPException
from solidlsp.ls_utils import SymbolUtils
def _find_symbol_by_name(language_server: SolidLanguageServer, file_path: str, name: str) -> dict[str, Any] | None:
    """Return the first top-level document symbol in *file_path* named *name*, or None."""
    all_symbols_and_roots = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
    for candidate in all_symbols_and_roots[0]:
        if candidate.get("name") == name:
            return candidate
    return None
# ── Symbol Discovery ─────────────────────────────────────────────
@pytest.mark.hlsl
class TestHlslSymbols:
    """Document-symbol extraction against the HLSL test repository."""

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_find_struct(self, language_server: SolidLanguageServer) -> None:
        """The VertexInput struct must be reported among common.hlsl's symbols."""
        found = _find_symbol_by_name(language_server, "common.hlsl", "VertexInput")
        assert found is not None, "Expected 'VertexInput' struct in document symbols"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_find_function(self, language_server: SolidLanguageServer) -> None:
        """The SafeNormalize function must be reported among common.hlsl's symbols."""
        found = _find_symbol_by_name(language_server, "common.hlsl", "SafeNormalize")
        assert found is not None, "Expected 'SafeNormalize' function in document symbols"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_find_cbuffer_members(self, language_server: SolidLanguageServer) -> None:
        """Cbuffer members must be reported as variables in compute_test.hlsl.

        Note: shader-language-server reports cbuffer members as individual
        variables (kind 13), not the cbuffer name itself as a symbol.
        """
        found = _find_symbol_by_name(language_server, "compute_test.hlsl", "TextureSize")
        assert found is not None, "Expected 'TextureSize' cbuffer member in document symbols"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_find_compute_kernel(self, language_server: SolidLanguageServer) -> None:
        """The CSMain kernel must be reported among compute_test.hlsl's symbols."""
        found = _find_symbol_by_name(language_server, "compute_test.hlsl", "CSMain")
        assert found is not None, "Expected 'CSMain' compute kernel in document symbols"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_full_symbol_tree(self, language_server: SolidLanguageServer) -> None:
        """The repository-wide symbol tree must contain symbols from multiple files."""
        tree = language_server.request_full_symbol_tree()
        assert SymbolUtils.symbol_tree_contains_name(tree, "VertexInput"), "VertexInput not in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(tree, "CalculateDiffuse"), "CalculateDiffuse not in symbol tree"
# ── Go-to-Definition ─────────────────────────────────────────────
@pytest.mark.hlsl
class TestHlslDefinition:
    """Go-to-definition across files in the HLSL test repository."""

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_goto_definition_cross_file(self, language_server: SolidLanguageServer) -> None:
        """The SafeNormalize call in lighting.hlsl must resolve to common.hlsl.

        lighting.hlsl line 22 (0-indexed): " float3 halfVec = SafeNormalize(-lightDir + viewDir);"
        SafeNormalize starts at column 21.
        """
        definitions = language_server.request_definition("lighting.hlsl", 22, 21)
        assert len(definitions) >= 1, f"Expected at least 1 definition, got {len(definitions)}"
        def_paths = [loc.get("relativePath", loc.get("uri", "")) for loc in definitions]
        assert any("common.hlsl" in path for path in def_paths), f"Expected definition in common.hlsl, got: {def_paths}"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_goto_definition_cross_file_remap(self, language_server: SolidLanguageServer) -> None:
        """The Remap call in compute_test.hlsl must resolve to common.hlsl.

        compute_test.hlsl line 20 (0-indexed): " Remap(color.r, 0.0, 1.0, 0.2, 0.8),"
        Remap starts at column 8.
        """
        definitions = language_server.request_definition("compute_test.hlsl", 20, 8)
        assert len(definitions) >= 1, f"Expected at least 1 definition, got {len(definitions)}"
        def_paths = [loc.get("relativePath", loc.get("uri", "")) for loc in definitions]
        assert any("common.hlsl" in path for path in def_paths), f"Expected definition in common.hlsl, got: {def_paths}"
# ── References ────────────────────────────────────────────────────
@pytest.mark.hlsl
class TestHlslReferences:
    """Tests for find-references capability.

    shader-language-server does not advertise referencesProvider, so
    request_references is expected to return an empty list.
    """

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_references_not_supported(self, language_server: SolidLanguageServer) -> None:
        """References request should raise because shader-language-server does not support it.

        common.hlsl line 17 (0-indexed): "float3 SafeNormalize(float3 v)"
        SafeNormalize starts at column 7.
        """
        # The server rejects the unsupported method with an LSP "Method not found" error,
        # which the client surfaces as SolidLSPException.
        with pytest.raises(SolidLSPException, match="Method not found"):
            language_server.request_references("common.hlsl", 17, 7)
# ── Hover ─────────────────────────────────────────────────────────
def _extract_hover_text(hover_info: dict[str, Any]) -> str:
"""Extract the text content from an LSP hover response."""
contents = hover_info["contents"]
if isinstance(contents, dict):
return contents.get("value", "")
elif isinstance(contents, str):
return contents
return str(contents)
@pytest.mark.hlsl
class TestHlslHover:
    """Hover-information tests for the HLSL language server."""

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_hover_on_function(self, language_server: SolidLanguageServer) -> None:
        """Hover over the SafeNormalize definition returns non-empty contents.

        common.hlsl line 17 (0-indexed): "float3 SafeNormalize(float3 v)",
        with SafeNormalize starting at column 7.
        """
        result = language_server.request_hover("common.hlsl", 17, 7)
        assert result is not None, "Hover should return information for SafeNormalize"
        assert "contents" in result, "Hover should have contents"
        assert len(_extract_hover_text(result)) > 0, "Hover text should not be empty"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_hover_on_struct(self, language_server: SolidLanguageServer) -> None:
        """Hover over the VertexInput struct declaration returns hover contents.

        common.hlsl line 3 (0-indexed): "struct VertexInput",
        with VertexInput starting at column 7.
        """
        result = language_server.request_hover("common.hlsl", 3, 7)
        assert result is not None, "Hover should return information for VertexInput"
        assert "contents" in result, "Hover should have contents"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/hlsl/test_hlsl_basic.py",
"license": "MIT License",
"lines": 119,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/hlsl/test_hlsl_full_index.py | """
Regression tests for HLSL full symbol tree indexing.
These tests verify that request_full_symbol_tree() correctly indexes all files,
including .hlsl includes in subdirectories. This catches bugs where files are
silently dropped during workspace-wide indexing.
"""
from typing import Any
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_types import SymbolKind
from solidlsp.ls_utils import SymbolUtils
def _collect_file_names(symbols: list[dict[str, Any]]) -> set[str]:
"""Recursively collect the names of all File-kind symbols in the tree."""
names: set[str] = set()
for sym in symbols:
if sym.get("kind") == SymbolKind.File:
names.add(sym["name"])
if "children" in sym:
names.update(_collect_file_names(sym["children"]))
return names
# Stems of all .hlsl files the test repository is expected to contain.
EXPECTED_FILES = {"common", "lighting", "compute_test", "terrain_sdf"}
# Symbols defined only in terrain/terrain_sdf.hlsl — used to prove that the
# subdirectory include file was actually indexed.
TERRAIN_SDF_UNIQUE_SYMBOLS = {"SampleSDF", "CalculateGradient", "SDFBrickData"}
@pytest.mark.hlsl
class TestHlslFullIndex:
    """Completeness checks for workspace-wide symbol-tree indexing."""

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_all_files_indexed_in_symbol_tree(self, language_server: SolidLanguageServer) -> None:
        """Every .hlsl file in the test repo must appear as a File symbol in the tree."""
        tree = language_server.request_full_symbol_tree()
        indexed = _collect_file_names(tree)
        missing = EXPECTED_FILES - indexed
        assert not missing, f"Files missing from full symbol tree: {missing}. Found: {indexed}"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_subdirectory_file_symbols_present(self, language_server: SolidLanguageServer) -> None:
        """Symbols unique to terrain/terrain_sdf.hlsl must appear in the full tree."""
        tree = language_server.request_full_symbol_tree()
        for expected in TERRAIN_SDF_UNIQUE_SYMBOLS:
            assert SymbolUtils.symbol_tree_contains_name(
                tree, expected
            ), f"Expected '{expected}' from terrain/terrain_sdf.hlsl in full symbol tree"

    @pytest.mark.parametrize("language_server", [Language.HLSL], indirect=True)
    def test_include_file_document_symbols_directly(self, language_server: SolidLanguageServer) -> None:
        """request_document_symbols on terrain/terrain_sdf.hlsl should return its symbols."""
        symbols_and_roots = language_server.request_document_symbols("terrain/terrain_sdf.hlsl").get_all_symbols_and_roots()
        names = {entry.get("name") for entry in symbols_and_roots[0]}
        for expected in TERRAIN_SDF_UNIQUE_SYMBOLS:
            assert expected in names, f"Expected '{expected}' in document symbols for terrain/terrain_sdf.hlsl, got: {names}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/hlsl/test_hlsl_full_index.py",
"license": "MIT License",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/ocaml/test_cross_file_refs.py | """
Test cross-file references for OCaml.
Cross-file references require OCaml >= 5.2 and ocaml-lsp-server >= 1.23.0.
On environments without these (e.g. Windows CI with OCaml 4.14), only
same-file references are asserted.
"""
import logging
import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.language_servers.ocaml_lsp_server import OcamlLanguageServer
from solidlsp.ls_config import Language
log = logging.getLogger(__name__)
@pytest.mark.ocaml
class TestCrossFileReferences:
    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_fib_has_cross_file_references(self, language_server: SolidLanguageServer) -> None:
        """Test that fib function references are found across multiple files.

        The `fib` function is defined in lib/test_repo.ml and used in:
        - lib/test_repo.ml (definition + 2 recursive calls)
        - bin/main.ml (1 call)
        - test/test_test_repo.ml (5 references)
        Total: 9 references across 3 files.
        """
        file_path = os.path.join("lib", "test_repo.ml")
        # 0-indexed position of 'fib' in "let rec fib n =" (file line 8).
        fib_line = 7
        fib_char = 8
        refs = language_server.request_references(file_path, fib_line, fib_char)
        # Bucket references by containing file; URIs always use forward slashes.
        lib_refs = [ref for ref in refs if "lib/test_repo.ml" in ref.get("uri", "")]
        bin_refs = [ref for ref in refs if "bin/main.ml" in ref.get("uri", "")]
        test_refs = [ref for ref in refs if "test/test_test_repo.ml" in ref.get("uri", "")]
        log.info("Cross-file references result:")
        log.info(f"Total references found: {len(refs)}")
        log.info(f"  lib/test_repo.ml: {len(lib_refs)}")
        log.info(f"  bin/main.ml: {len(bin_refs)}")
        log.info(f"  test/test_test_repo.ml: {len(test_refs)}")
        for ref in refs:
            uri = ref.get("uri", "")
            filename = uri.split("/")[-1]
            line = ref.get("range", {}).get("start", {}).get("line", -1)
            # Fix: log the actual file name — `filename` was computed but
            # unused while the message printed a literal "(unknown)".
            log.info(f"  {filename}:{line}")
        # Same-file references always work
        assert len(lib_refs) >= 3, f"Expected at least 3 references in lib/test_repo.ml (definition + 2 recursive), but got {len(lib_refs)}"
        # Cross-file references require OCaml >= 5.2 and ocaml-lsp-server >= 1.23.0
        if isinstance(language_server, OcamlLanguageServer) and language_server.supports_cross_file_references:
            assert len(refs) >= 9, (
                f"Expected at least 9 total references (3 in lib + 1 in bin + 5 in test), "
                f"but got {len(refs)}. Cross-file references are NOT working!"
            )
            assert len(bin_refs) >= 1, (
                f"Expected at least 1 reference in bin/main.ml, but got {len(bin_refs)}. "
                "Cross-file references are NOT working - bin/main.ml not found!"
            )
            assert len(test_refs) >= 1, (
                f"Expected at least 1 reference in test/test_test_repo.ml, but got {len(test_refs)}. "
                "Cross-file references are NOT working - test file not found!"
            )
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/ocaml/test_cross_file_refs.py",
"license": "MIT License",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/ocaml/test_ocaml_basic.py | import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_utils import SymbolUtils
@pytest.mark.ocaml
class TestOCamlLanguageServer:
    """End-to-end checks for ocaml-lsp-server: symbols, references, file matching."""

    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """Core symbols (DemoModule, fib, someFunction) appear in the full symbol tree."""
        symbols = language_server.request_full_symbol_tree()
        assert SymbolUtils.symbol_tree_contains_name(symbols, "DemoModule"), "DemoModule not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "fib"), "fib not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "someFunction"), "someFunction function not found in symbol tree"

    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_find_referencing_symbols(self, language_server: SolidLanguageServer) -> None:
        """Same-file references to fib (definition + recursive calls) are found."""
        file_path = os.path.join("lib", "test_repo.ml")
        # Use the correct character position for 'fib' function name
        # Line 8: "let rec fib n =" - 'fib' starts at character 8 (0-indexed)
        fib_line = 7  # 0-indexed line number
        fib_char = 8  # 0-indexed character position
        refs = language_server.request_references(file_path, fib_line, fib_char)
        # Should find at least 3 references: definition + 2 recursive calls in same file
        assert len(refs) >= 3, f"Expected at least 3 references to fib (definition + 2 recursive), found {len(refs)}"
        # All references should be in lib/test_repo.ml (same file as definition)
        # Use forward slashes for URI matching (URIs always use /)
        lib_refs = [ref for ref in refs if "lib/test_repo.ml" in ref.get("uri", "")]
        assert len(lib_refs) >= 3, f"Expected at least 3 references in lib/test_repo.ml, found {len(lib_refs)}"

    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_mixed_ocaml_modules(self, language_server: SolidLanguageServer) -> None:
        """Test that the language server can find symbols from OCaml modules"""
        # Test that full symbol tree includes symbols from various file types
        all_symbols = language_server.request_full_symbol_tree()
        # Should find symbols from main OCaml files
        assert SymbolUtils.symbol_tree_contains_name(all_symbols, "fib"), "Should find fib from .ml file"
        assert SymbolUtils.symbol_tree_contains_name(all_symbols, "DemoModule"), "Should find DemoModule from .ml file"
        assert SymbolUtils.symbol_tree_contains_name(all_symbols, "someFunction"), "Should find someFunction from DemoModule"
        assert SymbolUtils.symbol_tree_contains_name(all_symbols, "num_domains"), "Should find num_domains constant"

    def test_reason_file_patterns(self) -> None:
        """Test that OCaml language configuration recognizes Reason file extensions"""
        # Language is imported at module scope; the previous function-local
        # re-import was redundant and has been removed.
        ocaml_lang = Language.OCAML
        file_matcher = ocaml_lang.get_source_fn_matcher()
        # Test OCaml extensions
        assert file_matcher.is_relevant_filename("test.ml"), "Should match .ml files"
        assert file_matcher.is_relevant_filename("test.mli"), "Should match .mli files"
        # Test Reason extensions
        assert file_matcher.is_relevant_filename("test.re"), "Should match .re files"
        assert file_matcher.is_relevant_filename("test.rei"), "Should match .rei files"
        # Test non-matching extensions
        assert not file_matcher.is_relevant_filename("test.py"), "Should not match .py files"
        assert not file_matcher.is_relevant_filename("test.js"), "Should not match .js files"

    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_module_hierarchy_navigation(self, language_server: SolidLanguageServer) -> None:
        """Test navigation within module hierarchy including DemoModule."""
        file_path = os.path.join("lib", "test_repo.ml")
        # Use correct position for 'DemoModule' (line 1, char 7)
        # Line 1: "module DemoModule = struct" - 'DemoModule' starts around char 7
        module_line = 0  # 0-indexed
        module_char = 7  # 0-indexed
        refs = language_server.request_references(file_path, module_line, module_char)
        # Should find at least 1 reference (the definition)
        assert len(refs) >= 1, f"Expected at least 1 reference to DemoModule, found {len(refs)}"
        # Check that references are found - use forward slashes for URI matching
        lib_refs = [ref for ref in refs if "lib/test_repo.ml" in ref.get("uri", "")]
        assert len(lib_refs) >= 1, f"Expected at least 1 reference in lib/test_repo.ml, found {len(lib_refs)}"

    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_let_binding_references(self, language_server: SolidLanguageServer) -> None:
        """Test finding references to let-bound values across files."""
        file_path = os.path.join("lib", "test_repo.ml")
        # Use correct position for 'num_domains' (line 12, char 4)
        # Line 12: "let num_domains = 2" - 'num_domains' starts around char 4
        num_domains_line = 11  # 0-indexed
        num_domains_char = 4  # 0-indexed
        refs = language_server.request_references(file_path, num_domains_line, num_domains_char)
        # Should find at least 1 reference (the definition)
        assert len(refs) >= 1, f"Expected at least 1 reference to num_domains, found {len(refs)}"
        # Check that reference is found in the definition file - use forward slashes
        ml_refs = [ref for ref in refs if "lib/test_repo.ml" in ref.get("uri", "")]
        assert len(ml_refs) >= 1, f"Expected at least 1 reference in lib/test_repo.ml, found {len(ml_refs)}"

    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_recursive_function_analysis(self, language_server: SolidLanguageServer) -> None:
        """Test that recursive function calls are properly identified within the definition file."""
        file_path = os.path.join("lib", "test_repo.ml")
        # Use correct position for 'fib' function name (line 8, char 8)
        fib_line = 7  # 0-indexed
        fib_char = 8  # 0-indexed
        refs = language_server.request_references(file_path, fib_line, fib_char)
        # Filter to references within the definition file only - use forward slashes
        same_file_refs = [ref for ref in refs if "lib/test_repo.ml" in ref.get("uri", "")]
        # Should find at least 3 references in test_repo.ml: definition + 2 recursive calls
        # On OCaml 5.2+ with cross-file refs, there may be more total refs but same-file count stays the same
        assert (
            len(same_file_refs) >= 3
        ), f"Expected at least 3 references in test_repo.ml (definition + 2 recursive), found {len(same_file_refs)}"
        # Verify references are on different lines (definition + recursive calls)
        ref_lines = [ref.get("range", {}).get("start", {}).get("line", -1) for ref in same_file_refs]
        unique_lines = len(set(ref_lines))
        assert unique_lines >= 2, f"Recursive calls should appear on multiple lines, found {unique_lines} unique lines"

    @pytest.mark.parametrize("language_server", [Language.OCAML], indirect=True)
    def test_open_statement_resolution(self, language_server: SolidLanguageServer) -> None:
        """Test that open statements allow unqualified access to module contents."""
        # In bin/main.ml, fib is called without Test_repo prefix due to 'open Test_repo'
        all_symbols = language_server.request_full_symbol_tree()
        # Should be able to find fib through symbol tree
        fib_accessible = SymbolUtils.symbol_tree_contains_name(all_symbols, "fib")
        assert fib_accessible, "fib should be accessible through open statement"
        # DemoModule should also be accessible
        demo_module_accessible = SymbolUtils.symbol_tree_contains_name(all_symbols, "DemoModule")
        assert demo_module_accessible, "DemoModule should be accessible"
        # Verify we have access to both qualified and unqualified symbols
        assert len(all_symbols) > 0, "Should find symbols from OCaml files"
        # Test that the language server recognizes the open statement context
        file_path = os.path.join("bin", "main.ml")
        symbols, _roots = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        assert len(symbols) > 0, "Should find symbols in main.ml that use opened modules"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/ocaml/test_ocaml_basic.py",
"license": "MIT License",
"lines": 115,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:src/solidlsp/language_servers/systemverilog_server.py | """
SystemVerilog language server using verible-verilog-ls.
"""
import logging
import os
import pathlib
import shutil
import subprocess
from typing import Any, cast
from solidlsp.ls import LanguageServerDependencyProvider, LanguageServerDependencyProviderSinglePath, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
from .common import RuntimeDependency, RuntimeDependencyCollection
log = logging.getLogger(__name__)
class SystemVerilogLanguageServer(SolidLanguageServer):
    """
    SystemVerilog language server using verible-verilog-ls.
    Supports .sv, .svh, .v, .vh files.
    """
    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings) -> None:
        # Third argument (launch configuration) is None; the launch command is
        # produced later by the DependencyProvider. "systemverilog" is the
        # LSP language id passed to the base class.
        super().__init__(config, repository_root_path, None, "systemverilog", solidlsp_settings)
    def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
        """Create the provider that locates or installs verible-verilog-ls."""
        return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)
    class DependencyProvider(LanguageServerDependencyProviderSinglePath):
        # Resolves the verible-verilog-ls executable: a binary already on PATH
        # wins; otherwise an official release archive is downloaded.
        def _get_or_install_core_dependency(self) -> str:
            """Return the path to a runnable verible-verilog-ls binary.

            Raises FileNotFoundError when no binary is on PATH and no prebuilt
            release exists for the current platform, or when the download did
            not produce the expected executable.
            """
            # 1. Check PATH first for system-installed verible
            system_verible = shutil.which("verible-verilog-ls")
            if system_verible:
                # Log version information
                try:
                    result = subprocess.run(
                        [system_verible, "--version"],
                        capture_output=True,
                        text=True,
                        check=False,
                        timeout=5,
                    )
                    if result.returncode == 0:
                        version_info = result.stdout.strip().split("\n")[0]
                        log.info(f"Using system-installed verible-verilog-ls: {version_info}")
                    else:
                        log.info(f"Using system-installed verible-verilog-ls at {system_verible}")
                except Exception:
                    # Version probe is best-effort; any failure (timeout, exec
                    # error) still uses the system binary.
                    log.info(f"Using system-installed verible-verilog-ls at {system_verible}")
                return system_verible
            # 2. Not found in PATH, try to download
            # The version can be overridden via the "verible_version" custom setting.
            verible_version = self._custom_settings.get("verible_version", "v0.0-4051-g9fdb4057")
            base_url = f"https://github.com/chipsalliance/verible/releases/download/{verible_version}"
            # One entry per supported platform; macOS ships a single archive
            # that is registered for both osx-x64 and osx-arm64.
            deps = RuntimeDependencyCollection(
                [
                    RuntimeDependency(
                        id="verible-ls",
                        description="verible-verilog-ls for Linux (x64)",
                        url=f"{base_url}/verible-{verible_version}-linux-static-x86_64.tar.gz",
                        platform_id="linux-x64",
                        archive_type="gztar",
                        binary_name=f"verible-{verible_version}/bin/verible-verilog-ls",
                    ),
                    RuntimeDependency(
                        id="verible-ls",
                        description="verible-verilog-ls for Linux (arm64)",
                        url=f"{base_url}/verible-{verible_version}-linux-static-arm64.tar.gz",
                        platform_id="linux-arm64",
                        archive_type="gztar",
                        binary_name=f"verible-{verible_version}/bin/verible-verilog-ls",
                    ),
                    RuntimeDependency(
                        id="verible-ls",
                        description="verible-verilog-ls for macOS",
                        url=f"{base_url}/verible-{verible_version}-macOS.tar.gz",
                        platform_id="osx-x64",
                        archive_type="gztar",
                        binary_name=f"verible-{verible_version}/bin/verible-verilog-ls",
                    ),
                    RuntimeDependency(
                        id="verible-ls",
                        description="verible-verilog-ls for macOS",
                        url=f"{base_url}/verible-{verible_version}-macOS.tar.gz",
                        platform_id="osx-arm64",
                        archive_type="gztar",
                        binary_name=f"verible-{verible_version}/bin/verible-verilog-ls",
                    ),
                    RuntimeDependency(
                        id="verible-ls",
                        description="verible-verilog-ls for Windows (x64)",
                        url=f"{base_url}/verible-{verible_version}-win64.zip",
                        platform_id="win-x64",
                        archive_type="zip",
                        binary_name=f"verible-{verible_version}/bin/verible-verilog-ls.exe",
                    ),
                ]
            )
            try:
                dep = deps.get_single_dep_for_current_platform()
            except RuntimeError:
                # No matching platform entry; converted to a clearer error below.
                dep = None
            if dep is None:
                raise FileNotFoundError(
                    "verible-verilog-ls is not installed on your system.\n"
                    + "Please install verible using one of the following methods:\n"
                    + "  conda: conda install -c conda-forge verible\n"
                    + "  Homebrew: brew install verible\n"
                    + "  GitHub: Download from https://github.com/chipsalliance/verible/releases\n"
                    + "See https://github.com/chipsalliance/verible for more details."
                )
            verible_ls_dir = os.path.join(self._ls_resources_dir, "verible-ls")
            executable_path = deps.binary_path(verible_ls_dir)
            # Download only when the binary is not already cached locally.
            if not os.path.exists(executable_path):
                log.info(f"verible-verilog-ls not found. Downloading from {dep.url}")
                _ = deps.install(verible_ls_dir)
            if not os.path.exists(executable_path):
                raise FileNotFoundError(f"verible-verilog-ls not found at {executable_path}")
            # Archive extraction may not preserve the executable bit; set it explicitly.
            os.chmod(executable_path, 0o755)
            return executable_path
        def _create_launch_command(self, core_path: str) -> list[str]:
            # verible-verilog-ls needs no extra command-line arguments.
            return [core_path]
    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """Build the LSP ``initialize`` request parameters for the given workspace root."""
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {"snippetSupport": True},
                    },
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "hover": {
                        "dynamicRegistration": True,
                        "contentFormat": ["markdown", "plaintext"],
                    },
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        # All SymbolKind values defined by the LSP spec (1..26).
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "codeAction": {"dynamicRegistration": True},
                    "formatting": {"dynamicRegistration": True},
                    "documentHighlight": {"dynamicRegistration": True},
                    "publishDiagnostics": {"relatedInformation": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                },
            },
            "workspaceFolders": [{"uri": root_uri, "name": os.path.basename(repository_absolute_path)}],
        }
        return cast(InitializeParams, initialize_params)
    def _start_server(self) -> None:
        """Start verible-verilog-ls, perform the LSP handshake, and validate capabilities."""
        def do_nothing(params: Any) -> None:
            # Sink handler for notifications/requests we deliberately ignore.
            return
        def on_log_message(params: Any) -> None:
            # Forward server-side window/logMessage payloads into our logger.
            message = params.get("message", "") if isinstance(params, dict) else str(params)
            log.info(f"verible-verilog-ls: {message}")
        self.server.on_request("client/registerCapability", do_nothing)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        self.server.on_notification("window/logMessage", on_log_message)
        log.info("Starting verible-verilog-ls process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request")
        init_response = self.server.send.initialize(initialize_params)
        # Validate server capabilities (follows Gopls/Bash pattern)
        capabilities = init_response.get("capabilities", {})
        log.info(f"Initialize response capabilities: {list(capabilities.keys())}")
        assert "textDocumentSync" in capabilities, "verible-verilog-ls must support textDocumentSync"
        if "documentSymbolProvider" not in capabilities:
            log.warning("verible-verilog-ls does not advertise documentSymbolProvider")
        if "definitionProvider" not in capabilities:
            log.warning("verible-verilog-ls does not advertise definitionProvider")
        self.server.notify.initialized({})
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/systemverilog_server.py",
"license": "MIT License",
"lines": 180,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/solidlsp/systemverilog/test_systemverilog_basic.py | """
Basic tests for SystemVerilog language server integration (verible-verilog-ls).
This module tests Language.SYSTEMVERILOG using verible-verilog-ls.
Tests are skipped if the language server is not available.
"""
from typing import Any
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_utils import SymbolUtils
def _find_symbol_by_name(language_server: SolidLanguageServer, file_path: str, name: str) -> dict[str, Any] | None:
    """Return the first top-level document symbol named *name*, or None if absent."""
    symbols_and_roots = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
    for candidate in symbols_and_roots[0]:
        if candidate.get("name") == name:
            return candidate
    return None
def _get_symbol_selection_start(language_server: SolidLanguageServer, file_path: str, name: str) -> tuple[int, int]:
    """Return the 0-indexed (line, character) where *name*'s selectionRange begins."""
    symbol = _find_symbol_by_name(language_server, file_path, name)
    assert symbol is not None, f"Could not find symbol '{name}' in {file_path}"
    assert "selectionRange" in symbol, f"Symbol '{name}' has no selectionRange in {file_path}"
    start = symbol["selectionRange"]["start"]
    return (start["line"], start["character"])
@pytest.mark.systemverilog
class TestSystemVerilogSymbols:
    """Document-symbol extraction tests for verible-verilog-ls."""

    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """The workspace symbol tree contains the 'counter' module."""
        tree = language_server.request_full_symbol_tree()
        assert SymbolUtils.symbol_tree_contains_name(tree, "counter"), "Module 'counter' not found in symbol tree"

    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_get_document_symbols(self, language_server: SolidLanguageServer) -> None:
        """counter.sv's document symbols include the 'counter' module."""
        found = _find_symbol_by_name(language_server, "counter.sv", "counter")
        assert found is not None, "Expected 'counter' in document symbols"

    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_find_top_module(self, language_server: SolidLanguageServer) -> None:
        """The 'top' module, which instantiates counter cross-file, is indexed too."""
        tree = language_server.request_full_symbol_tree()
        assert SymbolUtils.symbol_tree_contains_name(tree, "top"), "Module 'top' not found in symbol tree"
@pytest.mark.systemverilog
class TestSystemVerilogDefinition:
    """Tests for go-to-definition functionality."""
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_goto_definition(self, language_server: SolidLanguageServer) -> None:
        """Test go to definition from signal usage to its declaration.

        Navigating from 'count' usage in always_ff (line 13) should jump
        to the output port declaration (line 7, char 29).
        """
        # counter.sv line 13 (0-indexed): "  count <= '0;"
        # 'count' at char 12
        definitions = language_server.request_definition("counter.sv", 13, 12)
        assert len(definitions) >= 1, f"Expected at least 1 definition, got {len(definitions)}"
        # Keep only results that resolve into counter.sv itself.
        def_in_counter = [d for d in definitions if "counter.sv" in (d.get("relativePath") or "")]
        assert len(def_in_counter) >= 1, f"Expected definition in counter.sv, got: {[d.get('relativePath') for d in definitions]}"
        assert (
            def_in_counter[0]["range"]["start"]["line"] == 7
        ), f"Expected definition at line 7 (output port count), got line {def_in_counter[0]['range']['start']['line']}"
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_goto_definition_cross_file(self, language_server: SolidLanguageServer) -> None:
        """Test go to definition from module instantiation in top.sv to counter.sv.

        This is the key cross-file test: navigating from an instantiation
        (counter in top.sv) to its definition (counter.sv).
        """
        # top.sv line 17 (0-indexed: 16): "    counter #(.WIDTH(8)) u_counter ("
        # "counter" starts at column 4
        definitions = language_server.request_definition("top.sv", 16, 4)
        assert len(definitions) >= 1, f"Expected at least 1 definition, got {len(definitions)}"
        def_paths = [d.get("relativePath", "") for d in definitions]
        assert any("counter.sv" in p for p in def_paths), f"Expected definition in counter.sv, got: {def_paths}"
        counter_defs = [d for d in definitions if "counter.sv" in (d.get("relativePath") or "")]
        # Line 1 (0-indexed) is the "module counter" declaration line.
        assert (
            counter_defs[0]["range"]["start"]["line"] == 1
        ), f"Expected definition at line 1 (module counter), got line {counter_defs[0]['range']['start']['line']}"
@pytest.mark.systemverilog
class TestSystemVerilogReferences:
    """Tests for find-references functionality."""
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_find_references(self, language_server: SolidLanguageServer) -> None:
        """Test finding within-file references to a port signal.

        The 'count' output port is declared on line 7 and used in the
        always_ff block on lines 13 and 15 (twice), giving 3 within-file
        references — all inside counter.sv.
        """
        # counter.sv line 8 (0-indexed: 7): "  output logic [WIDTH-1:0] count"
        # 'count' starts at char 29
        references = language_server.request_references("counter.sv", 7, 29)
        assert len(references) >= 1, f"Expected at least 1 reference, got {len(references)}"
        ref_paths = [r.get("relativePath", "") for r in references]
        refs_in_counter = [r for r in references if "counter.sv" in (r.get("relativePath") or "")]
        assert len(refs_in_counter) >= 1, f"Expected within-file references in counter.sv, got paths: {ref_paths}"
        # Check the specific usage lines rather than just the count, so a
        # server returning spurious locations is still caught.
        ref_lines = sorted(r["range"]["start"]["line"] for r in refs_in_counter)
        # Line 13: count <= '0; Line 15: count <= count + 1'b1; (two refs)
        assert 13 in ref_lines, f"Expected reference at line 13 (count <= '0), got lines: {ref_lines}"
        assert 15 in ref_lines, f"Expected reference at line 15 (count <= count + 1'b1), got lines: {ref_lines}"
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_find_references_cross_file(self, language_server: SolidLanguageServer) -> None:
        """Test that references to counter include its instantiation in top.sv.

        Similar to Rust (lib.rs → main.rs) and C# (Program.cs → Models/Person.cs),
        this verifies that cross-file references are found.
        """
        # Query from the module's own selectionRange so the position is robust
        # against formatting changes in counter.sv.
        line, char = _get_symbol_selection_start(language_server, "counter.sv", "counter")
        references = language_server.request_references("counter.sv", line, char)
        ref_paths = [ref.get("relativePath", "") for ref in references]
        assert any("top.sv" in p for p in ref_paths), f"Expected reference from top.sv, got: {ref_paths}"
        refs_in_top = [r for r in references if "top.sv" in (r.get("relativePath") or "")]
        # top.sv line 17 (0-indexed: 16): "    counter #(.WIDTH(8)) u_counter ("
        assert (
            refs_in_top[0]["range"]["start"]["line"] == 16
        ), f"Expected cross-file reference at line 16 (counter instantiation), got line {refs_in_top[0]['range']['start']['line']}"
def _extract_hover_text(hover_info: dict[str, Any]) -> str:
"""Extract the text content from an LSP hover response."""
contents = hover_info["contents"]
if isinstance(contents, dict):
return contents.get("value", "")
elif isinstance(contents, str):
return contents
return str(contents)
@pytest.mark.systemverilog
class TestSystemVerilogHover:
    """Tests for hover information."""
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_hover(self, language_server: SolidLanguageServer) -> None:
        """Test hover information (experimental in verible, requires --lsp_enable_hover)."""
        # Hover at the module's own selectionRange start, resolved dynamically.
        line, char = _get_symbol_selection_start(language_server, "counter.sv", "counter")
        hover_info = language_server.request_hover("counter.sv", line, char)
        assert hover_info is not None, "Hover should return information for counter module"
        assert "contents" in hover_info, "Hover should have contents"
        hover_text = _extract_hover_text(hover_info)
        assert len(hover_text) > 0, "Hover text should not be empty"
        assert "counter" in hover_text.lower(), f"Hover should mention 'counter', got: {hover_text}"
        assert "module" in hover_text.lower(), f"Hover should identify 'counter' as a module, got: {hover_text}"
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_hover_includes_type_information(self, language_server: SolidLanguageServer) -> None:
        """Test that hover includes type information for a port signal.

        Hovering on 'count' output port should return its name and type
        (logic [WIDTH-1:0]), distinct from module-level hover.
        """
        # counter.sv line 8 (0-indexed: 7): "  output logic [WIDTH-1:0] count"
        # 'count' starts at char 29
        hover_info = language_server.request_hover("counter.sv", 7, 29)
        assert hover_info is not None, "Hover should return information for 'count' port"
        assert "contents" in hover_info, "Hover should have contents"
        hover_text = _extract_hover_text(hover_info)
        assert "count" in hover_text.lower(), f"Hover should mention 'count', got: {hover_text}"
        assert "logic" in hover_text.lower(), f"Hover should include type 'logic', got: {hover_text}"
def _extract_changes(workspace_edit: dict[str, Any]) -> dict[str, list[dict[str, Any]]]:
"""Extract file URI → edits mapping from a WorkspaceEdit, handling both formats."""
changes = workspace_edit.get("changes", {})
if not changes:
doc_changes = workspace_edit.get("documentChanges", [])
assert len(doc_changes) > 0, "WorkspaceEdit should have 'changes' or 'documentChanges'"
changes = {dc["textDocument"]["uri"]: dc["edits"] for dc in doc_changes if "textDocument" in dc and "edits" in dc}
return changes
@pytest.mark.systemverilog
class TestSystemVerilogRename:
    """Tests for rename functionality."""
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_rename_signal_within_file(self, language_server: SolidLanguageServer) -> None:
        """Test renaming a port signal from its declaration updates within-file occurrences.
        The 'count' output port (line 7, char 29) is used in the always_ff
        block on lines 13 and 15. Renaming from the declaration site produces
        edits for all occurrences within counter.sv.
        """
        workspace_edit = language_server.request_rename_symbol_edit("counter.sv", 7, 29, "cnt")
        assert workspace_edit is not None, "Rename should be supported for port signal 'count'"
        # Normalize both WorkspaceEdit shapes ('changes' vs 'documentChanges') to uri -> edits
        changes = _extract_changes(workspace_edit)
        counter_edits = [edits for uri, edits in changes.items() if "counter.sv" in uri]
        assert len(counter_edits) >= 1, f"Should have edits for counter.sv, got: {list(changes.keys())}"
        edits = counter_edits[0]
        assert len(edits) >= 2, f"Expected at least 2 edits (declaration + usage), got {len(edits)}"
        # 0-indexed start lines of all edits; declaration and both usages must appear
        edit_lines = sorted(e["range"]["start"]["line"] for e in edits)
        assert 7 in edit_lines, f"Expected edit at line 7 (port declaration), got lines: {edit_lines}"
        assert 13 in edit_lines, f"Expected edit at line 13 (count <= '0), got lines: {edit_lines}"
        assert 15 in edit_lines, f"Expected edit at line 15 (count <= count + 1'b1), got lines: {edit_lines}"
        for edit in edits:
            assert edit["newText"] == "cnt", f"Expected newText 'cnt', got {edit['newText']}"
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_rename_signal_cross_file(self, language_server: SolidLanguageServer) -> None:
        """Test renaming a port signal from a usage site includes cross-file edits.
        Renaming 'count' from usage in always_ff (line 13, char 12) should
        produce edits in counter.sv (declaration + usages) and also in top.sv
        where the port is connected (.count(count) at line 20).
        """
        workspace_edit = language_server.request_rename_symbol_edit("counter.sv", 13, 12, "cnt")
        assert workspace_edit is not None, "Rename should be supported for signal 'count' from usage site"
        changes = _extract_changes(workspace_edit)
        # Renaming from a usage site must touch both the defining file and the instantiating file
        counter_uris = [uri for uri in changes if "counter.sv" in uri]
        top_uris = [uri for uri in changes if "top.sv" in uri]
        assert len(counter_uris) >= 1, f"Expected edits in counter.sv, got: {list(changes.keys())}"
        assert len(top_uris) >= 1, f"Expected cross-file edits in top.sv, got: {list(changes.keys())}"
        for uri, edits in changes.items():
            for edit in edits:
                assert edit["newText"] == "cnt", f"Expected 'cnt' in {uri}, got {edit['newText']}"
    @pytest.mark.parametrize("language_server", [Language.SYSTEMVERILOG], indirect=True)
    def test_rename_module_name(self, language_server: SolidLanguageServer) -> None:
        """Test renaming a module name at its declaration.
        The 'counter' module declaration (line 1, char 7) is renamed to
        'my_counter'. Verible renames the identifier at the definition site.
        """
        line, char = _get_symbol_selection_start(language_server, "counter.sv", "counter")
        workspace_edit = language_server.request_rename_symbol_edit("counter.sv", line, char, "my_counter")
        assert workspace_edit is not None, "Rename should be supported for module 'counter'"
        changes = _extract_changes(workspace_edit)
        assert len(changes) > 0, "WorkspaceEdit should have changes"
        counter_edits = [edits for uri, edits in changes.items() if "counter.sv" in uri]
        assert len(counter_edits) >= 1, f"Should have edits for counter.sv, got: {list(changes.keys())}"
        edits = counter_edits[0]
        edit_lines = sorted(e["range"]["start"]["line"] for e in edits)
        assert 1 in edit_lines, f"Expected edit at line 1 (module declaration), got lines: {edit_lines}"
        # The declaration edit must start exactly at the identifier column (char 7 on line 1)
        decl_edits = [e for e in edits if e["range"]["start"]["line"] == 1]
        assert (
            decl_edits[0]["range"]["start"]["character"] == 7
        ), f"Expected edit at char 7, got char {decl_edits[0]['range']['start']['character']}"
        for uri, file_edits in changes.items():
            for edit in file_edits:
                assert edit["newText"] == "my_counter", f"Expected 'my_counter', got {edit['newText']}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/systemverilog/test_systemverilog_basic.py",
"license": "MIT License",
"lines": 216,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/systemverilog/test_systemverilog_detection.py | """
Tests for verible-verilog-ls detection logic.
These tests describe the expected behavior of SystemVerilogLanguageServer.DependencyProvider._get_or_install_core_dependency():
1. System PATH should be checked FIRST (prefers user-installed verible)
2. Runtime download should be fallback when not in PATH
3. Version information should be logged when available
4. Version check failures should be handled gracefully
5. Helpful error messages when verible is not available on unsupported platforms
WHY these tests matter:
- Users install verible via conda, Homebrew, system packages, or GitHub releases
- Detection failing means Serena is unusable for SystemVerilog, even when verible is correctly installed
- Without these tests, the detection logic can silently break for users with system installations
- Version logging helps debug compatibility issues
"""
import os
import shutil
import subprocess
import tempfile
from unittest.mock import MagicMock, Mock, patch
import pytest
from solidlsp.language_servers.systemverilog_server import SystemVerilogLanguageServer
from solidlsp.settings import SolidLSPSettings
DEFAULT_VERIBLE_VERSION = "v0.0-4051-g9fdb4057"
class TestVeribleVerilogLsDetection:
    """Unit tests for verible-verilog-ls binary detection logic."""
    @pytest.mark.systemverilog
    def test_detect_from_path_returns_system_verible(self) -> None:
        """
        GIVEN verible-verilog-ls is in system PATH
        WHEN _get_or_install_core_dependency is called
        THEN it returns the system path without downloading
        WHY: Users with system-installed verible (via conda, Homebrew, apt)
        should use that version instead of downloading. This is faster and
        respects user's environment management.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            with patch("shutil.which") as mock_which:
                mock_which.return_value = "/usr/local/bin/verible-verilog-ls"
                with patch("subprocess.run") as mock_run:
                    # Simulate a successful `verible-verilog-ls --version` invocation
                    mock_run.return_value = MagicMock(
                        returncode=0,
                        stdout="Verible v0.0-4051-g9fdb4057 (2024-01-01)\nCommit: 9fdb4057",
                        stderr="",
                    )
                    result = provider._get_or_install_core_dependency()
                    assert result == "/usr/local/bin/verible-verilog-ls"
                    mock_which.assert_called_once_with("verible-verilog-ls")
                    mock_run.assert_called_once()
                    assert mock_run.call_args[0][0] == ["/usr/local/bin/verible-verilog-ls", "--version"]
    @pytest.mark.systemverilog
    def test_detect_from_path_logs_version(self) -> None:
        """
        GIVEN verible-verilog-ls is in PATH with version output
        WHEN detected
        THEN version info is logged
        WHY: Version information helps debug compatibility issues.
        Users and developers need to know which verible version is being used.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            with patch("shutil.which", return_value="/usr/bin/verible-verilog-ls"):
                with patch("subprocess.run") as mock_run:
                    mock_run.return_value = MagicMock(returncode=0, stdout="Verible v0.0-4051-g9fdb4057", stderr="")
                    # Patch the module-level logger so we can observe the info call
                    with patch("solidlsp.language_servers.systemverilog_server.log") as mock_log:
                        result = provider._get_or_install_core_dependency()
                        # Verify version check was called
                        assert mock_run.call_args[0][0] == ["/usr/bin/verible-verilog-ls", "--version"]
                        # Verify version was logged
                        assert mock_log.info.called
                        log_message = mock_log.info.call_args[0][0]
                        assert "Verible v0.0-4051" in log_message
                        assert result == "/usr/bin/verible-verilog-ls"
    @pytest.mark.systemverilog
    def test_detect_from_path_handles_version_failure_gracefully(self) -> None:
        """
        GIVEN verible-verilog-ls is in PATH but --version fails (returncode=1)
        WHEN detected
        THEN it still returns the system path (graceful degradation)
        WHY: Some verible builds might not support --version or have different flags.
        Detection should not fail just because version check fails - the binary
        might still work fine for LSP operations.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            with patch("shutil.which", return_value="/custom/bin/verible-verilog-ls"):
                with patch("subprocess.run") as mock_run:
                    # Version check fails
                    mock_run.return_value = MagicMock(returncode=1, stdout="", stderr="Unknown option: --version")
                    result = provider._get_or_install_core_dependency()
                    # Should still return the path despite version check failure
                    assert result == "/custom/bin/verible-verilog-ls"
    @pytest.mark.systemverilog
    def test_detect_from_path_handles_version_timeout_gracefully(self) -> None:
        """
        GIVEN verible-verilog-ls is in PATH but --version times out
        WHEN detected
        THEN it still returns the system path (graceful degradation)
        WHY: Version check has a timeout to avoid hanging. If it times out,
        we should still use the detected binary.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            with patch("shutil.which", return_value="/opt/verible/bin/verible-verilog-ls"):
                with patch("subprocess.run") as mock_run:
                    # Version check times out
                    mock_run.side_effect = subprocess.TimeoutExpired(cmd=["verible-verilog-ls", "--version"], timeout=5)
                    result = provider._get_or_install_core_dependency()
                    # Should still return the path despite timeout
                    assert result == "/opt/verible/bin/verible-verilog-ls"
    @pytest.mark.systemverilog
    def test_error_message_when_not_found_anywhere(self) -> None:
        """
        GIVEN verible is NOT in PATH AND platform is unsupported
        WHEN _get_or_install_core_dependency is called
        THEN raises FileNotFoundError with helpful installation instructions
        WHY: Users need clear guidance on how to install verible when it's missing.
        Error message should mention conda, Homebrew, and GitHub releases.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            with patch("shutil.which", return_value=None):
                # Mock RuntimeDependencyCollection to raise RuntimeError for unsupported platform
                with patch("solidlsp.language_servers.systemverilog_server.RuntimeDependencyCollection") as mock_deps_class:
                    mock_deps = Mock()
                    mock_deps.get_single_dep_for_current_platform.side_effect = RuntimeError("Unsupported platform")
                    mock_deps_class.return_value = mock_deps
                    with pytest.raises(FileNotFoundError) as exc_info:
                        provider._get_or_install_core_dependency()
                    error_message = str(exc_info.value)
                    # Error should mention installation methods
                    assert "conda" in error_message.lower()
                    assert "Homebrew" in error_message or "brew" in error_message.lower()
                    assert "GitHub" in error_message or "github.com" in error_message.lower()
                    assert "verible" in error_message.lower()
    @pytest.mark.systemverilog
    def test_downloads_when_not_in_path(self) -> None:
        """
        GIVEN verible is NOT in PATH AND platform IS supported AND binary exists after download
        WHEN _get_or_install_core_dependency is called
        THEN returns the downloaded executable path
        WHY: When verible is not installed system-wide and platform is supported,
        Serena should auto-download it. This enables zero-setup experience.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            expected_path = os.path.join(temp_dir, "verible-ls", f"verible-{DEFAULT_VERIBLE_VERSION}", "bin", "verible-verilog-ls")
            with patch("shutil.which", return_value=None):
                with patch("solidlsp.language_servers.systemverilog_server.RuntimeDependencyCollection") as mock_deps_class:
                    # Create mock dependency and collection
                    mock_dep = Mock()
                    mock_dep.url = "https://github.com/chipsalliance/verible/releases/download/v0.0-4051/verible.tar.gz"
                    mock_deps = Mock()
                    mock_deps.get_single_dep_for_current_platform.return_value = mock_dep
                    mock_deps.binary_path.return_value = expected_path
                    mock_deps.install.return_value = expected_path
                    mock_deps_class.return_value = mock_deps
                    with patch("os.path.exists") as mock_exists:
                        # Before download: binary doesn't exist yet → after download: binary exists
                        # NOTE: side_effect order matters — first exists() call is pre-install,
                        # second is the post-install verification.
                        mock_exists.side_effect = [False, True]
                        with patch("os.chmod"):
                            result = provider._get_or_install_core_dependency()
                            assert result == expected_path
                            mock_deps.install.assert_called_once()
    @pytest.mark.systemverilog
    def test_detection_prefers_path_over_download(self) -> None:
        """
        GIVEN verible is in PATH AND download would also work
        WHEN _get_or_install_core_dependency is called
        THEN PATH version is used (download never attempted)
        WHY: System-installed verible should always take precedence.
        This respects user's environment and avoids unnecessary downloads.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            with patch("shutil.which", return_value="/usr/bin/verible-verilog-ls"):
                with patch("subprocess.run") as mock_run:
                    mock_run.return_value = MagicMock(returncode=0, stdout="Verible v0.0-4051", stderr="")
                    with patch("solidlsp.language_servers.systemverilog_server.RuntimeDependencyCollection") as mock_deps_class:
                        result = provider._get_or_install_core_dependency()
                        # RuntimeDependencyCollection should never be instantiated
                        mock_deps_class.assert_not_called()
                        assert result == "/usr/bin/verible-verilog-ls"
    @pytest.mark.systemverilog
    def test_download_fails_if_binary_not_found_after_install(self) -> None:
        """
        GIVEN verible is NOT in PATH AND platform IS supported
        WHEN download completes BUT binary still doesn't exist at expected path
        THEN raises FileNotFoundError
        WHY: If download/extraction fails silently, we should catch it and report clearly.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            expected_path = os.path.join(temp_dir, "verible-ls", f"verible-{DEFAULT_VERIBLE_VERSION}", "bin", "verible-verilog-ls")
            with patch("shutil.which", return_value=None):
                with patch("solidlsp.language_servers.systemverilog_server.RuntimeDependencyCollection") as mock_deps_class:
                    mock_dep = Mock()
                    mock_deps = Mock()
                    mock_deps.get_single_dep_for_current_platform.return_value = mock_dep
                    mock_deps.binary_path.return_value = expected_path
                    mock_deps.install.return_value = expected_path
                    mock_deps_class.return_value = mock_deps
                    # Binary never appears after install
                    with patch("os.path.exists", return_value=False):
                        with pytest.raises(FileNotFoundError) as exc_info:
                            provider._get_or_install_core_dependency()
                        error_message = str(exc_info.value)
                        assert "verible-verilog-ls not found" in error_message
                        assert expected_path in error_message
    @pytest.mark.systemverilog
    def test_uses_already_downloaded_binary_without_reinstalling(self) -> None:
        """
        GIVEN verible is NOT in PATH AND platform IS supported
        AND binary already exists at download location
        WHEN _get_or_install_core_dependency is called
        THEN returns existing path without downloading again
        WHY: Avoid redundant downloads if verible was already downloaded in previous session.
        This speeds up subsequent runs.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(custom_settings, temp_dir)
            expected_path = os.path.join(temp_dir, "verible-ls", f"verible-{DEFAULT_VERIBLE_VERSION}", "bin", "verible-verilog-ls")
            with patch("shutil.which", return_value=None):
                with patch("solidlsp.language_servers.systemverilog_server.RuntimeDependencyCollection") as mock_deps_class:
                    mock_dep = Mock()
                    mock_deps = Mock()
                    mock_deps.get_single_dep_for_current_platform.return_value = mock_dep
                    mock_deps.binary_path.return_value = expected_path
                    mock_deps_class.return_value = mock_deps
                    # Binary already exists
                    with patch("os.path.exists", return_value=True):
                        with patch("os.chmod"):
                            result = provider._get_or_install_core_dependency()
                            # Should NOT call install since binary already exists
                            mock_deps.install.assert_not_called()
                            assert result == expected_path
class TestVeribleVerilogLsDetectionIntegration:
    """
    Integration tests that verify detection works on the current system.
    These tests are skipped if verible-verilog-ls is not installed.
    """

    @pytest.mark.systemverilog
    def test_integration_finds_installed_verible(self) -> None:
        """
        GIVEN verible-verilog-ls is installed on this system (via any method)
        WHEN _get_or_install_core_dependency is called
        THEN it returns a valid executable path
        This test verifies the detection logic works end-to-end on the current system.
        """
        # No mocking here: rely on the machine's real PATH, skipping when absent
        if not shutil.which("verible-verilog-ls"):
            pytest.skip("verible-verilog-ls not installed on this system")
        with tempfile.TemporaryDirectory() as workdir:
            settings = SolidLSPSettings.CustomLSSettings({})
            provider = SystemVerilogLanguageServer.DependencyProvider(settings, workdir)
            binary_path = provider._get_or_install_core_dependency()
            assert binary_path is not None
            # The resolved path must point at a real, executable file
            assert os.path.isfile(binary_path)
            assert os.access(binary_path, os.X_OK)
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/systemverilog/test_systemverilog_detection.py",
"license": "MIT License",
"lines": 268,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:src/solidlsp/language_servers/ccls_language_server.py | """
This is an alternative to clangd for large C++ codebases where ccls may perform
better for indexing and navigation. Requires ccls to be installed and available
on PATH, or configured via ls_specific_settings with key "ls_path".
Installation
------------
ccls must be installed manually as there are no prebuilt binaries available for
direct download. Install using your system package manager:
**Linux:**
- Ubuntu/Debian (22.04+): ``sudo apt-get install ccls``
- Fedora/RHEL: ``sudo dnf install ccls``
- Arch Linux: ``sudo pacman -S ccls``
- openSUSE Tumbleweed: ``sudo zypper install ccls``
- Gentoo: ``sudo emerge dev-util/ccls``
**macOS:**
- Homebrew: ``brew install ccls``
**Windows:**
- Chocolatey: ``choco install ccls``
For alternative installation methods and build-from-source instructions, see:
https://github.com/MaskRay/ccls/wiki/Build
Official documentation:
https://github.com/MaskRay/ccls
"""
import logging
import os
import pathlib
import threading
from typing import Any, cast
from solidlsp.ls import (
LanguageServerDependencyProvider,
LanguageServerDependencyProviderSinglePath,
SolidLanguageServer,
)
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class CCLS(SolidLanguageServer):
    """
    C/C++ language server implementation using ccls.

    Notes:
        - ccls should be installed and on PATH (or specify ls_path in settings)
        - compile_commands.json at repo root is recommended for accurate indexing
    """

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates a CclsLanguageServer instance. This class is not meant to be instantiated directly.
        Use LanguageServer.create() instead.

        :param config: language server configuration
        :param repository_root_path: absolute path of the repository to index
        :param solidlsp_settings: global solidlsp settings
        """
        super().__init__(config, repository_root_path, None, "cpp", solidlsp_settings)
        # Set once the LSP handshake has completed in _start_server
        self.server_ready = threading.Event()

    def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
        return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)

    class DependencyProvider(LanguageServerDependencyProviderSinglePath):
        def _get_or_install_core_dependency(self) -> str:
            """
            Resolve ccls path from system or raise helpful error if missing.
            Allows override via ls_specific_settings[language].ls_path.

            Unlike most other providers, this never downloads anything: there
            are no prebuilt ccls binaries, so a system installation is required.

            :return: absolute path to the ccls executable
            :raises FileNotFoundError: if ccls is not found on PATH
            """
            import shutil

            ccls_path = shutil.which("ccls")
            if not ccls_path:
                raise FileNotFoundError(
                    "ccls is not installed on your system.\n"
                    "Please install ccls using your system package manager:\n"
                    "  Linux (Ubuntu/Debian): sudo apt-get install ccls\n"
                    "  Linux (Fedora/RHEL):   sudo dnf install ccls\n"
                    "  Linux (Arch):          sudo pacman -S ccls\n"
                    "  macOS (Homebrew):      brew install ccls\n"
                    "  Windows:               choco install ccls\n\n"
                    "For build instructions and more details, see:\n"
                    "  https://github.com/MaskRay/ccls/wiki/Build"
                )
            log.info(f"Using system-installed ccls at {ccls_path}")
            return ccls_path

        def _create_launch_command(self, core_path: str) -> list[str]:
            # ccls needs no extra arguments; it speaks LSP on stdio by default
            return [core_path]

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the ccls Language Server.

        :param repository_absolute_path: absolute path of the repository root
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {"dynamicRegistration": True},
                },
                "workspace": {"workspaceFolders": True, "didChangeConfiguration": {"dynamicRegistration": True}},
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    # Use the actual directory name. The previous value "$name" was an
                    # unsubstituted template placeholder; sibling servers (e.g. Phpactor)
                    # send os.path.basename(repository_absolute_path) here.
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
            # ccls supports initializationOptions but none are required for basic functionality
        }
        return cast(InitializeParams, initialize_params)

    def _start_server(self) -> None:
        """
        Starts the ccls language server and initializes the LSP connection.
        """

        def do_nothing(params: Any) -> None:
            pass

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        # Register minimal handlers; diagnostics and progress are intentionally ignored
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting ccls server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to ccls and awaiting response")
        self.server.send.initialize(initialize_params)
        # Do not assert clangd-specific capability shapes; ccls differs
        self.server.notify.initialized({})
        # Basic readiness
        self.server_ready.set()
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/ccls_language_server.py",
"license": "MIT License",
"lines": 128,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/solidlsp/language_servers/phpactor.py | """
Provides PHP specific instantiation of the LanguageServer class using Phpactor.
"""
import logging
import os
import pathlib
import re
import shutil
import stat
import subprocess
from overrides import override
from solidlsp.ls import LanguageServerDependencyProvider, LanguageServerDependencyProviderSinglePath, SolidLanguageServer
from solidlsp.ls_config import Language, LanguageServerConfig
from solidlsp.ls_utils import FileUtils
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
PHPACTOR_VERSION = "2025.12.21.1"
PHPACTOR_PHAR_URL = f"https://github.com/phpactor/phpactor/releases/download/{PHPACTOR_VERSION}/phpactor.phar"
class PhpactorServer(SolidLanguageServer):
    """
    Provides PHP specific instantiation of the LanguageServer class using Phpactor.
    Phpactor is an open-source (MIT) PHP language server that requires PHP 8.1+ on the system.
    It is an alternative to Intelephense, which is the default PHP language server.
    You can pass the following entries in ls_specific_settings["php_phpactor"]:
    - ignore_vendor: whether to ignore directories named "vendor" (default: true)
    """

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # In addition to globally ignored dirs, skip PHP dependency/cache directories
        return super().is_ignored_dirname(dirname) or dirname in self._ignored_dirnames

    class DependencyProvider(LanguageServerDependencyProviderSinglePath):
        def _get_or_install_core_dependency(self) -> str:
            """
            Setup runtime dependencies for Phpactor and return the path to the PHAR file.

            :return: path to the downloaded (or previously cached) phpactor.phar
            :raises FileNotFoundError: if PHP is not on PATH or the PHAR download failed
            :raises RuntimeError: if the installed PHP version is older than 8.1
            """
            # Verify PHP is installed. Raise explicitly rather than assert so the
            # check is not stripped when Python runs with -O, and so the failure
            # type matches the other dependency providers.
            php_path = shutil.which("php")
            if php_path is None:
                raise FileNotFoundError(
                    "PHP is not installed or not found in PATH. Phpactor requires PHP 8.1+. Please install PHP and try again."
                )
            # Check PHP version (Phpactor requires PHP 8.1+); invoke the resolved
            # executable so the check matches the binary found above.
            result = subprocess.run([php_path, "--version"], capture_output=True, text=True, check=False)
            php_version_output = result.stdout.strip()
            log.info(f"PHP version: {php_version_output}")
            version_match = re.search(r"PHP (\d+)\.(\d+)", php_version_output)
            if version_match:
                major, minor = int(version_match.group(1)), int(version_match.group(2))
                if major < 8 or (major == 8 and minor < 1):
                    raise RuntimeError(f"PHP {major}.{minor} detected, but Phpactor requires PHP 8.1+. Please upgrade PHP.")
            else:
                log.warning("Could not parse PHP version from output. Continuing anyway.")
            phpactor_phar_path = os.path.join(self._ls_resources_dir, "phpactor.phar")
            if not os.path.exists(phpactor_phar_path):
                os.makedirs(self._ls_resources_dir, exist_ok=True)
                log.info(f"Downloading phpactor PHAR from {PHPACTOR_PHAR_URL}")
                FileUtils.download_and_extract_archive(PHPACTOR_PHAR_URL, phpactor_phar_path, "binary")
            if not os.path.exists(phpactor_phar_path):
                raise FileNotFoundError(f"phpactor PHAR not found at {phpactor_phar_path}, download may have failed.")
            # Ensure the PHAR is executable (owner/group/other)
            current_mode = os.stat(phpactor_phar_path).st_mode
            os.chmod(phpactor_phar_path, current_mode | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
            return phpactor_phar_path

        def _create_launch_command(self, core_path: str) -> list[str]:
            # 'php' is resolved via PATH at launch time; availability was verified above
            return ["php", core_path, "language-server"]

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        :param config: language server configuration
        :param repository_root_path: absolute path of the repository to index
        :param solidlsp_settings: global solidlsp settings
        """
        super().__init__(config, repository_root_path, None, "php", solidlsp_settings)
        # Override internal language enum for correct file matching
        self.language = Language.PHP_PHPACTOR
        self._ignored_dirnames = {"node_modules", "cache"}
        if self._custom_settings.get("ignore_vendor", True):
            self._ignored_dirnames.add("vendor")
        log.info(f"Ignoring the following directories for PHP (Phpactor): {', '.join(sorted(self._ignored_dirnames))}")

    def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
        return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)

    def _get_initialize_params(self, repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialization params for the Phpactor Language Server.

        :param repository_absolute_path: absolute path of the repository root
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "definition": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                },
            },
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
            # Disable optional phpactor extensions we do not use
            "initializationOptions": {
                "language_server_phpstan.enabled": False,
                "language_server_psalm.enabled": False,
                "language_server_php_cs_fixer.enabled": False,
            },
        }
        return initialize_params  # type: ignore

    def _start_server(self) -> None:
        """Start Phpactor server process."""

        def register_capability_handler(params: dict) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        def do_nothing(params: dict) -> None:
            return

        # Minimal handlers; diagnostics and progress notifications are ignored
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Phpactor server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        log.info("After sent initialize params")
        # Verify server capabilities
        assert "capabilities" in init_response
        assert init_response["capabilities"].get("definitionProvider"), "Phpactor did not advertise definition support"
        self.server.notify.initialized({})
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/phpactor.py",
"license": "MIT License",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/util/metals_db_utils.py | """
Utilities for detecting and managing Scala Metals H2 database state.
This module provides functions to detect existing Metals LSP instances by checking
the H2 database lock file, and to clean up stale locks from crashed processes.
Metals uses H2 AUTO_SERVER mode (enabled by default) to support multiple concurrent
instances sharing the same database. However, if a Metals process crashes without
proper cleanup, it can leave a stale lock file that prevents proper AUTO_SERVER
coordination, causing new instances to fall back to in-memory database mode.
"""
from __future__ import annotations
import logging
import re
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
pass
log = logging.getLogger(__name__)
class MetalsDbStatus(Enum):
    """Status of the Metals H2 database for a project.

    The four states determine how a new Metals instance should proceed:
    start fresh, start normally, share the database via H2 AUTO_SERVER,
    or clean up a stale lock left behind by a crashed process.
    """

    NO_DATABASE = "no_database"
    """No .metals directory or database exists (fresh project)."""
    NO_LOCK = "no_lock"
    """Database exists but no lock file (safe to start)."""
    ACTIVE_INSTANCE = "active_instance"
    """Lock held by a running process (will share via AUTO_SERVER)."""
    STALE_LOCK = "stale_lock"
    """Lock held by a dead process (needs cleanup)."""
@dataclass
class MetalsLockInfo:
    """Information extracted from an H2 database lock file.

    Produced by parse_h2_lock_file(); fields that could not be parsed from
    the lock file content are left as None.
    """

    pid: int | None
    """Process ID that holds the lock, if parseable."""
    port: int | None
    """TCP port for AUTO_SERVER connection, if parseable."""
    lock_path: Path
    """Path to the lock file."""
    is_stale: bool
    """True if the owning process is no longer running."""
    raw_content: str
    """Raw content of the lock file for debugging."""
def parse_h2_lock_file(lock_path: Path) -> MetalsLockInfo | None:
"""
Parse an H2 database lock file to extract connection information.
The H2 lock file format varies by version but typically contains
server connection information. Common formats include:
- Text format: "server:localhost:9092" or similar
- Binary format with embedded PID
Args:
lock_path: Path to the .lock.db file
Returns:
MetalsLockInfo if the file can be parsed, None if file doesn't exist
or is completely unparsable.
"""
if not lock_path.exists():
return None
try:
# Try reading as text first (most common for H2 AUTO_SERVER)
content = lock_path.read_text(encoding="utf-8", errors="replace")
except OSError as e:
log.debug(f"Could not read lock file {lock_path}: {e}")
return None
pid: int | None = None
port: int | None = None
# Try to extract port from common H2 lock file formats
# Format 1: "server:localhost:PORT"
server_match = re.search(r"server:[\w.]+:(\d+)", content, re.IGNORECASE)
if server_match:
port = int(server_match.group(1))
# Format 2: Look for standalone port numbers (H2 uses ports in 9000+ range typically)
if port is None:
port_match = re.search(r"\b(9\d{3})\b", content)
if port_match:
port = int(port_match.group(1))
# Try to extract PID - H2 may embed this in various formats
pid_match = re.search(r"pid[=:]?\s*(\d+)", content, re.IGNORECASE)
if pid_match:
pid = int(pid_match.group(1))
# Check if the process is still alive
is_stale = False
if pid is not None:
is_stale = not is_metals_process_alive(pid)
elif port is not None:
# If we have a port but no PID, try to find a Metals process using that port
is_stale = not _is_port_in_use_by_metals(port)
else:
# Can't determine - assume stale if lock exists but we can't parse it
# and no Metals processes are running for this project
log.debug(f"Could not parse PID or port from lock file: {lock_path}")
is_stale = True # Conservative: treat unparsable as stale
return MetalsLockInfo(
pid=pid,
port=port,
lock_path=lock_path,
is_stale=is_stale,
raw_content=content[:200], # Truncate for logging
)
def is_metals_process_alive(pid: int) -> bool:
    """
    Check if a process with the given PID is alive and is a Metals process.

    Args:
        pid: Process ID to check

    Returns:
        True if the process exists and appears to be a Metals LSP server;
        False otherwise (including when psutil is unavailable).
    """
    # Import guarded separately: in the original code an ImportError here made
    # the `except (psutil.NoSuchProcess, ...)` clause below raise NameError
    # while matching handlers, which escaped uncaught instead of returning False.
    try:
        import psutil
    except ImportError:
        return False

    try:
        proc = psutil.Process(pid)
        if not proc.is_running():
            return False
        # A live PID may have been recycled: confirm the command line
        # actually looks like a Metals/JVM process.
        cmdline = " ".join(proc.cmdline()).lower()
        return _is_metals_cmdline(cmdline)
    except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
        return False
    except Exception as e:
        log.debug(f"Error checking process {pid}: {e}")
        return False
def _is_metals_cmdline(cmdline: str) -> bool:
"""Check if a command line string appears to be a Metals LSP server."""
cmdline_lower = cmdline.lower()
# Metals is a Scala/Java application
if "java" not in cmdline_lower:
return False
# Look for Metals-specific identifiers
return any(
marker in cmdline_lower
for marker in [
"metals",
"org.scalameta",
"-dmetals.client",
]
)
def _is_port_in_use_by_metals(port: int) -> bool:
    """Return True when a listening TCP socket on *port* belongs to a Metals process."""
    try:
        import psutil

        for connection in psutil.net_connections(kind="tcp"):
            # Only consider sockets actually listening on the requested port.
            if connection.laddr.port != port or connection.status != "LISTEN":
                continue
            try:
                owner = psutil.Process(connection.pid)
                owner_cmdline = " ".join(owner.cmdline()).lower()
                if _is_metals_cmdline(owner_cmdline):
                    return True
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                pass
        return False
    except (psutil.AccessDenied, OSError) as e:
        # Enumerating connections needs elevated privileges on some systems.
        log.debug(f"Could not check port {port}: {e}")
        return False
def check_metals_db_status(project_path: Path) -> tuple[MetalsDbStatus, MetalsLockInfo | None]:
    """
    Determine the state of a project's Metals H2 database.

    Used to decide whether a new Metals instance can safely be started and
    whether a stale lock has to be cleaned up first.

    Args:
        project_path: Path to the project root directory

    Returns:
        ``(status, lock_info)``; ``lock_info`` is only populated for the
        ACTIVE_INSTANCE and STALE_LOCK statuses.
    """
    metals_dir = project_path / ".metals"
    database_file = metals_dir / "metals.mv.db"
    lock_file = metals_dir / "metals.mv.db.lock.db"

    # No .metals directory or no database file -> fresh project.
    if not metals_dir.exists():
        log.debug(f"No .metals directory found at {metals_dir}")
        return MetalsDbStatus.NO_DATABASE, None
    if not database_file.exists():
        log.debug(f"No Metals database found at {database_file}")
        return MetalsDbStatus.NO_DATABASE, None

    # Database without a lock file -> nothing currently holds it.
    if not lock_file.exists():
        log.debug(f"Metals database exists but no lock file at {lock_file}")
        return MetalsDbStatus.NO_LOCK, None

    # Lock file exists - parse it to determine status.
    lock_info = parse_h2_lock_file(lock_file)
    if lock_info is None:
        # Present but unreadable: treat it as stale with a placeholder record.
        log.warning(f"Could not read lock file at {lock_file}, treating as stale")
        placeholder = MetalsLockInfo(
            pid=None,
            port=None,
            lock_path=lock_file,
            is_stale=True,
            raw_content="<unreadable>",
        )
        return MetalsDbStatus.STALE_LOCK, placeholder

    if lock_info.is_stale:
        log.debug(f"Stale Metals lock detected: {lock_info}")
        return MetalsDbStatus.STALE_LOCK, lock_info
    log.debug(f"Active Metals instance detected: {lock_info}")
    return MetalsDbStatus.ACTIVE_INSTANCE, lock_info
def cleanup_stale_lock(lock_path: Path) -> bool:
    """
    Delete a stale H2 database lock file.

    Only call this after verifying that the owning process is dead: removing
    the lock of a live instance could corrupt the database.

    Args:
        lock_path: Path to the ``.lock.db`` file to remove

    Returns:
        True on success (including when the file was already gone), False otherwise.
    """
    if not lock_path.exists():
        log.debug(f"Lock file already removed: {lock_path}")
        return True

    try:
        lock_path.unlink()
    except PermissionError as e:
        log.warning(f"Permission denied removing stale lock file {lock_path}: {e}")
        return False
    except OSError as e:
        log.warning(f"Could not remove stale lock file {lock_path}: {e}")
        return False

    log.info(f"Cleaned up stale Metals lock file: {lock_path}")
    return True
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/util/metals_db_utils.py",
"license": "MIT License",
"lines": 216,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/serena/config/test_global_ignored_paths.py | import os
import shutil
import tempfile
from pathlib import Path
from serena.config.serena_config import ProjectConfig, RegisteredProject, SerenaConfig
from serena.project import Project
from solidlsp.ls_config import Language
def _create_test_project(
    project_root: Path,
    project_ignored_paths: list[str] | None = None,
    global_ignored_paths: list[str] | None = None,
) -> Project:
    """Helper to create a Project with the given ignored paths configuration."""
    # Project-level patterns live in ProjectConfig.ignored_paths; gitignore
    # handling is disabled so only explicitly passed patterns affect the tests.
    config = ProjectConfig(
        project_name="test_project",
        languages=[Language.PYTHON],
        ignored_paths=project_ignored_paths or [],
        ignore_all_files_in_gitignore=False,
    )
    # System-global patterns live on SerenaConfig.ignored_paths.
    serena_config = SerenaConfig(gui_log_window=False, web_dashboard=False, ignored_paths=global_ignored_paths)
    return Project(
        project_root=str(project_root),
        project_config=config,
        serena_config=serena_config,
    )
class TestGlobalIgnoredPaths:
    """Tests for system-global ignored_paths feature."""

    def setup_method(self) -> None:
        # Fresh temporary project tree per test; removed again in teardown_method.
        self.test_dir = tempfile.mkdtemp()
        self.project_path = Path(self.test_dir)
        # Create some test files and directories
        (self.project_path / "main.py").write_text("print('hello')")
        os.makedirs(self.project_path / "node_modules" / "pkg", exist_ok=True)
        (self.project_path / "node_modules" / "pkg" / "index.js").write_text("module.exports = {}")
        os.makedirs(self.project_path / "build", exist_ok=True)
        (self.project_path / "build" / "output.js").write_text("compiled")
        os.makedirs(self.project_path / "src", exist_ok=True)
        (self.project_path / "src" / "app.py").write_text("def app(): pass")
        (self.project_path / "debug.log").write_text("log data")

    def teardown_method(self) -> None:
        shutil.rmtree(self.test_dir)

    def test_global_ignored_paths_are_applied(self) -> None:
        """Global ignored_paths from SerenaConfig are respected by Project.is_ignored_path()."""
        project = _create_test_project(
            self.project_path,
            global_ignored_paths=["node_modules"],
        )
        assert project.is_ignored_path(str(self.project_path / "node_modules" / "pkg" / "index.js"))
        assert not project.is_ignored_path(str(self.project_path / "src" / "app.py"))

    def test_additive_merge_of_global_and_project_patterns(self) -> None:
        """Global + project patterns are merged additively (both applied)."""
        project = _create_test_project(
            self.project_path,
            project_ignored_paths=["build"],
            global_ignored_paths=["node_modules"],
        )
        # Global pattern should be applied
        assert project.is_ignored_path(str(self.project_path / "node_modules" / "pkg" / "index.js"))
        # Project pattern should also be applied
        assert project.is_ignored_path(str(self.project_path / "build" / "output.js"))
        # Non-ignored files should not be affected
        assert not project.is_ignored_path(str(self.project_path / "src" / "app.py"))

    def test_empty_global_ignored_paths_has_no_effect(self) -> None:
        """Empty global ignored_paths (default) has no effect on existing behavior."""
        project = _create_test_project(
            self.project_path,
            project_ignored_paths=["build"],
            global_ignored_paths=[],
        )
        # Project pattern still works
        assert project.is_ignored_path(str(self.project_path / "build" / "output.js"))
        # Non-ignored files still accessible
        assert not project.is_ignored_path(str(self.project_path / "node_modules" / "pkg" / "index.js"))

    def test_duplicate_patterns_across_global_and_project(self) -> None:
        """Duplicate patterns across global and project do not cause errors."""
        project = _create_test_project(
            self.project_path,
            project_ignored_paths=["node_modules", "build"],
            global_ignored_paths=["node_modules", "build"],
        )
        assert project.is_ignored_path(str(self.project_path / "node_modules" / "pkg" / "index.js"))
        assert project.is_ignored_path(str(self.project_path / "build" / "output.js"))
        assert not project.is_ignored_path(str(self.project_path / "src" / "app.py"))

    def test_glob_patterns_in_global_ignored_paths(self) -> None:
        """Global ignored_paths support gitignore-style glob patterns."""
        project = _create_test_project(
            self.project_path,
            global_ignored_paths=["*.log"],
        )
        assert project.is_ignored_path(str(self.project_path / "debug.log"))
        assert not project.is_ignored_path(str(self.project_path / "main.py"))
class TestRegisteredProjectGlobalIgnoredPaths:
    """RegisteredProject.get_project_instance() correctly passes global patterns to Project."""

    def setup_method(self) -> None:
        self.test_dir = tempfile.mkdtemp()
        # resolve() normalizes the temp path (e.g. symlinked /tmp on macOS) —
        # presumably so path comparisons inside Project match; confirm if changed.
        self.project_path = Path(self.test_dir).resolve()
        (self.project_path / "main.py").write_text("print('hello')")
        os.makedirs(self.project_path / "node_modules", exist_ok=True)
        (self.project_path / "node_modules" / "pkg.js").write_text("module")

    def teardown_method(self) -> None:
        shutil.rmtree(self.test_dir)

    def test_get_project_instance_passes_global_ignored_paths(self) -> None:
        """RegisteredProject.get_project_instance() passes global_ignored_paths to Project."""
        config = ProjectConfig(
            project_name="test_project",
            languages=[Language.PYTHON],
            ignored_paths=[],
            ignore_all_files_in_gitignore=False,
        )
        serena_config = SerenaConfig(gui_log_window=False, web_dashboard=False, ignored_paths=["node_modules"])
        registered = RegisteredProject(
            project_root=str(self.project_path),
            project_config=config,
        )
        project = registered.get_project_instance(serena_config=serena_config)
        assert project.is_ignored_path(str(self.project_path / "node_modules" / "pkg.js"))

    def test_get_project_instance_without_global_ignored_paths(self) -> None:
        """RegisteredProject without global_ignored_paths defaults to empty."""
        config = ProjectConfig(
            project_name="test_project",
            languages=[Language.PYTHON],
            ignored_paths=[],
            ignore_all_files_in_gitignore=False,
        )
        registered = RegisteredProject(
            project_root=str(self.project_path),
            project_config=config,
        )
        serena_config = SerenaConfig(gui_log_window=False, web_dashboard=False, ignored_paths=[])
        project = registered.get_project_instance(serena_config=serena_config)
        assert not project.is_ignored_path(str(self.project_path / "node_modules" / "pkg.js"))

    def test_from_project_root_passes_global_ignored_paths(self) -> None:
        """RegisteredProject.from_project_root() threads global_ignored_paths to Project."""
        # Create a minimal project.yml so from_project_root can load config
        serena_dir = self.project_path / ".serena"
        serena_dir.mkdir(exist_ok=True)
        (serena_dir / "project.yml").write_text(
            'project_name: "test_project"\nlanguages: ["python"]\nignored_paths: []\nignore_all_files_in_gitignore: false\n'
        )
        serena_config = SerenaConfig(gui_log_window=False, web_dashboard=False, ignored_paths=["node_modules"])
        registered = RegisteredProject.from_project_root(
            str(self.project_path),
            serena_config=serena_config,
        )
        project = registered.get_project_instance(serena_config=serena_config)
        assert project.is_ignored_path(str(self.project_path / "node_modules" / "pkg.js"))

    def test_from_project_instance_passes_global_ignored_paths(self) -> None:
        """RegisteredProject.from_project_instance() threads global_ignored_paths to Project."""
        config = ProjectConfig(
            project_name="test_project",
            languages=[Language.PYTHON],
            ignored_paths=[],
            ignore_all_files_in_gitignore=False,
        )
        serena_config = SerenaConfig(gui_log_window=False, web_dashboard=False, ignored_paths=["node_modules"])
        project = Project(
            project_root=str(self.project_path),
            project_config=config,
            serena_config=serena_config,
        )
        registered = RegisteredProject.from_project_instance(project)
        # The registered project already has a project_instance, so get_project_instance() returns it directly
        retrieved = registered.get_project_instance(serena_config=serena_config)
        assert retrieved.is_ignored_path(str(self.project_path / "node_modules" / "pkg.js"))
class TestGlobalIgnoredPathsWithGitignore:
    """Global ignored_paths combined with ignore_all_files_in_gitignore produces correct three-way merge."""

    def setup_method(self) -> None:
        self.test_dir = tempfile.mkdtemp()
        self.project_path = Path(self.test_dir).resolve()
        # Create test files — one directory per pattern source (global/project/gitignore)
        (self.project_path / "main.py").write_text("print('hello')")
        os.makedirs(self.project_path / "node_modules", exist_ok=True)
        (self.project_path / "node_modules" / "pkg.js").write_text("module")
        os.makedirs(self.project_path / "dist", exist_ok=True)
        (self.project_path / "dist" / "bundle.js").write_text("bundled")
        os.makedirs(self.project_path / "build", exist_ok=True)
        (self.project_path / "build" / "output.js").write_text("compiled")
        # Create .gitignore that ignores dist/
        (self.project_path / ".gitignore").write_text("dist/\n")

    def teardown_method(self) -> None:
        shutil.rmtree(self.test_dir)

    def test_three_way_merge_global_project_and_gitignore(self) -> None:
        """Global patterns, project patterns, and .gitignore patterns are all applied together."""
        config = ProjectConfig(
            project_name="test_project",
            languages=[Language.PYTHON],
            ignored_paths=["build"],
            ignore_all_files_in_gitignore=True,
        )
        serena_config = SerenaConfig(gui_log_window=False, web_dashboard=False, ignored_paths=["node_modules"])
        project = Project(
            project_root=str(self.project_path),
            project_config=config,
            serena_config=serena_config,
        )
        # Global pattern: node_modules
        assert project.is_ignored_path(str(self.project_path / "node_modules" / "pkg.js"))
        # Project pattern: build
        assert project.is_ignored_path(str(self.project_path / "build" / "output.js"))
        # Gitignore pattern: dist/
        assert project.is_ignored_path(str(self.project_path / "dist" / "bundle.js"))
        # Non-ignored file
        assert not project.is_ignored_path(str(self.project_path / "main.py"))
class TestSerenaConfigIgnoredPaths:
    """Config loading with ignored_paths in serena_config.yml works correctly."""

    def test_serena_config_default_ignored_paths(self) -> None:
        """SerenaConfig defaults to empty ignored_paths."""
        config = SerenaConfig(gui_log_window=False, web_dashboard=False)
        assert config.ignored_paths == []

    def test_serena_config_with_ignored_paths(self) -> None:
        """SerenaConfig can be created with explicit ignored_paths."""
        # Mixed plain-directory and glob patterns must be stored as given.
        config = SerenaConfig(
            gui_log_window=False,
            web_dashboard=False,
            ignored_paths=["node_modules", "*.log", "build"],
        )
        assert config.ignored_paths == ["node_modules", "*.log", "build"]
| {
"repo_id": "oraios/serena",
"file_path": "test/serena/config/test_global_ignored_paths.py",
"license": "MIT License",
"lines": 217,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/serena/test_jetbrains_plugin_client.py | import pytest
from serena.constants import REPO_ROOT
from serena.jetbrains.jetbrains_plugin_client import JetBrainsPluginClient
class TestSerenaJetBrainsPluginClient:
    """Tests for path equivalence logic used to match the plugin's project to Serena's."""

    @pytest.mark.parametrize(
        "serena_path, plugin_path",
        [
            (REPO_ROOT, REPO_ROOT),
            ("/home/user/project", "/home/user/project"),
            ("/home/user/project", "//wsl.localhost/Ubuntu-24.04/home/user/project"),
            # The "//wsl$/Ubuntu/..." case was accidentally listed twice; one copy removed.
            ("/home/user/project", "//wsl$/Ubuntu/home/user/project"),
            ("/mnt/c/Users/user/projects/my-app", "/workspaces/serena/C:/Users/user/projects/my-app"),
        ],
    )
    def test_path_matching(self, serena_path, plugin_path) -> None:
        """_paths_match must accept identical paths as well as WSL/devcontainer-mapped variants."""
        assert JetBrainsPluginClient._paths_match(serena_path, plugin_path)
| {
"repo_id": "oraios/serena",
"file_path": "test/serena/test_jetbrains_plugin_client.py",
"license": "MIT License",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/serena/test_set_modes.py | """Tests for SerenaAgent.set_modes() to verify that mode switching works correctly."""
import logging
from serena.agent import SerenaAgent
from serena.config.serena_config import ModeSelectionDefinition, SerenaConfig
class TestSetModes:
    """Test that set_modes correctly changes active modes."""

    def _create_agent(self, modes: ModeSelectionDefinition | None = None) -> SerenaAgent:
        # GUI/dashboard disabled and ERROR log level keep agent construction quiet in tests.
        config = SerenaConfig(gui_log_window=False, web_dashboard=False, log_level=logging.ERROR)
        return SerenaAgent(serena_config=config, modes=modes)

    def test_set_modes_changes_active_modes(self) -> None:
        """Test that calling set_modes actually changes the active modes."""
        agent = self._create_agent(modes=ModeSelectionDefinition(default_modes=["editing", "interactive"]))
        initial_mode_names = sorted(m.name for m in agent.get_active_modes())
        assert "editing" in initial_mode_names
        assert "interactive" in initial_mode_names
        # Switch to planning mode
        agent.set_modes(["planning", "interactive"])
        new_mode_names = sorted(m.name for m in agent.get_active_modes())
        assert "planning" in new_mode_names
        assert "interactive" in new_mode_names
        assert "editing" not in new_mode_names

    def test_set_modes_overrides_config_defaults(self) -> None:
        """Test that set_modes takes precedence over config defaults."""
        config = SerenaConfig(gui_log_window=False, web_dashboard=False, log_level=logging.ERROR)
        config.default_modes = ["editing", "interactive"]
        agent = SerenaAgent(serena_config=config)
        # Verify config defaults are active
        initial_mode_names = [m.name for m in agent.get_active_modes()]
        assert "editing" in initial_mode_names
        # Switch modes — should override config defaults
        agent.set_modes(["planning", "one-shot"])
        new_mode_names = [m.name for m in agent.get_active_modes()]
        assert "planning" in new_mode_names
        assert "one-shot" in new_mode_names
        assert "editing" not in new_mode_names

    def test_set_modes_persists_after_repeated_calls(self) -> None:
        """Test that set_modes result persists (modes don't revert)."""
        agent = self._create_agent(modes=ModeSelectionDefinition(default_modes=["editing"]))
        agent.set_modes(["planning"])
        mode_names_1 = [m.name for m in agent.get_active_modes()]
        assert "planning" in mode_names_1
        # Call get_active_modes again — should still be planning
        mode_names_2 = [m.name for m in agent.get_active_modes()]
        assert mode_names_1 == mode_names_2

    def test_set_modes_can_switch_back(self) -> None:
        """Test that modes can be switched back to original after switching away."""
        agent = self._create_agent(modes=ModeSelectionDefinition(default_modes=["editing", "interactive"]))
        # Switch away
        agent.set_modes(["planning", "one-shot"])
        assert "planning" in [m.name for m in agent.get_active_modes()]
        # Switch back
        agent.set_modes(["editing", "interactive"])
        mode_names = [m.name for m in agent.get_active_modes()]
        assert "editing" in mode_names
        assert "interactive" in mode_names
        assert "planning" not in mode_names
| {
"repo_id": "oraios/serena",
"file_path": "test/serena/test_set_modes.py",
"license": "MIT License",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/cpp/test_cpp_basic.py | """
Basic tests for C/C++ language server integration (clangd and ccls).
This module tests both Language.CPP (clangd) and Language.CPP_CCLS (ccls)
using the same test repository. Tests are skipped if the respective language
server is not available.
"""
import os
import pathlib
import shutil
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_utils import SymbolUtils
def _ccls_available() -> bool:
return shutil.which("ccls") is not None
# Always test clangd (Language.CPP); additionally test ccls when it is installed.
_cpp_servers: list[Language] = [Language.CPP]
if _ccls_available():
    _cpp_servers.append(Language.CPP_CCLS)
@pytest.mark.cpp
@pytest.mark.skipif(not _cpp_servers, reason="No C++ language server (clangd or ccls) available")
class TestCppLanguageServer:
    """Tests for C/C++ language servers (clangd and ccls)."""

    @pytest.mark.parametrize("language_server", _cpp_servers, indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test that symbol tree contains expected functions."""
        symbols = language_server.request_full_symbol_tree()
        assert SymbolUtils.symbol_tree_contains_name(symbols, "add"), "Function 'add' not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "main"), "Function 'main' not found in symbol tree"

    @pytest.mark.parametrize("language_server", _cpp_servers, indirect=True)
    def test_get_document_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test document symbols for a.cpp."""
        file_path = os.path.join("a.cpp")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Flatten nested structure if needed
        symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
        names = [s.get("name") for s in symbol_list]
        assert "main" in names, f"Expected 'main' in document symbols, got: {names}"

    @pytest.mark.parametrize("language_server", _cpp_servers, indirect=True)
    def test_find_referencing_symbols_across_files(self, language_server: SolidLanguageServer) -> None:
        """Test finding references to 'add' function across files."""
        # Locate 'add' in b.cpp
        file_path = os.path.join("b.cpp")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
        add_symbol = None
        for sym in symbol_list:
            if sym.get("name") == "add":
                add_symbol = sym
                break
        assert add_symbol is not None, "Could not find 'add' function symbol in b.cpp"
        sel_start = add_symbol["selectionRange"]["start"]
        refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
        ref_files = [ref.get("relativePath", "") for ref in refs]
        assert any("a.cpp" in ref_file for ref_file in ref_files), f"Should find reference in a.cpp, {refs=}"

        # Verify second call returns same results (stability check)
        def _ref_key(ref: dict) -> tuple:
            # Normalize a reference into a comparable (path, start, end) tuple.
            rp = ref.get("relativePath", "")
            rng = ref.get("range") or {}
            s = rng.get("start") or {}
            e = rng.get("end") or {}
            return (
                rp,
                s.get("line", -1),
                s.get("character", -1),
                e.get("line", -1),
                e.get("character", -1),
            )

        refs2 = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
        assert sorted(map(_ref_key, refs2)) == sorted(map(_ref_key, refs)), "Reference results should be stable across calls"

    @pytest.mark.parametrize("language_server", _cpp_servers, indirect=True)
    @pytest.mark.xfail(
        strict=True,
        reason=("Both clangd and ccls do not support cross-file references for newly created files that were never opened by the LS."),
    )
    def test_find_references_in_newly_written_file(self, language_server: SolidLanguageServer) -> None:
        # Create a new file that references the 'add' function from b.cpp
        new_file_path = os.path.join("temp_new_file.cpp")
        new_file_abs_path = os.path.join(language_server.repository_root_path, new_file_path)
        try:
            # Write the new file with a reference to add()
            with open(new_file_abs_path, "w", encoding="utf-8") as f:
                f.write(
                    """
#include "b.hpp"
int use_add() {
    int result = add(5, 3);
    return result;
}
"""
                )
            # Open the new file so clangd knows about it
            with language_server.open_file(new_file_path):
                # Request document symbols to ensure the file is fully loaded by clangd
                new_file_symbols = language_server.request_document_symbols(new_file_path).get_all_symbols_and_roots()
                assert new_file_symbols, "New file should have symbols"
            # Verify the file stays in open_file_buffers after the context exits
            uri = pathlib.Path(new_file_abs_path).as_uri()
            assert uri in language_server.open_file_buffers, "File should remain in open_file_buffers"
            # Find the 'add' symbol in b.cpp
            b_file_path = os.path.join("b.cpp")
            symbols = language_server.request_document_symbols(b_file_path).get_all_symbols_and_roots()
            symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
            add_symbol = None
            for sym in symbol_list:
                if sym.get("name") == "add":
                    add_symbol = sym
                    break
            assert add_symbol is not None, "Could not find 'add' function symbol in b.cpp"
            # Request references for 'add'
            sel_start = add_symbol["selectionRange"]["start"]
            refs = language_server.request_references(b_file_path, sel_start["line"], sel_start["character"])
            ref_files = [ref.get("relativePath", "") for ref in refs]
            # Should find reference in the newly written file
            assert any(
                "temp_new_file.cpp" in ref_file for ref_file in ref_files
            ), f"Should find reference in newly written temp_new_file.cpp, {ref_files=}"
        finally:
            # Clean up the new file
            if os.path.exists(new_file_abs_path):
                os.remove(new_file_abs_path)
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/cpp/test_cpp_basic.py",
"license": "MIT License",
"lines": 120,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/csharp/test_csharp_nuget_download.py | """Tests for C# language server NuGet package download from NuGet.org."""
import tempfile
from unittest.mock import patch
import pytest
from solidlsp.language_servers.common import RuntimeDependency
from solidlsp.language_servers.csharp_language_server import CSharpLanguageServer
from solidlsp.settings import SolidLSPSettings
@pytest.mark.csharp
class TestNuGetOrgDownload:
    """Test downloading Roslyn language server packages from NuGet.org."""

    def test_download_nuget_package_uses_direct_url(self):
        """Test that _download_nuget_package uses the URL from RuntimeDependency directly."""
        with tempfile.TemporaryDirectory() as temp_dir:
            # Create a RuntimeDependency with a NuGet.org URL
            test_dependency = RuntimeDependency(
                id="TestPackage",
                description="Test package from NuGet.org",
                package_name="roslyn-language-server.linux-x64",
                package_version="5.5.0-2.26078.4",
                url="https://www.nuget.org/api/v2/package/roslyn-language-server.linux-x64/5.5.0-2.26078.4",
                platform_id="linux-x64",
                archive_type="nupkg",
                binary_name="Microsoft.CodeAnalysis.LanguageServer.dll",
                extract_path="content/LanguageServer/linux-x64",
            )
            # Mock the dependency provider
            mock_settings = SolidLSPSettings()
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            dependency_provider = CSharpLanguageServer.DependencyProvider(
                custom_settings=custom_settings,
                ls_resources_dir=temp_dir,
                solidlsp_settings=mock_settings,
                repository_root_path="/fake/repo",
            )
            # Mock urllib.request.urlretrieve to capture the URL being used
            with patch("solidlsp.language_servers.csharp_language_server.urllib.request.urlretrieve") as mock_retrieve:
                with patch("solidlsp.language_servers.csharp_language_server.SafeZipExtractor"):
                    try:
                        dependency_provider._download_nuget_package(test_dependency)
                    except Exception:
                        # Expected to fail since we're mocking, but we want to check the URL
                        pass
                # Verify that urlretrieve was called with the NuGet.org URL
                assert mock_retrieve.called, "urlretrieve should be called"
                called_url = mock_retrieve.call_args[0][0]
                assert called_url == test_dependency.url, f"Should use URL from RuntimeDependency: {test_dependency.url}"
                assert "nuget.org" in called_url, "Should use NuGet.org URL"
                assert "azure" not in called_url.lower(), "Should not use Azure feed"

    def test_runtime_dependencies_use_nuget_org_urls(self):
        """Test that _RUNTIME_DEPENDENCIES are configured with NuGet.org URLs."""
        from solidlsp.language_servers.csharp_language_server import _RUNTIME_DEPENDENCIES

        # Check language server dependencies
        lang_server_deps = [dep for dep in _RUNTIME_DEPENDENCIES if dep.id == "CSharpLanguageServer"]
        assert len(lang_server_deps) == 6, "Should have 6 language server platform variants"
        for dep in lang_server_deps:
            # Verify package name uses roslyn-language-server
            assert dep.package_name is not None, f"Package name should be set for {dep.platform_id}"
            assert dep.package_name.startswith(
                "roslyn-language-server."
            ), f"Package name should start with 'roslyn-language-server.' but got: {dep.package_name}"
            # Verify version is the newer NuGet.org version
            assert dep.package_version == "5.5.0-2.26078.4", f"Should use NuGet.org version 5.5.0-2.26078.4, got: {dep.package_version}"
            # Verify URL points to NuGet.org
            assert dep.url is not None, f"URL should be set for {dep.platform_id}"
            assert "nuget.org" in dep.url, f"URL should point to nuget.org, got: {dep.url}"
            assert "azure" not in dep.url.lower(), f"URL should not point to Azure feed, got: {dep.url}"

    def test_download_method_does_not_call_azure_feed(self):
        """Test that the new download method does not attempt to access Azure feed."""
        with tempfile.TemporaryDirectory() as temp_dir:
            test_dependency = RuntimeDependency(
                id="TestPackage",
                description="Test package",
                package_name="roslyn-language-server.linux-x64",
                package_version="5.5.0-2.26078.4",
                url="https://www.nuget.org/api/v2/package/roslyn-language-server.linux-x64/5.5.0-2.26078.4",
                platform_id="linux-x64",
                archive_type="nupkg",
                binary_name="test.dll",
            )
            mock_settings = SolidLSPSettings()
            custom_settings = SolidLSPSettings.CustomLSSettings({})
            dependency_provider = CSharpLanguageServer.DependencyProvider(
                custom_settings=custom_settings,
                ls_resources_dir=temp_dir,
                solidlsp_settings=mock_settings,
                repository_root_path="/fake/repo",
            )
            # Mock urllib.request.urlopen to track if Azure feed is accessed
            with patch("solidlsp.language_servers.csharp_language_server.urllib.request.urlopen") as mock_urlopen:
                with patch("solidlsp.language_servers.csharp_language_server.urllib.request.urlretrieve"):
                    with patch("solidlsp.language_servers.csharp_language_server.SafeZipExtractor"):
                        try:
                            dependency_provider._download_nuget_package(test_dependency)
                        except Exception:
                            pass
                # Verify that urlopen was NOT called (no service index lookup)
                assert not mock_urlopen.called, "Should not call urlopen for Azure service index lookup"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/csharp/test_csharp_nuget_download.py",
"license": "MIT License",
"lines": 97,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/scala/test_metals_db_utils.py | """
Unit tests for the metals_db_utils module.
Tests the detection of Metals H2 database status and stale lock handling.
"""
import os
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from solidlsp.util.metals_db_utils import (
MetalsDbStatus,
check_metals_db_status,
cleanup_stale_lock,
is_metals_process_alive,
parse_h2_lock_file,
)
@pytest.mark.scala
class TestParseH2LockFile:
    """Tests for parse_h2_lock_file function."""

    def test_returns_none_when_file_does_not_exist(self, tmp_path: Path) -> None:
        """Should return None when lock file doesn't exist."""
        lock_path = tmp_path / "nonexistent.lock.db"
        result = parse_h2_lock_file(lock_path)
        assert result is None

    def test_parses_server_format_lock_file(self, tmp_path: Path) -> None:
        """Should parse lock file with server:host:port format."""
        lock_path = tmp_path / "metals.mv.db.lock.db"
        lock_path.write_text("server:localhost:9092\n")
        result = parse_h2_lock_file(lock_path)
        assert result is not None
        assert result.port == 9092
        assert result.lock_path == lock_path

    def test_parses_port_only_format(self, tmp_path: Path) -> None:
        """Should extract port from content containing a port number."""
        lock_path = tmp_path / "metals.mv.db.lock.db"
        lock_path.write_text("some content 9123 more content\n")
        result = parse_h2_lock_file(lock_path)
        assert result is not None
        assert result.port == 9123

    def test_parses_pid_format(self, tmp_path: Path) -> None:
        """Should extract PID from lock file content."""
        lock_path = tmp_path / "metals.mv.db.lock.db"
        lock_path.write_text("pid=12345\nserver:localhost:9092\n")
        result = parse_h2_lock_file(lock_path)
        assert result is not None
        assert result.pid == 12345
        assert result.port == 9092

    def test_handles_unreadable_file(self, tmp_path: Path) -> None:
        """Should return None for unreadable files."""
        # FIX: previously the chmod was skipped on Windows but the assertion still
        # ran against a perfectly readable file, so the test did not check what it
        # claims there. Skip instead (consistent with
        # TestCleanupStaleLock.test_returns_false_on_permission_error).
        if os.name == "nt":
            pytest.skip("File permissions cannot be removed via chmod on Windows")
        if os.geteuid() == 0:
            # root bypasses file permission checks, so chmod(0o000) would not
            # actually make the file unreadable and the test premise fails
            pytest.skip("Running as root; chmod(0o000) does not prevent reading")
        lock_path = tmp_path / "metals.mv.db.lock.db"
        lock_path.write_text("content")
        lock_path.chmod(0o000)  # make the file unreadable
        try:
            result = parse_h2_lock_file(lock_path)
            assert result is None
        finally:
            # restore permissions so tmp_path cleanup can delete the file
            lock_path.chmod(0o644)

    def test_truncates_raw_content(self, tmp_path: Path) -> None:
        """Should truncate raw_content to 200 chars."""
        lock_path = tmp_path / "metals.mv.db.lock.db"
        long_content = "x" * 500
        lock_path.write_text(long_content)
        result = parse_h2_lock_file(lock_path)
        assert result is not None
        assert len(result.raw_content) == 200
@pytest.mark.scala
class TestIsMetalsProcessAlive:
    """Tests for is_metals_process_alive function."""

    @staticmethod
    def _patched_process(running: bool, cmdline: list):
        """Build a patch context that replaces psutil.Process with a canned mock."""
        import psutil

        proc = MagicMock()
        proc.is_running.return_value = running
        proc.cmdline.return_value = cmdline
        return patch.object(psutil, "Process", return_value=proc)

    def test_returns_false_for_nonexistent_process(self) -> None:
        """Should return False for a PID that doesn't exist."""
        # Use a very high PID that's unlikely to exist
        assert is_metals_process_alive(999999999) is False

    def test_returns_true_for_metals_process(self) -> None:
        """Should return True for a running Metals process."""
        metals_cmdline = ["java", "-Dmetals.client=vscode", "-jar", "metals.jar"]
        with self._patched_process(True, metals_cmdline):
            assert is_metals_process_alive(12345) is True

    def test_returns_false_for_non_metals_java_process(self) -> None:
        """Should return False for a Java process that isn't Metals."""
        other_cmdline = ["java", "-jar", "some-other-app.jar"]
        with self._patched_process(True, other_cmdline):
            assert is_metals_process_alive(12345) is False

    def test_returns_false_for_non_running_process(self) -> None:
        """Should return False for a process that's not running."""
        with self._patched_process(False, []):
            assert is_metals_process_alive(12345) is False

    def test_handles_no_such_process(self) -> None:
        """Should return False when process doesn't exist."""
        import psutil

        with patch.object(psutil, "Process", side_effect=psutil.NoSuchProcess(12345)):
            assert is_metals_process_alive(12345) is False
@pytest.mark.scala
class TestCheckMetalsDbStatus:
    """Tests for check_metals_db_status function."""

    @staticmethod
    def _populate_metals_dir(workspace: Path, with_db: bool = False, lock_content: "str | None" = None) -> None:
        """Create workspace/.metals, optionally adding the H2 db file and a lock file."""
        metals_dir = workspace / ".metals"
        metals_dir.mkdir()
        if with_db:
            (metals_dir / "metals.mv.db").touch()
        if lock_content is not None:
            (metals_dir / "metals.mv.db.lock.db").write_text(lock_content)

    def test_returns_no_database_when_metals_dir_missing(self, tmp_path: Path) -> None:
        """Should return NO_DATABASE when .metals directory doesn't exist."""
        status, lock_info = check_metals_db_status(tmp_path)
        assert status == MetalsDbStatus.NO_DATABASE
        assert lock_info is None

    def test_returns_no_database_when_db_missing(self, tmp_path: Path) -> None:
        """Should return NO_DATABASE when database file doesn't exist."""
        self._populate_metals_dir(tmp_path)
        status, lock_info = check_metals_db_status(tmp_path)
        assert status == MetalsDbStatus.NO_DATABASE
        assert lock_info is None

    def test_returns_no_lock_when_lock_file_missing(self, tmp_path: Path) -> None:
        """Should return NO_LOCK when database exists but lock doesn't."""
        self._populate_metals_dir(tmp_path, with_db=True)
        status, lock_info = check_metals_db_status(tmp_path)
        assert status == MetalsDbStatus.NO_LOCK
        assert lock_info is None

    def test_returns_active_instance_when_process_alive(self, tmp_path: Path) -> None:
        """Should return ACTIVE_INSTANCE when lock holder is running."""
        import solidlsp.util.metals_db_utils as metals_utils

        self._populate_metals_dir(tmp_path, with_db=True, lock_content="pid=12345\nserver:localhost:9092\n")
        with patch.object(metals_utils, "is_metals_process_alive", return_value=True):
            status, lock_info = check_metals_db_status(tmp_path)
        assert status == MetalsDbStatus.ACTIVE_INSTANCE
        assert lock_info is not None
        assert lock_info.is_stale is False

    def test_returns_stale_lock_when_process_dead(self, tmp_path: Path) -> None:
        """Should return STALE_LOCK when lock holder is not running."""
        import solidlsp.util.metals_db_utils as metals_utils

        self._populate_metals_dir(tmp_path, with_db=True, lock_content="pid=12345\nserver:localhost:9092\n")
        with patch.object(metals_utils, "is_metals_process_alive", return_value=False):
            status, lock_info = check_metals_db_status(tmp_path)
        assert status == MetalsDbStatus.STALE_LOCK
        assert lock_info is not None
        assert lock_info.is_stale is True
@pytest.mark.scala
class TestCleanupStaleLock:
    """Tests for cleanup_stale_lock function."""

    def test_removes_lock_file(self, tmp_path: Path) -> None:
        """Should successfully remove a lock file."""
        stale_lock = tmp_path / "metals.mv.db.lock.db"
        stale_lock.touch()
        assert cleanup_stale_lock(stale_lock) is True
        assert stale_lock.exists() is False

    def test_returns_true_when_file_already_removed(self, tmp_path: Path) -> None:
        """Should return True when file doesn't exist."""
        missing_lock = tmp_path / "nonexistent.lock.db"
        assert cleanup_stale_lock(missing_lock) is True

    def test_returns_false_on_permission_error(self, tmp_path: Path) -> None:
        """Should return False when file can't be removed due to permissions."""
        if os.name == "nt":
            pytest.skip("Permission test not reliable on Windows")
        stale_lock = tmp_path / "metals.mv.db.lock.db"
        stale_lock.touch()
        # A read-only parent directory prevents unlinking the child entry.
        tmp_path.chmod(0o555)
        try:
            assert cleanup_stale_lock(stale_lock) is False
            assert stale_lock.exists()
        finally:
            # restore write permission so pytest can clean up tmp_path
            tmp_path.chmod(0o755)
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/scala/test_metals_db_utils.py",
"license": "MIT License",
"lines": 200,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/scala/test_scala_stale_lock_handling.py | """
Tests for ScalaLanguageServer stale lock detection and handling modes.
These tests verify the ScalaLanguageServer's behavior when detecting stale Metals locks.
They use mocking to avoid requiring an actual Scala project or Metals server.
"""
import logging
from pathlib import Path
from typing import Any
from unittest.mock import MagicMock, patch
import pytest
from _pytest.logging import LogCaptureFixture
from solidlsp.language_servers.scala_language_server import ScalaLanguageServer
from solidlsp.ls_config import Language
from solidlsp.settings import SolidLSPSettings
from solidlsp.util.metals_db_utils import MetalsDbStatus, MetalsLockInfo
pytestmark = pytest.mark.scala
class TestStaleLockHandling:
    """Tests for ScalaLanguageServer stale lock detection and handling modes."""

    @pytest.fixture
    def sample_lock_info(self, tmp_path: Path) -> MetalsLockInfo:
        """Create a sample MetalsLockInfo for testing."""
        lock_path = tmp_path / ".metals" / "metals.mv.db.lock.db"
        return MetalsLockInfo(
            pid=12345,
            port=9092,
            lock_path=lock_path,
            is_stale=True,
            raw_content="SERVER:localhost:9092:12345",
        )

    @pytest.fixture
    def mock_setup_dependencies(self) -> Any:
        """Mock _setup_runtime_dependencies to avoid needing Java/Coursier."""
        # NOTE: this returns an *unstarted* patcher; each test activates it by
        # entering it as a context manager in its `with` block.
        return patch.object(
            ScalaLanguageServer,
            "_setup_runtime_dependencies",
            return_value=["/fake/metals"],
        )

    def test_auto_clean_mode_cleans_stale_lock(
        self,
        tmp_path: Path,
        sample_lock_info: MetalsLockInfo,
        mock_setup_dependencies: Any,
        # NOTE(review): caplog is unused in this test — candidate for removal
        caplog: LogCaptureFixture,
    ) -> None:
        """Test AUTO_CLEAN mode removes stale lock and proceeds."""
        cleanup_mock = MagicMock(return_value=True)
        with (
            patch(
                "solidlsp.util.metals_db_utils.check_metals_db_status",
                return_value=(MetalsDbStatus.STALE_LOCK, sample_lock_info),
            ),
            patch(
                "solidlsp.util.metals_db_utils.cleanup_stale_lock",
                cleanup_mock,
            ),
            mock_setup_dependencies,
            # NOTE(review): this __init__ patch appears redundant — the instance
            # below is created via object.__new__, which never invokes __init__
            patch.object(ScalaLanguageServer, "__init__", lambda self, *args, **kwargs: None),
        ):
            # Create instance without calling __init__
            ls = object.__new__(ScalaLanguageServer)
            settings = SolidLSPSettings(ls_specific_settings={Language.SCALA: {"on_stale_lock": "auto-clean"}})
            # Call the method under test
            ls._check_metals_db_status(str(tmp_path), settings)
            # Verify cleanup was called
            cleanup_mock.assert_called_once_with(sample_lock_info.lock_path)

    def test_warn_mode_logs_warning_without_cleanup(
        self,
        tmp_path: Path,
        sample_lock_info: MetalsLockInfo,
        mock_setup_dependencies: Any,
        caplog: LogCaptureFixture,
    ) -> None:
        """Test WARN mode logs warning but does not clean up."""
        cleanup_mock = MagicMock(return_value=True)
        with (
            patch(
                "solidlsp.util.metals_db_utils.check_metals_db_status",
                return_value=(MetalsDbStatus.STALE_LOCK, sample_lock_info),
            ),
            patch(
                "solidlsp.util.metals_db_utils.cleanup_stale_lock",
                cleanup_mock,
            ),
            mock_setup_dependencies,
            caplog.at_level(logging.WARNING),
        ):
            ls = object.__new__(ScalaLanguageServer)
            settings = SolidLSPSettings(ls_specific_settings={Language.SCALA: {"on_stale_lock": "warn"}})
            ls._check_metals_db_status(str(tmp_path), settings)
            # Verify cleanup was NOT called
            cleanup_mock.assert_not_called()
            # Verify warning was logged
            assert any("Stale Metals lock detected" in record.message for record in caplog.records)

    def test_fail_mode_raises_exception(
        self,
        tmp_path: Path,
        sample_lock_info: MetalsLockInfo,
        mock_setup_dependencies: Any,
    ) -> None:
        """Test FAIL mode raises MetalsStaleLockError."""
        from solidlsp.ls_exceptions import MetalsStaleLockError

        with (
            patch(
                "solidlsp.util.metals_db_utils.check_metals_db_status",
                return_value=(MetalsDbStatus.STALE_LOCK, sample_lock_info),
            ),
            mock_setup_dependencies,
            pytest.raises(MetalsStaleLockError) as exc_info,
        ):
            ls = object.__new__(ScalaLanguageServer)
            settings = SolidLSPSettings(ls_specific_settings={Language.SCALA: {"on_stale_lock": "fail"}})
            ls._check_metals_db_status(str(tmp_path), settings)
        # the raised error is expected to identify the offending lock file
        assert str(sample_lock_info.lock_path) in str(exc_info.value)

    def test_active_instance_logs_info_when_enabled(
        self,
        tmp_path: Path,
        mock_setup_dependencies: Any,
        caplog: LogCaptureFixture,
    ) -> None:
        """Test ACTIVE_INSTANCE logs info message when log_multi_instance_notice is true."""
        # a non-stale lock, i.e. the holding Metals process is still running
        active_lock_info = MetalsLockInfo(
            pid=99999,
            port=9092,
            lock_path=tmp_path / ".metals" / "metals.mv.db.lock.db",
            is_stale=False,
            raw_content="SERVER:localhost:9092:99999",
        )
        with (
            patch(
                "solidlsp.util.metals_db_utils.check_metals_db_status",
                return_value=(MetalsDbStatus.ACTIVE_INSTANCE, active_lock_info),
            ),
            mock_setup_dependencies,
            caplog.at_level(logging.INFO),
        ):
            ls = object.__new__(ScalaLanguageServer)
            settings = SolidLSPSettings(
                ls_specific_settings={
                    Language.SCALA: {
                        "on_stale_lock": "auto-clean",
                        "log_multi_instance_notice": True,
                    }
                }
            )
            ls._check_metals_db_status(str(tmp_path), settings)
            # Verify info about multi-instance was logged
            assert any("Another Metals instance detected" in record.message for record in caplog.records)

    def test_active_instance_silent_when_notice_disabled(
        self,
        tmp_path: Path,
        mock_setup_dependencies: Any,
        caplog: LogCaptureFixture,
    ) -> None:
        """Test ACTIVE_INSTANCE does not log when log_multi_instance_notice is false."""
        active_lock_info = MetalsLockInfo(
            pid=99999,
            port=9092,
            lock_path=tmp_path / ".metals" / "metals.mv.db.lock.db",
            is_stale=False,
            raw_content="SERVER:localhost:9092:99999",
        )
        with (
            patch(
                "solidlsp.util.metals_db_utils.check_metals_db_status",
                return_value=(MetalsDbStatus.ACTIVE_INSTANCE, active_lock_info),
            ),
            mock_setup_dependencies,
            caplog.at_level(logging.INFO),
        ):
            ls = object.__new__(ScalaLanguageServer)
            settings = SolidLSPSettings(
                ls_specific_settings={
                    Language.SCALA: {
                        "on_stale_lock": "auto-clean",
                        "log_multi_instance_notice": False,
                    }
                }
            )
            ls._check_metals_db_status(str(tmp_path), settings)
            # Verify no multi-instance message was logged
            assert not any("Another Metals instance detected" in record.message for record in caplog.records)

    def test_no_database_proceeds_silently(
        self,
        tmp_path: Path,
        mock_setup_dependencies: Any,
        caplog: LogCaptureFixture,
    ) -> None:
        """Test NO_DATABASE status proceeds without any special handling."""
        with (
            patch(
                "solidlsp.util.metals_db_utils.check_metals_db_status",
                return_value=(MetalsDbStatus.NO_DATABASE, None),
            ),
            mock_setup_dependencies,
            caplog.at_level(logging.DEBUG),
        ):
            ls = object.__new__(ScalaLanguageServer)
            settings = SolidLSPSettings(ls_specific_settings={Language.SCALA: {"on_stale_lock": "auto-clean"}})
            # Should complete without error
            ls._check_metals_db_status(str(tmp_path), settings)
            # No stale lock or multi-instance messages
            assert not any("Stale" in record.message for record in caplog.records)
            assert not any("Another Metals instance" in record.message for record in caplog.records)

    def test_no_lock_proceeds_silently(
        self,
        tmp_path: Path,
        mock_setup_dependencies: Any,
        caplog: LogCaptureFixture,
    ) -> None:
        """Test NO_LOCK status proceeds without any special handling."""
        with (
            patch(
                "solidlsp.util.metals_db_utils.check_metals_db_status",
                return_value=(MetalsDbStatus.NO_LOCK, None),
            ),
            mock_setup_dependencies,
            caplog.at_level(logging.DEBUG),
        ):
            ls = object.__new__(ScalaLanguageServer)
            settings = SolidLSPSettings(ls_specific_settings={Language.SCALA: {"on_stale_lock": "auto-clean"}})
            # Should complete without error
            ls._check_metals_db_status(str(tmp_path), settings)
            # No stale lock or multi-instance messages
            assert not any("Stale" in record.message for record in caplog.records)
            assert not any("Another Metals instance" in record.message for record in caplog.records)
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/scala/test_scala_stale_lock_handling.py",
"license": "MIT License",
"lines": 225,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/test_ls_common.py | import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
class TestLanguageServerCommonFunctionality:
    """Test common functionality of SolidLanguageServer base implementation (not language-specific behaviour)."""

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_open_file_cache_invalidate(self, language_server: SolidLanguageServer) -> None:
        """
        Tests that the file buffer cache is invalidated when the file is changed on disk.
        """
        target_path = os.path.join(language_server.repository_root_path, "test_open_file.py")
        original_text = "# foo"
        replacement_text = "# bar"
        with open(target_path, "w") as f:
            f.write(original_text)
        try:
            with language_server.open_file(target_path) as file_buffer:
                assert file_buffer.contents == original_text
                # apply external change to file
                with open(target_path, "w") as f:
                    f.write(replacement_text)
                # Explicitly bump mtime into the future so the cache sees a change.
                # Relying on natural mtime advancement is flaky because many filesystems
                # (ext4, tmpfs) have only 1-second mtime granularity, and both writes
                # can land in the same second.
                stat_result = os.stat(target_path)
                os.utime(target_path, (stat_result.st_atime, stat_result.st_mtime + 2))
                # check that the file buffer has been invalidated and reloaded
                assert file_buffer.contents == replacement_text
        finally:
            os.remove(target_path)
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/test_ls_common.py",
"license": "MIT License",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:src/serena/util/dataclass.py | from dataclasses import MISSING, Field
from typing import Any, cast
def get_dataclass_default(cls: type, field_name: str) -> Any:
    """
    Gets the default value of a dataclass field.

    :param cls: The dataclass type.
    :param field_name: The name of the field.
    :return: The default value of the field (either from default or default_factory).
    :raises AttributeError: if the field defines neither a default nor a default_factory.
    """
    fld = cast(Field, cls.__dataclass_fields__[field_name])  # type: ignore[attr-defined]
    # a plain default takes precedence; a factory must be invoked to produce the value
    for candidate, is_factory in ((fld.default, False), (fld.default_factory, True)):
        if candidate is not MISSING:
            return candidate() if is_factory else candidate
    raise AttributeError(f"{field_name} has no default")
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/util/dataclass.py",
"license": "MIT License",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/serena/util/yaml.py | import logging
import os
from collections.abc import Sequence
from enum import Enum
from typing import Any
from ruamel.yaml import YAML, CommentToken, StreamMark
from ruamel.yaml.comments import CommentedMap
from serena.constants import SERENA_FILE_ENCODING
log = logging.getLogger(__name__)
def _create_yaml(preserve_comments: bool = False) -> YAML:
    """
    Creates a YAML that can load/save with comments if preserve_comments is True.
    """
    if preserve_comments:
        # typ=None selects ruamel's default (comment-preserving) mode
        yaml = YAML(typ=None)
    else:
        yaml = YAML(typ="safe")
    yaml.preserve_quotes = preserve_comments
    return yaml
# Applied by normalise_yaml_comments() below; see its match statement for semantics.
class YamlCommentNormalisation(Enum):
    """
    Defines a normalisation to be applied to the comment representation in a ruamel CommentedMap.

    Note that even though a YAML document may seem to consistently contain, for example, leading comments
    before a key only, ruamel may still parse some comments as trailing comments of the previous key
    or as document-level comments.
    The normalisations define ways to adjust the comment representation accordingly, clearly associating
    comments with the keys they belong to.
    """

    NONE = "none"
    """
    No comment normalisation is performed.
    Comments are kept as parsed by ruamel.yaml.
    """
    LEADING = "leading"
    """
    Document is assumed to have leading comments only, i.e. comments before keys, only full-line comments.
    This normalisation achieves that comments are properly associated with keys as leading comments.
    """
    LEADING_WITH_CONVERSION_FROM_TRAILING = "leading_with_conversion_from_trailing"
    """
    Document is assumed to have a mixture of leading comments (before keys) and trailing comments (after values), only full-line comments.
    This normalisation achieves that all comments are converted to leading comments and properly associated with keys.
    """
    # NOTE: Normalisation for trailing comments was attempted but is extremely hard, because
    # it is difficult to position the comments properly after values, especially for complex values.
# Indices into ruamel's document-level comment entry (CommentedMap.ca.comment).
DOC_COMMENT_INDEX_POST = 0
DOC_COMMENT_INDEX_PRE = 1
# Indices into a per-key comment entry (CommentedMap.ca.items[key]):
# item comment indices: (post key, pre key, post value, pre value)
ITEM_COMMENT_INDEX_BEFORE = 1  # (pre-key; must be a list of CommentToken at this index)
ITEM_COMMENT_INDEX_AFTER = 2  # (post-value; must be an instance of CommentToken at this index)
def load_yaml(path: str, comment_normalisation: YamlCommentNormalisation = YamlCommentNormalisation.NONE) -> CommentedMap:
    """
    Loads a YAML file into a comment-preserving map and normalises its comments.

    :param path: the path to the YAML file to load
    :param comment_normalisation: the comment normalisation to apply after loading
    :return: the loaded commented map
    """
    yaml = _create_yaml(preserve_comments=True)
    with open(path, encoding=SERENA_FILE_ENCODING) as f:
        loaded_map: CommentedMap = yaml.load(f)
    normalise_yaml_comments(loaded_map, comment_normalisation)
    return loaded_map
def normalise_yaml_comments(commented_map: CommentedMap, comment_normalisation: YamlCommentNormalisation) -> None:
    """
    Applies the given comment normalisation to the given commented map in-place.

    :param commented_map: the commented map whose comments are to be normalised
    :param comment_normalisation: the comment normalisation to apply
    """

    def make_list(comment_entry: Any) -> list:
        # ruamel stores comment entries either as a single token or a list of tokens;
        # ensure the list form
        if not isinstance(comment_entry, list):
            return [comment_entry]
        return comment_entry

    # NOTE(review): make_unit is not referenced anywhere in this function —
    # candidate for removal; verify there is no planned use before deleting.
    def make_unit(comment_entry: Any) -> Any:
        """
        Converts a list-valued comment entry into a single comment entry.
        """
        if isinstance(comment_entry, list):
            if len(comment_entry) == 0:
                return None
            elif len(comment_entry) == 1:
                return comment_entry[0]
            else:
                if all(isinstance(item, CommentToken) for item in comment_entry):
                    # merge all tokens into a single token with a synthetic stream position
                    start_mark = StreamMark(name="", index=0, line=0, column=0)
                    comment_str = "".join(item.value for item in comment_entry)
                    if not comment_str.startswith("\n"):
                        comment_str = "\n" + comment_str
                    return CommentToken(value=comment_str, start_mark=start_mark, end_mark=None)
                else:
                    types = set(type(item) for item in comment_entry)
                    log.warning("Unhandled types in list-valued comment entry: %s; not updating entry", types)
                    return None
        else:
            return comment_entry

    def trailing_to_leading(comment_entry: Any) -> Any:
        # reshape a trailing comment entry so it can serve as a leading comment
        if comment_entry is None:
            return None
        token_list = make_list(comment_entry)
        first_token = token_list[0]
        if isinstance(first_token, CommentToken):
            # remove leading newline if present
            if first_token.value.startswith("\n"):
                first_token.value = first_token.value[1:]
        return token_list

    match comment_normalisation:
        case YamlCommentNormalisation.NONE:
            pass
        case YamlCommentNormalisation.LEADING | YamlCommentNormalisation.LEADING_WITH_CONVERSION_FROM_TRAILING:
            # Comments are supposed to be leading comments (i.e., before a key and associated with the key).
            # When ruamel parses a YAML, however, comments belonging to a key may be stored as trailing
            # comments of the previous key or as a document-level comment.
            # Move them accordingly.
            keys = list(commented_map.keys())
            comment_items = commented_map.ca.items
            doc_comment = commented_map.ca.comment
            preceding_comment = None
            for i, key in enumerate(keys):
                # ensure every key has a 4-slot comment entry registered
                current_comment = comment_items.get(key, [None] * 4)
                comment_items[key] = current_comment
                if current_comment[ITEM_COMMENT_INDEX_BEFORE] is None:
                    if i == 0 and doc_comment is not None and doc_comment[DOC_COMMENT_INDEX_PRE] is not None:
                        # move document pre-comment to leading comment of first key
                        current_comment[ITEM_COMMENT_INDEX_BEFORE] = make_list(doc_comment[DOC_COMMENT_INDEX_PRE])
                        doc_comment[DOC_COMMENT_INDEX_PRE] = None
                    elif preceding_comment is not None and preceding_comment[ITEM_COMMENT_INDEX_AFTER] is not None:
                        # move trailing comment of preceding key to leading comment of current key
                        current_comment[ITEM_COMMENT_INDEX_BEFORE] = trailing_to_leading(preceding_comment[ITEM_COMMENT_INDEX_AFTER])
                        preceding_comment[ITEM_COMMENT_INDEX_AFTER] = None
                preceding_comment = current_comment
            if comment_normalisation == YamlCommentNormalisation.LEADING_WITH_CONVERSION_FROM_TRAILING:
                # Second pass: conversion of trailing comments
                # If a leading comment ends with "\n\n", i.e. it has an empty line between the comment and the key,
                # it was actually intended as a trailing comment for the preceding key, so we associate it with
                # the preceding key instead (if the preceding key has no leading comment already).
                preceding_comment = None
                for key in keys:
                    current_comment = comment_items.get(key, [None] * 4)
                    if current_comment[ITEM_COMMENT_INDEX_BEFORE] is not None:
                        token_list = make_list(current_comment[ITEM_COMMENT_INDEX_BEFORE])
                        if len(token_list) > 0:
                            last_token = token_list[-1]
                            if isinstance(last_token, CommentToken) and last_token.value.endswith("\n\n"):
                                # move comment to preceding key, removing the empty line,
                                # and adding an empty line at the beginning instead
                                if preceding_comment is not None and yaml_comment_entry_is_empty(
                                    preceding_comment[ITEM_COMMENT_INDEX_BEFORE]
                                ):
                                    last_token.value = last_token.value[:-1]
                                    first_token = token_list[0]
                                    if isinstance(first_token, CommentToken):
                                        if not first_token.value.startswith("\n"):
                                            first_token.value = "\n" + first_token.value
                                    preceding_comment[ITEM_COMMENT_INDEX_BEFORE] = token_list
                                    current_comment[ITEM_COMMENT_INDEX_BEFORE] = None
                    preceding_comment = current_comment
        case _:
            raise ValueError(f"Unhandled comment normalisation: {comment_normalisation}")
def save_yaml(path: str, data: dict | CommentedMap, preserve_comments: bool = True) -> None:
    """
    Saves the given data as a YAML file, creating parent directories as needed.

    :param path: the target file path
    :param data: the data to save
    :param preserve_comments: whether to preserve comments (data should be a CommentedMap for this to have an effect)
    """
    yaml = _create_yaml(preserve_comments)
    # os.makedirs("") raises FileNotFoundError, so guard against paths without
    # a directory component (e.g. a bare filename in the current directory)
    dirname = os.path.dirname(path)
    if dirname:
        os.makedirs(dirname, exist_ok=True)
    with open(path, "w", encoding=SERENA_FILE_ENCODING) as f:
        yaml.dump(data, f)
def yaml_comment_entry_is_empty(comment_entry: Any) -> bool:
    """
    Checks whether a ruamel comment entry carries no actual comment text.
    Entries that are None, whitespace-only tokens, or lists thereof count as empty;
    anything of an unexpected type counts as non-empty.
    """
    if comment_entry is None:
        return True
    if isinstance(comment_entry, CommentToken):
        return comment_entry.value.strip() == ""
    if isinstance(comment_entry, list):
        return all(isinstance(item, CommentToken) and item.value.strip() == "" for item in comment_entry)
    return False
def transfer_missing_yaml_comments_by_index(
    source: CommentedMap, target: CommentedMap, indices: list[int], forced_update_keys: Sequence[str] = ()
) -> None:
    """
    Copies comment entries at the given indices from source to target for all keys both maps share.

    :param source: the source, from which to transfer missing comments
    :param target: the target map, whose comments will be updated
    :param indices: list of comment indices to transfer
    :param forced_update_keys: keys for which comments are always transferred, even if present in target
    """
    for key in target.keys():
        if key not in source:
            continue
        source_comment = source.ca.items.get(key)
        if source_comment is None:
            continue
        # initialise the target's comment entry if it does not exist yet
        target_comment = target.ca.items.get(key)
        if target_comment is None:
            target_comment = [None] * 4
            target.ca.items[key] = target_comment
        # transfer comments at the specified indices
        is_forced_update = key in forced_update_keys
        for index in indices:
            if is_forced_update or yaml_comment_entry_is_empty(target_comment[index]):
                target_comment[index] = source_comment[index]
def transfer_missing_yaml_comments(
    source: CommentedMap, target: CommentedMap, comment_normalisation: YamlCommentNormalisation, forced_update_keys: Sequence[str] = ()
) -> None:
    """
    Transfers missing comments from source to target YAML.

    :param source: the source, from which to transfer missing comments
    :param target: the target map, whose comments will be updated.
    :param comment_normalisation: the comment normalisation to assume; if NONE, no comments are transferred
    :param forced_update_keys: keys for which comments are always transferred, even if present in target
    """
    if comment_normalisation is YamlCommentNormalisation.NONE:
        return
    leading_modes = (YamlCommentNormalisation.LEADING, YamlCommentNormalisation.LEADING_WITH_CONVERSION_FROM_TRAILING)
    if comment_normalisation in leading_modes:
        # only leading (pre-key) comments are relevant in these modes
        transfer_missing_yaml_comments_by_index(source, target, [ITEM_COMMENT_INDEX_BEFORE], forced_update_keys=forced_update_keys)
        return
    raise ValueError(f"Unhandled comment normalisation: {comment_normalisation}")
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/util/yaml.py",
"license": "MIT License",
"lines": 215,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/solidlsp/pascal/test_pascal_auto_update.py | """
Unit tests for the Pascal language server auto-update functionality.
These tests validate the version comparison, checksum verification,
and other helper methods without requiring network access or the
actual Pascal language server.
"""
from __future__ import annotations
import hashlib
import os
import tarfile
import tempfile
import time
import pytest
from solidlsp.language_servers.pascal_server import PascalLanguageServer
pytestmark = [pytest.mark.pascal]
class TestVersionNormalization:
    """Test version string normalization."""

    @staticmethod
    def _norm(value):
        """Shorthand for the static method under test."""
        return PascalLanguageServer._normalize_version(value)

    def test_normalize_version_with_v_prefix(self) -> None:
        """Test that 'v' prefix is stripped."""
        assert self._norm("v1.0.0") == "1.0.0"

    def test_normalize_version_with_capital_v_prefix(self) -> None:
        """Test that 'V' prefix is stripped."""
        assert self._norm("V1.0.0") == "1.0.0"

    def test_normalize_version_without_prefix(self) -> None:
        """Test version without prefix is unchanged."""
        assert self._norm("1.0.0") == "1.0.0"

    def test_normalize_version_with_whitespace(self) -> None:
        """Test that whitespace is stripped."""
        assert self._norm(" v1.0.0 ") == "1.0.0"

    def test_normalize_version_empty(self) -> None:
        """Test empty version returns empty string."""
        assert self._norm("") == ""

    def test_normalize_version_none(self) -> None:
        """Test None returns empty string."""
        assert self._norm(None) == ""
class TestVersionComparison:
    """Test version comparison logic."""

    @staticmethod
    def _newer(latest, local):
        """Shorthand for the static method under test."""
        return PascalLanguageServer._is_newer_version(latest, local)

    def test_newer_version_major(self) -> None:
        """Test detection of newer major version."""
        assert self._newer("v2.0.0", "v1.0.0") is True

    def test_newer_version_minor(self) -> None:
        """Test detection of newer minor version."""
        assert self._newer("v1.1.0", "v1.0.0") is True

    def test_newer_version_patch(self) -> None:
        """Test detection of newer patch version."""
        assert self._newer("v1.0.1", "v1.0.0") is True

    def test_same_version(self) -> None:
        """Test same version returns False."""
        assert self._newer("v1.0.0", "v1.0.0") is False

    def test_older_version(self) -> None:
        """Test older version returns False."""
        assert self._newer("v1.0.0", "v2.0.0") is False

    def test_latest_none_returns_false(self) -> None:
        """Test None latest version returns False."""
        assert self._newer(None, "v1.0.0") is False

    def test_local_none_returns_true(self) -> None:
        """Test None local version returns True (first install)."""
        assert self._newer("v1.0.0", None) is True

    def test_both_none_returns_false(self) -> None:
        """Test both None returns False."""
        assert self._newer(None, None) is False

    def test_version_with_different_lengths(self) -> None:
        """Test versions with different number of parts."""
        assert self._newer("v1.0.1", "v1.0") is True
        assert self._newer("v1.0", "v1.0.1") is False

    def test_version_with_prerelease(self) -> None:
        """Test versions with prerelease suffixes."""
        # Prerelease suffix is ignored, only numeric parts are compared
        assert self._newer("v1.1.0-beta", "v1.0.0") is True
class TestSHA256Checksum:
    """Test SHA256 checksum calculation and verification."""

    @staticmethod
    def _with_temp_file(content: bytes, check) -> None:
        """Write content to a closed temp file, run check(path), always delete the file."""
        with tempfile.NamedTemporaryFile(mode="wb", delete=False) as f:
            f.write(content)
            temp_path = f.name
        try:
            check(temp_path)
        finally:
            os.unlink(temp_path)

    def test_calculate_sha256(self) -> None:
        """Test SHA256 calculation for a known content."""
        expected = hashlib.sha256(b"test content").hexdigest()

        def check(path: str) -> None:
            assert PascalLanguageServer._calculate_sha256(path) == expected

        self._with_temp_file(b"test content", check)

    def test_verify_checksum_correct(self) -> None:
        """Test checksum verification with correct checksum."""
        expected = hashlib.sha256(b"test content").hexdigest()

        def check(path: str) -> None:
            assert PascalLanguageServer._verify_checksum(path, expected) is True

        self._with_temp_file(b"test content", check)

    def test_verify_checksum_incorrect(self) -> None:
        """Test checksum verification with incorrect checksum."""

        def check(path: str) -> None:
            assert PascalLanguageServer._verify_checksum(path, "0" * 64) is False

        self._with_temp_file(b"test content", check)

    def test_verify_checksum_case_insensitive(self) -> None:
        """Test checksum verification is case insensitive."""
        expected_upper = hashlib.sha256(b"test content").hexdigest().upper()

        def check(path: str) -> None:
            assert PascalLanguageServer._verify_checksum(path, expected_upper) is True

        self._with_temp_file(b"test content", check)
class TestTarfileSafety:
    """Tests for the tar path-traversal guard used during archive extraction."""

    @staticmethod
    def _is_safe(name: str) -> bool:
        """Build a TarInfo for *name* and run the safety check against /tmp/target."""
        return PascalLanguageServer._is_safe_tar_member(tarfile.TarInfo(name=name), "/tmp/target")

    def test_safe_tar_member_normal_path(self) -> None:
        """A plain file name is safe."""
        assert self._is_safe("pasls") is True

    def test_safe_tar_member_nested_path(self) -> None:
        """A name inside a subdirectory is safe."""
        assert self._is_safe("subdir/pasls") is True

    def test_unsafe_tar_member_path_traversal(self) -> None:
        """A leading '..' component must be rejected."""
        assert self._is_safe("../etc/passwd") is False

    def test_unsafe_tar_member_hidden_traversal(self) -> None:
        """'..' components buried inside a nested path must be rejected."""
        assert self._is_safe("subdir/../../etc/passwd") is False

    def test_safe_tar_member_similar_name(self) -> None:
        """'..' embedded in a file name (not a path component) stays safe."""
        assert self._is_safe("file..name") is True
class TestMetadataManagement:
    """Tests for the .meta directory and metadata-file path helpers."""

    def test_meta_dir_creates_directory(self) -> None:
        """_meta_dir must create the metadata directory and return its path."""
        with tempfile.TemporaryDirectory() as root:
            created = PascalLanguageServer._meta_dir(root)
            assert os.path.exists(created)
            assert created == os.path.join(root, PascalLanguageServer.META_DIR)

    def test_meta_file_returns_correct_path(self) -> None:
        """_meta_file must join the metadata directory with the file name."""
        with tempfile.TemporaryDirectory() as root:
            actual = PascalLanguageServer._meta_file(root, "version")
            assert actual == os.path.join(root, PascalLanguageServer.META_DIR, "version")
class TestUpdateCheckTiming:
    """Tests for the 24-hour update-check throttle."""

    @staticmethod
    def _write_last_check(root: str, timestamp: float) -> None:
        """Record *timestamp* in the last_check metadata file."""
        meta_dir = PascalLanguageServer._meta_dir(root)
        with open(os.path.join(meta_dir, "last_check"), "w") as handle:
            handle.write(str(timestamp))

    def test_should_check_update_no_last_check(self) -> None:
        """With no recorded check, an update query is due."""
        with tempfile.TemporaryDirectory() as root:
            assert PascalLanguageServer._should_check_update(root) is True

    def test_should_check_update_recent_check(self) -> None:
        """A just-recorded check suppresses further queries."""
        with tempfile.TemporaryDirectory() as root:
            self._write_last_check(root, time.time())
            assert PascalLanguageServer._should_check_update(root) is False

    def test_should_check_update_old_check(self) -> None:
        """A check older than the interval makes a new query due."""
        with tempfile.TemporaryDirectory() as root:
            stale = time.time() - (PascalLanguageServer.UPDATE_CHECK_INTERVAL + 3600)
            self._write_last_check(root, stale)
            assert PascalLanguageServer._should_check_update(root) is True

    def test_update_last_check_creates_file(self) -> None:
        """_update_last_check must persist a current timestamp."""
        with tempfile.TemporaryDirectory() as root:
            PascalLanguageServer._update_last_check(root)
            stamp_file = PascalLanguageServer._meta_file(root, "last_check")
            assert os.path.exists(stamp_file)
            with open(stamp_file) as handle:
                recorded = float(handle.read().strip())
            assert abs(recorded - time.time()) < 5  # written moments ago
class TestVersionPersistence:
    """Tests for saving and loading the locally installed pasls version."""

    def test_save_and_get_local_version(self) -> None:
        """A saved version string must round-trip unchanged."""
        with tempfile.TemporaryDirectory() as root:
            PascalLanguageServer._save_local_version(root, "v1.0.0")
            assert PascalLanguageServer._get_local_version(root) == "v1.0.0"

    def test_get_local_version_not_exists(self) -> None:
        """A missing version file yields None."""
        with tempfile.TemporaryDirectory() as root:
            assert PascalLanguageServer._get_local_version(root) is None
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/pascal/test_pascal_auto_update.py",
"license": "MIT License",
"lines": 195,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/test_rename_didopen.py | from unittest.mock import MagicMock
from solidlsp.ls import SolidLanguageServer
class DummyLanguageServer(SolidLanguageServer):
    """Concrete test double for SolidLanguageServer; its startup path must never run."""
    def _start_server(self) -> None:
        # The test injects a mocked `server` attribute directly, so reaching this
        # method would indicate broken fixture wiring rather than a real failure.
        raise AssertionError("Not used in this test")
def test_request_rename_symbol_edit_opens_file_before_rename(tmp_path) -> None:
    """The rename request must be bracketed by didOpen before and didClose after."""
    (tmp_path / "index.ts").write_text("export const x = 1;\n", encoding="utf-8")

    call_order: list[str] = []

    def recorder(label: str):
        # Produce a side_effect callable that just logs the event label.
        return lambda *_args, **_kwargs: call_order.append(label)

    notify = MagicMock()
    notify.did_open_text_document.side_effect = recorder("didOpen")
    notify.did_close_text_document.side_effect = recorder("didClose")

    send = MagicMock()
    send.rename.side_effect = recorder("rename")

    lsp_handle = MagicMock()
    lsp_handle.notify = notify
    lsp_handle.send = send

    # Bypass __init__ (which would start a real server) and wire state by hand.
    language_server = object.__new__(DummyLanguageServer)
    language_server.repository_root_path = str(tmp_path)
    language_server.server_started = True
    language_server.open_file_buffers = {}
    language_server._encoding = "utf-8"
    language_server.language_id = "typescript"
    language_server.server = lsp_handle

    outcome = language_server.request_rename_symbol_edit(
        relative_file_path="index.ts",
        line=0,
        column=0,
        new_name="y",
    )

    assert outcome is None
    assert call_order == ["didOpen", "rename", "didClose"]
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/test_rename_didopen.py",
"license": "MIT License",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:scripts/print_language_list.py | """
Prints the list of supported languages, for use in the project.yml template
"""
from solidlsp.ls_config import Language
if __name__ == "__main__":
    # Emit the supported languages as comment lines, five per row, ready to be
    # pasted into the project.yml template.
    names = sorted(language.value for language in Language)
    width = max(len(name) for name in names) + 2
    for index, name in enumerate(names):
        if index % 5 == 0:
            print("\n# ", end="")
        print(" " + name.ljust(width), end="")
| {
"repo_id": "oraios/serena",
"file_path": "scripts/print_language_list.py",
"license": "MIT License",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/serena/util/version.py | class Version:
"""
Represents a version, specifically the numeric components of a version string.
Suffixes like "rc1" or "-dev" are ignored, i.e. for a version string like "1.2.3rc1",
the components are [1, 2, 3].
"""
def __init__(self, package_or_version: object | str):
"""
:param package_or_version: a package object (with a `__version__` attribute) or a version string like "1.2.3".
If a version contains a suffix (like "1.2.3rc1" or "1.2.3-dev"), the suffix is ignored.
"""
if isinstance(package_or_version, str):
version_string = package_or_version
elif hasattr(package_or_version, "__version__"):
package_version_string = getattr(package_or_version, "__version__", None)
if package_version_string is None:
raise ValueError(f"The given package object {package_or_version} has no __version__ attribute")
version_string = package_version_string
else:
raise ValueError("The given argument must be either a version string or a package object with a __version__ attribute")
self.version_string = version_string
self.components = self._get_version_components(version_string)
def __repr__(self) -> str:
return self.version_string
@staticmethod
def _get_version_components(version_string: str) -> list[int]:
components = version_string.split(".")
int_components = []
for c in components:
num_str = ""
for ch in c:
if ch.isdigit():
num_str += ch
else:
break
if num_str == "":
break
int_components.append(int(num_str))
return int_components
def is_at_least(self, *components: int) -> bool:
"""
Checks this version against the given version components.
This version object must contain at least the respective number of components
:param components: version components in order (i.e. major, minor, patch, etc.)
:return: True if the version is at least the given version, False otherwise
"""
for i, desired_min_version in enumerate(components):
actual_version = self.components[i]
if actual_version < desired_min_version:
return False
elif actual_version > desired_min_version:
return True
return True
def is_at_most(self, *components: int) -> bool:
"""
Checks this version against the given version components.
This version object must contain at least the respective number of components
:param components: version components in order (i.e. major, minor, patch, etc.)
:return: True if the version is at most the given version, False otherwise
"""
for i, desired_max_version in enumerate(components):
actual_version = self.components[i]
if actual_version > desired_max_version:
return False
elif actual_version < desired_max_version:
return True
return True
def is_equal(self, *components: int) -> bool:
"""
Checks this version against the given version components.
This version object must contain at least the respective number of components
:param components: version components in order (i.e. major, minor, patch, etc.)
:return: True if the version is the given version, False otherwise
"""
return self.components[: len(components)] == list(components)
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/util/version.py",
"license": "MIT License",
"lines": 75,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/pascal_server.py | """
Provides Pascal/Free Pascal specific instantiation of the LanguageServer class using pasls.
Contains various configurations and settings specific to Pascal and Free Pascal.
pasls installation strategy:
1. Use existing pasls from PATH
2. Download prebuilt binary from GitHub releases (auto-updated)
Supported platforms for binary download:
- linux-x64, linux-arm64
- osx-x64, osx-arm64
- win-x64
Auto-update features:
- Checks for updates every 24 hours via GitHub API
- SHA256 checksum verification before installation
- Atomic update with rollback on failure
- Windows file locking detection
You can pass the following entries in ls_specific_settings["pascal"]:
Environment variables (recommended for CodeTools configuration):
- pp: Path to FPC compiler driver, must be "fpc.exe" (e.g., "D:/laz32/fpc/bin/i386-win32/fpc.exe").
Do NOT use backend compilers like ppc386.exe or ppcx64.exe - CodeTools queries fpc.exe for
configuration (fpc -iV, fpc -iTO, etc.). This is the most important setting for hover/navigation.
- fpcdir: Path to FPC source directory (e.g., "D:/laz32/fpcsrc"). Helps CodeTools locate
standard library sources for better navigation.
- lazarusdir: Path to Lazarus directory (e.g., "D:/laz32/lazarus"). Required for Lazarus
projects using LCL and other Lazarus components.
Target platform overrides (use only if pp setting is not sufficient):
- fpc_target: Override target OS (e.g., "Win32", "Win64", "Linux"). Sets FPCTARGET env var.
- fpc_target_cpu: Override target CPU (e.g., "i386", "x86_64", "aarch64"). Sets FPCTARGETCPU.
Example configuration in ~/.serena/serena_config.yml:
ls_specific_settings:
pascal:
pp: "D:/laz32/fpc/bin/i386-win32/fpc.exe"
fpcdir: "D:/laz32/fpcsrc"
lazarusdir: "D:/laz32/lazarus"
"""
from __future__ import annotations
import hashlib
import json
import logging
import os
import pathlib
import platform
import shutil
import tarfile
import threading
import time
import urllib.error
import urllib.request
import uuid
import zipfile
from solidlsp.language_servers.common import RuntimeDependency, RuntimeDependencyCollection, quote_windows_path
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class PascalLanguageServer(SolidLanguageServer):
"""
Provides Pascal specific instantiation of the LanguageServer class using pasls.
Contains various configurations and settings specific to Free Pascal and Lazarus.
"""
# URL configuration
PASLS_RELEASES_URL = "https://github.com/zen010101/pascal-language-server/releases/latest/download"
PASLS_API_URL = "https://api.github.com/repos/zen010101/pascal-language-server/releases/latest"
# Update check interval (seconds)
UPDATE_CHECK_INTERVAL = 86400 # 24 hours
# Metadata directory name
META_DIR = ".meta"
# Network timeout (seconds)
NETWORK_TIMEOUT = 10
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
"""
Creates a PascalLanguageServer instance. This class is not meant to be instantiated directly.
Use LanguageServer.create() instead.
"""
pasls_executable_path = self._setup_runtime_dependencies(solidlsp_settings)
# Build environment variables for pasls
# These control CodeTools' configuration and target platform settings
proc_env: dict[str, str] = {}
# Read from ls_specific_settings["pascal"]
from solidlsp.ls_config import Language
pascal_settings = solidlsp_settings.get_ls_specific_settings(Language.PASCAL)
# pp: Path to FPC compiler driver (must be fpc.exe, NOT ppc386.exe/ppcx64.exe)
# CodeTools queries fpc.exe for configuration via "fpc -iV", "fpc -iTO", etc.
pp = pascal_settings.get("pp", "")
if pp:
proc_env["PP"] = pp
log.info(f"Setting PP={pp} from ls_specific_settings")
# fpcdir: Path to FPC source directory (e.g., "D:/laz32/fpcsrc")
fpcdir = pascal_settings.get("fpcdir", "")
if fpcdir:
proc_env["FPCDIR"] = fpcdir
log.info(f"Setting FPCDIR={fpcdir} from ls_specific_settings")
# lazarusdir: Path to Lazarus directory (e.g., "D:/laz32/lazarus")
lazarusdir = pascal_settings.get("lazarusdir", "")
if lazarusdir:
proc_env["LAZARUSDIR"] = lazarusdir
log.info(f"Setting LAZARUSDIR={lazarusdir} from ls_specific_settings")
# fpc_target: Override target OS (e.g., "Win32", "Win64", "Linux")
fpc_target = pascal_settings.get("fpc_target", "")
if fpc_target:
proc_env["FPCTARGET"] = fpc_target
log.info(f"Setting FPCTARGET={fpc_target} from ls_specific_settings")
# fpc_target_cpu: Override target CPU (e.g., "i386", "x86_64", "aarch64")
fpc_target_cpu = pascal_settings.get("fpc_target_cpu", "")
if fpc_target_cpu:
proc_env["FPCTARGETCPU"] = fpc_target_cpu
log.info(f"Setting FPCTARGETCPU={fpc_target_cpu} from ls_specific_settings")
super().__init__(
config,
repository_root_path,
ProcessLaunchInfo(cmd=pasls_executable_path, cwd=repository_root_path, env=proc_env),
"pascal",
solidlsp_settings,
)
self.server_ready = threading.Event()
# ============== Metadata Directory Management ==============
@classmethod
def _meta_dir(cls, pasls_dir: str) -> str:
"""Get metadata directory path, create if not exists."""
meta_path = os.path.join(pasls_dir, cls.META_DIR)
os.makedirs(meta_path, exist_ok=True)
return meta_path
    @classmethod
    def _meta_file(cls, pasls_dir: str, filename: str) -> str:
        """Return the full path of *filename* inside the metadata directory (creating the directory as a side effect)."""
        return os.path.join(cls._meta_dir(pasls_dir), filename)
# ============== Version Management ==============
@staticmethod
def _normalize_version(version: str | None) -> str:
"""Normalize version string by removing 'v' prefix and whitespace."""
if not version:
return ""
return version.strip().lstrip("vV")
@classmethod
def _is_newer_version(cls, latest: str | None, local: str | None) -> bool:
"""Compare versions, return True if latest is newer than local."""
if not latest:
return False
if not local:
return True
latest_norm = cls._normalize_version(latest)
local_norm = cls._normalize_version(local)
if not latest_norm:
return False
if not local_norm:
return True
try:
def parse_version(v: str) -> list[int]:
parts = []
for part in v.split("."):
num = ""
for c in part:
if c.isdigit():
num += c
else:
break
parts.append(int(num) if num else 0)
return parts
latest_parts = parse_version(latest_norm)
local_parts = parse_version(local_norm)
# Pad to same length
max_len = max(len(latest_parts), len(local_parts))
latest_parts.extend([0] * (max_len - len(latest_parts)))
local_parts.extend([0] * (max_len - len(local_parts)))
return latest_parts > local_parts
except Exception:
log.warning(f"Failed to parse versions for comparison: {latest_norm} vs {local_norm}")
return False
@classmethod
def _get_latest_version(cls) -> str | None:
"""Get latest version from GitHub API, return None on failure."""
try:
headers = {"Accept": "application/vnd.github.v3+json", "User-Agent": "Serena-LSP"}
# Support GITHUB_TOKEN for CI environments with rate limits
github_token = os.environ.get("GITHUB_TOKEN")
if github_token:
headers["Authorization"] = f"token {github_token}"
req = urllib.request.Request(cls.PASLS_API_URL, headers=headers)
with urllib.request.urlopen(req, timeout=cls.NETWORK_TIMEOUT) as response:
data = json.loads(response.read().decode())
return data.get("tag_name")
except Exception as e:
log.debug(f"Failed to get latest pasls version: {type(e).__name__}: {e}")
return None
@classmethod
def _get_local_version(cls, pasls_dir: str) -> str | None:
"""Read local version file."""
version_file = cls._meta_file(pasls_dir, "version")
if os.path.exists(version_file):
try:
with open(version_file, encoding="utf-8") as f:
return f.read().strip()
except OSError:
return None
return None
@classmethod
def _save_local_version(cls, pasls_dir: str, version: str) -> None:
"""Save version to local file."""
version_file = cls._meta_file(pasls_dir, "version")
try:
with open(version_file, "w", encoding="utf-8") as f:
f.write(version)
except OSError as e:
log.warning(f"Failed to save version file: {e}")
# ============== Update Check Timing ==============
@classmethod
def _should_check_update(cls, pasls_dir: str) -> bool:
"""Check if we should query for updates (more than 24 hours since last check)."""
last_check_file = cls._meta_file(pasls_dir, "last_check")
if not os.path.exists(last_check_file):
return True
try:
with open(last_check_file, encoding="utf-8") as f:
last_check = float(f.read().strip())
return (time.time() - last_check) > cls.UPDATE_CHECK_INTERVAL
except (OSError, ValueError):
return True
@classmethod
def _update_last_check(cls, pasls_dir: str) -> None:
"""Update last check timestamp."""
last_check_file = cls._meta_file(pasls_dir, "last_check")
try:
with open(last_check_file, "w", encoding="utf-8") as f:
f.write(str(time.time()))
except OSError as e:
log.warning(f"Failed to update last check time: {e}")
# ============== SHA256 Checksum ==============
@classmethod
def _get_checksums(cls) -> dict[str, str] | None:
"""Download checksums file from GitHub, return {filename: sha256} dict."""
checksums_url = f"{cls.PASLS_RELEASES_URL}/checksums.sha256"
try:
req = urllib.request.Request(checksums_url, headers={"User-Agent": "Serena-LSP"})
with urllib.request.urlopen(req, timeout=cls.NETWORK_TIMEOUT) as response:
content = response.read().decode("utf-8")
checksums = {}
for line in content.strip().split("\n"):
line = line.strip()
if not line or line.startswith("#"):
continue
parts = line.split()
if len(parts) >= 2:
sha256 = parts[0]
filename = parts[1].lstrip("*") # Remove possible * prefix
checksums[filename] = sha256
return checksums
except Exception as e:
log.warning(f"Failed to get checksums: {type(e).__name__}: {e}")
return None
@staticmethod
def _calculate_sha256(file_path: str) -> str:
"""Calculate SHA256 checksum of a file."""
sha256_hash = hashlib.sha256()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(8192), b""):
sha256_hash.update(chunk)
return sha256_hash.hexdigest()
@classmethod
def _verify_checksum(cls, file_path: str, expected_sha256: str) -> bool:
"""Verify file checksum."""
try:
actual_sha256 = cls._calculate_sha256(file_path)
if actual_sha256.lower() == expected_sha256.lower():
log.debug(f"Checksum verified: {file_path}")
return True
else:
log.error(f"Checksum mismatch for {file_path}: expected {expected_sha256}, got {actual_sha256}")
return False
except Exception as e:
log.error(f"Failed to verify checksum: {e}")
return False
# ============== Windows File Locking ==============
@staticmethod
def _is_file_locked(file_path: str) -> bool:
"""Check if file is locked (Windows)."""
if platform.system() != "Windows":
return False
if not os.path.exists(file_path):
return False
try:
with open(file_path, "a"):
pass
return False
except (OSError, PermissionError):
return True
@classmethod
def _safe_remove(cls, file_path: str) -> bool:
"""Safely remove file, handle Windows file locking."""
if not os.path.exists(file_path):
return True
if platform.system() == "Windows" and cls._is_file_locked(file_path):
temp_name = f"{file_path}.old.{uuid.uuid4().hex[:8]}"
try:
os.rename(file_path, temp_name)
log.info(f"File locked, renamed to: {temp_name}")
cls._mark_for_cleanup(os.path.dirname(file_path), temp_name)
return True
except PermissionError:
log.warning(f"Cannot remove/rename locked file: {file_path}")
return False
else:
try:
os.remove(file_path)
return True
except OSError as e:
log.warning(f"Failed to remove file {file_path}: {e}")
return False
@classmethod
def _mark_for_cleanup(cls, pasls_dir: str, file_path: str) -> None:
"""Mark file for later cleanup."""
cleanup_file = cls._meta_file(pasls_dir, "cleanup_list")
try:
with open(cleanup_file, "a", encoding="utf-8") as f:
f.write(file_path + "\n")
except OSError:
pass
@classmethod
def _cleanup_old_files(cls, pasls_dir: str) -> None:
"""Clean up old files marked for deletion."""
cleanup_file = cls._meta_file(pasls_dir, "cleanup_list")
if not os.path.exists(cleanup_file):
return
try:
with open(cleanup_file, encoding="utf-8") as f:
files = [line.strip() for line in f if line.strip()]
remaining = []
for file_path in files:
if os.path.exists(file_path):
try:
os.remove(file_path)
log.debug(f"Cleaned up old file: {file_path}")
except OSError:
remaining.append(file_path)
if remaining:
with open(cleanup_file, "w", encoding="utf-8") as f:
f.write("\n".join(remaining) + "\n")
else:
os.remove(cleanup_file)
except OSError:
pass
# ============== Download and Atomic Update ==============
@classmethod
def _download_archive(cls, url: str, target_path: str) -> bool:
"""Download archive to specified path."""
try:
os.makedirs(os.path.dirname(target_path), exist_ok=True)
req = urllib.request.Request(url, headers={"User-Agent": "Serena-LSP"})
with urllib.request.urlopen(req, timeout=60) as response:
with open(target_path, "wb") as f:
while True:
chunk = response.read(8192)
if not chunk:
break
f.write(chunk)
return True
except Exception as e:
log.error(f"Failed to download {url}: {type(e).__name__}: {e}")
return False
@classmethod
def _is_safe_tar_member(cls, member: tarfile.TarInfo, target_dir: str) -> bool:
"""Check if tar member is safe (prevent path traversal attack)."""
# Check for .. in path components
if ".." in member.name.split("/") or ".." in member.name.split("\\"):
return False
# Check extracted path is within target directory
abs_target = os.path.abspath(target_dir)
abs_member = os.path.abspath(os.path.join(target_dir, member.name))
return abs_member.startswith(abs_target + os.sep) or abs_member == abs_target
    @classmethod
    def _extract_archive(cls, archive_path: str, target_dir: str, archive_type: str) -> bool:
        """
        Safely extract a downloaded pasls archive into *target_dir*.

        Supports gzip tarballs ("gztar") and zip files ("zip"). Every member is
        screened for path traversal BEFORE anything is extracted; a single
        wrapping directory (as produced by GitHub release archives) is then
        flattened away.

        :param archive_path: path of the downloaded archive file
        :param target_dir: directory to extract into (created if missing)
        :param archive_type: "gztar" or "zip"
        :return: True on success; False on unsafe members, unknown type, or errors
        """
        try:
            os.makedirs(target_dir, exist_ok=True)
            if archive_type == "gztar":
                with tarfile.open(archive_path, "r:gz") as tar:
                    # Screen ALL members before extracting anything.
                    for member in tar.getmembers():
                        if not cls._is_safe_tar_member(member, target_dir):
                            log.error(f"Unsafe tar member detected (path traversal): {member.name}")
                            return False
                    tar.extractall(target_dir)
            elif archive_type == "zip":
                with zipfile.ZipFile(archive_path, "r") as zip_ref:
                    for name in zip_ref.namelist():
                        # Reject '..' components on either separator style.
                        if ".." in name.split("/") or ".." in name.split("\\"):
                            log.error(f"Unsafe zip member detected (path traversal): {name}")
                            return False
                        # Reject names resolving outside the target directory.
                        abs_target = os.path.abspath(target_dir)
                        abs_member = os.path.abspath(os.path.join(target_dir, name))
                        if not (abs_member.startswith(abs_target + os.sep) or abs_member == abs_target):
                            log.error(f"Unsafe zip member detected (path traversal): {name}")
                            return False
                    zip_ref.extractall(target_dir)
            else:
                log.error(f"Unsupported archive type: {archive_type}")
                return False
            # Handle nested directory: if extraction created a single subdirectory,
            # move its contents up to target_dir (common with GitHub release archives)
            cls._flatten_single_subdir(target_dir)
            return True
        except Exception as e:
            log.error(f"Failed to extract archive: {type(e).__name__}: {e}")
            return False
@classmethod
def _flatten_single_subdir(cls, target_dir: str) -> None:
"""If target_dir contains only a single subdirectory, move its contents up."""
entries = os.listdir(target_dir)
if len(entries) == 1:
subdir = os.path.join(target_dir, entries[0])
if os.path.isdir(subdir):
# Move all contents from subdir to target_dir
for item in os.listdir(subdir):
src = os.path.join(subdir, item)
dst = os.path.join(target_dir, item)
shutil.move(src, dst)
# Remove the now-empty subdirectory
os.rmdir(subdir)
    @classmethod
    def _get_archive_filename(cls, dep: RuntimeDependency) -> str:
        """Return the archive file name (last URL path segment) of *dep*."""
        assert dep.url is not None, "RuntimeDependency.url must be set"
        return dep.url.split("/")[-1]
    @classmethod
    def _atomic_install(cls, pasls_dir: str, deps: RuntimeDependencyCollection, checksums: dict[str, str] | None) -> bool:
        """
        Atomically install/update pasls: download -> verify checksum -> extract -> replace.

        The new version is staged in a ".tmp" sibling directory and only swapped
        in after successful extraction; the previous install is kept as a
        ".backup" sibling and restored if the swap fails.

        :param pasls_dir: final installation directory for pasls
        :param deps: runtime dependency collection (provides the platform's URL and archive type)
        :param checksums: optional {archive filename: sha256} map for verification
        :return: True on success, False on any failure (with rollback attempted)
        """
        temp_dir = pasls_dir + ".tmp"
        backup_dir = pasls_dir + ".backup"
        # Downloads land outside pasls_dir so a failed download never corrupts an install.
        temp_archive_dir = os.path.join(os.path.expanduser("~"), "solidlsp_tmp")
        try:
            dep = deps.get_single_dep_for_current_platform()
            assert dep.url is not None, "RuntimeDependency.url must be set"
            assert dep.archive_type is not None, "RuntimeDependency.archive_type must be set"
            archive_filename = cls._get_archive_filename(dep)
            archive_path = os.path.join(temp_archive_dir, archive_filename)
            # 1. Clean up any existing temp directory
            if os.path.exists(temp_dir):
                shutil.rmtree(temp_dir)
            os.makedirs(temp_archive_dir, exist_ok=True)
            # 2. Download archive
            log.info(f"Downloading pasls archive: {archive_filename}")
            if not cls._download_archive(dep.url, archive_path):
                log.error("Failed to download pasls archive")
                return False
            # 3. Verify SHA256 checksum (critical security step, before extraction)
            if checksums:
                expected_sha256 = checksums.get(archive_filename)
                if expected_sha256:
                    log.info(f"Verifying SHA256 checksum for {archive_filename}...")
                    if not cls._verify_checksum(archive_path, expected_sha256):
                        log.error(f"SHA256 checksum verification FAILED for {archive_filename}")
                        log.error("Aborting installation due to checksum mismatch - possible security issue!")
                        try:
                            os.remove(archive_path)
                        except OSError:
                            pass
                        return False
                    log.info("SHA256 checksum verified successfully")
                else:
                    log.warning(f"No checksum found for {archive_filename} in checksums file")
            else:
                log.warning("No checksums available - skipping verification (not recommended for production)")
            # 4. Extract to temp directory
            os.makedirs(temp_dir, exist_ok=True)
            log.info("Extracting archive to temporary directory...")
            if not cls._extract_archive(archive_path, temp_dir, dep.archive_type):
                log.error("Failed to extract archive")
                return False
            # 5. Set execute permission on the staged binary
            binary_path = deps.binary_path(temp_dir)
            if os.path.exists(binary_path):
                try:
                    os.chmod(binary_path, 0o755)
                except OSError:
                    pass  # May fail on Windows
            # 6. Backup old version (replacing any stale backup)
            if os.path.exists(pasls_dir):
                if os.path.exists(backup_dir):
                    shutil.rmtree(backup_dir)
                shutil.move(pasls_dir, backup_dir)
            # 7. Replace with new version
            shutil.move(temp_dir, pasls_dir)
            # 8. Restore meta directory from backup (preserves version info, last_check, etc.)
            if os.path.exists(backup_dir):
                backup_meta = os.path.join(backup_dir, cls.META_DIR)
                if os.path.exists(backup_meta):
                    target_meta = os.path.join(pasls_dir, cls.META_DIR)
                    if not os.path.exists(target_meta):
                        shutil.copytree(backup_meta, target_meta)
            # 9. Clean up downloaded archive and temp directory
            try:
                os.remove(archive_path)
                os.rmdir(temp_archive_dir)
            except OSError:
                pass
            log.info("pasls installation completed successfully")
            return True
        except Exception as e:
            log.error(f"Installation failed: {e}")
            # Rollback: restore the backed-up install if the new one never landed.
            if os.path.exists(backup_dir) and not os.path.exists(pasls_dir):
                try:
                    shutil.move(backup_dir, pasls_dir)
                    log.info("Rolled back to previous version")
                except Exception as rollback_error:
                    log.error(f"Rollback failed: {rollback_error}")
            # Clean up temp directory
            if os.path.exists(temp_dir):
                try:
                    shutil.rmtree(temp_dir)
                except Exception:
                    pass
            return False
@classmethod
def _setup_runtime_dependencies(cls, solidlsp_settings: SolidLSPSettings) -> str:
    """
    Setup runtime dependencies for Pascal Language Server (pasls).
    Automatically checks for updates every 24 hours with security verification.

    Resolution order: a ``pasls`` binary already on PATH always wins (user-managed
    install, never touched); otherwise a managed copy under the language-server
    resources directory is downloaded on first use and updated when a newer
    release is detected.

    Returns:
        str: The command to start the pasls server
    """
    # Check if pasls is already in PATH
    pasls_in_path = shutil.which("pasls")
    if pasls_in_path:
        log.info(f"Found pasls in PATH: {pasls_in_path}")
        return quote_windows_path(pasls_in_path)
    pasls_dir = cls.ls_resources_dir(solidlsp_settings)
    os.makedirs(pasls_dir, exist_ok=True)
    # Clean up old files from previous sessions
    cls._cleanup_old_files(pasls_dir)
    # Use RuntimeDependencyCollection for platform detection
    # Asset names follow zen010101/pascal-language-server release convention:
    # pasls-{cpu_arch}-{os}.{ext} where cpu_arch is x86_64/aarch64/i386
    deps = RuntimeDependencyCollection(
        [
            RuntimeDependency(
                id="PascalLanguageServer",
                description="Pascal Language Server for Linux (x64)",
                url=f"{cls.PASLS_RELEASES_URL}/pasls-x86_64-linux.tar.gz",
                platform_id="linux-x64",
                archive_type="gztar",
                binary_name="pasls",
            ),
            RuntimeDependency(
                id="PascalLanguageServer",
                description="Pascal Language Server for Linux (arm64)",
                url=f"{cls.PASLS_RELEASES_URL}/pasls-aarch64-linux.tar.gz",
                platform_id="linux-arm64",
                archive_type="gztar",
                binary_name="pasls",
            ),
            RuntimeDependency(
                id="PascalLanguageServer",
                description="Pascal Language Server for macOS (x64)",
                url=f"{cls.PASLS_RELEASES_URL}/pasls-x86_64-darwin.zip",
                platform_id="osx-x64",
                archive_type="zip",
                binary_name="pasls",
            ),
            RuntimeDependency(
                id="PascalLanguageServer",
                description="Pascal Language Server for macOS (arm64)",
                url=f"{cls.PASLS_RELEASES_URL}/pasls-aarch64-darwin.zip",
                platform_id="osx-arm64",
                archive_type="zip",
                binary_name="pasls",
            ),
            RuntimeDependency(
                id="PascalLanguageServer",
                description="Pascal Language Server for Windows (x64)",
                url=f"{cls.PASLS_RELEASES_URL}/pasls-x86_64-win64.zip",
                platform_id="win-x64",
                archive_type="zip",
                binary_name="pasls.exe",
            ),
        ]
    )
    pasls_executable_path = deps.binary_path(pasls_dir)
    # Determine if download is needed
    need_download = False
    latest_version = None
    checksums = None
    if not os.path.exists(pasls_executable_path):
        # First install
        log.info("pasls not found, will download...")
        need_download = True
        latest_version = cls._get_latest_version()
        checksums = cls._get_checksums()
    elif cls._should_check_update(pasls_dir):
        # Check for updates
        log.debug("Checking for pasls updates...")
        latest_version = cls._get_latest_version()
        local_version = cls._get_local_version(pasls_dir)
        if cls._is_newer_version(latest_version, local_version):
            log.info(f"New pasls version available: {latest_version} (current: {local_version})")
            # Check Windows file locking
            if cls._is_file_locked(pasls_executable_path):
                log.warning("Cannot update pasls: file is in use. Will retry next time.")
            else:
                need_download = True
                checksums = cls._get_checksums()
        else:
            log.debug(f"pasls is up to date: {local_version}")
    if need_download:
        if cls._atomic_install(pasls_dir, deps, checksums):
            # Update metadata after successful installation
            if latest_version:
                cls._save_local_version(pasls_dir, latest_version)
            else:
                # API failed but download succeeded, record placeholder version
                cls._save_local_version(pasls_dir, "unknown")
            cls._update_last_check(pasls_dir)
        else:
            # Installation failed, use existing version if available
            if not os.path.exists(pasls_executable_path):
                raise RuntimeError("Failed to install pasls and no local version available")
            log.warning("Update failed, using existing version")
    # Update check time even if no update (avoid frequent checks)
    if not need_download and cls._should_check_update(pasls_dir):
        cls._update_last_check(pasls_dir)
    assert os.path.exists(pasls_executable_path), f"pasls executable not found at {pasls_executable_path}"
    # Ensure execute permission
    try:
        os.chmod(pasls_executable_path, 0o755)
    except OSError:
        pass  # May fail on Windows, ignore
    log.info(f"Using pasls at: {pasls_executable_path}")
    return quote_windows_path(pasls_executable_path)
@staticmethod
def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
    """
    Build the LSP initialize params for the Pascal Language Server.

    pasls (genericptr/pascal-language-server) reads compiler paths from:
    1. Environment variables (PP, FPCDIR, LAZARUSDIR) via TCodeToolsOptions.InitWithEnvironmentVariables
    2. Lazarus config files via GuessCodeToolConfig
    Only environment variables that are actually set are forwarded via
    initializationOptions.
    """
    workspace_uri = pathlib.Path(repository_absolute_path).as_uri()
    # Forward the CodeTools-related environment variables that pasls understands:
    #   PP           - path to the FPC compiler executable
    #   FPCDIR       - path to the FPC source directory
    #   LAZARUSDIR   - path to the Lazarus directory (only needed for LCL projects)
    #   FPCTARGET    - target OS
    #   FPCTARGETCPU - target CPU
    # Unset or empty variables are omitted entirely.
    forwarded_env: dict = {}
    for name in ("PP", "FPCDIR", "LAZARUSDIR", "FPCTARGET", "FPCTARGETCPU"):
        if current := os.environ.get(name, ""):
            forwarded_env[name] = current

    text_document_capabilities = {
        "synchronization": {
            "didSave": True,
            "dynamicRegistration": True,
            "willSave": True,
            "willSaveWaitUntil": True,
        },
        "completion": {
            "dynamicRegistration": True,
            "completionItem": {
                "snippetSupport": True,
                "commitCharactersSupport": True,
                "documentationFormat": ["markdown", "plaintext"],
            },
        },
        "hover": {
            "dynamicRegistration": True,
            "contentFormat": ["markdown", "plaintext"],
        },
        "signatureHelp": {
            "dynamicRegistration": True,
            "signatureInformation": {
                "documentationFormat": ["markdown", "plaintext"],
            },
        },
        "definition": {"dynamicRegistration": True, "linkSupport": True},
        "references": {"dynamicRegistration": True},
        "documentHighlight": {"dynamicRegistration": True},
        "documentSymbol": {
            "dynamicRegistration": True,
            "hierarchicalDocumentSymbolSupport": True,
            "symbolKind": {"valueSet": list(range(1, 27))},
        },
        "codeAction": {
            "dynamicRegistration": True,
            "codeActionLiteralSupport": {
                "codeActionKind": {
                    "valueSet": [
                        "quickfix",
                        "refactor",
                        "refactor.extract",
                        "refactor.inline",
                        "refactor.rewrite",
                        "source",
                        "source.organizeImports",
                    ]
                }
            },
        },
        "formatting": {"dynamicRegistration": True},
        "rangeFormatting": {"dynamicRegistration": True},
    }
    workspace_capabilities = {
        "workspaceFolders": True,
        "didChangeConfiguration": {"dynamicRegistration": True},
        "symbol": {"dynamicRegistration": True},
        "executeCommand": {"dynamicRegistration": True},
        "configuration": True,
        "workspaceEdit": {
            "documentChanges": True,
        },
    }
    return {  # type: ignore
        "locale": "en",
        "capabilities": {
            "textDocument": text_document_capabilities,
            "workspace": workspace_capabilities,
        },
        "initializationOptions": forwarded_env,
        "processId": os.getpid(),
        "rootPath": repository_absolute_path,
        "rootUri": workspace_uri,
        "workspaceFolders": [
            {
                "uri": workspace_uri,
                "name": os.path.basename(repository_absolute_path),
            }
        ],
    }
def _start_server(self) -> None:
    """
    Start the Pascal Language Server process, register notification/request
    handlers, run the LSP initialize handshake, and wait for readiness.
    """

    def on_register_capability(params: dict) -> None:
        log.debug(f"Capability registered: {params}")

    def on_log_message(msg: dict) -> None:
        log.info(f"LSP: window/logMessage: {msg}")
        # Mark server as ready when we see initialization messages
        lowered = msg.get("message", "").lower()
        if "initialized" in lowered or "ready" in lowered:
            log.info("Pascal language server ready signal detected")
            self.server_ready.set()

    def on_diagnostics(params: dict) -> None:
        log.debug(f"Diagnostics: {params}")

    def on_ignored(params: dict) -> None:
        return

    self.server.on_request("client/registerCapability", on_register_capability)
    self.server.on_notification("window/logMessage", on_log_message)
    self.server.on_notification("window/showMessage", on_log_message)
    self.server.on_notification("textDocument/publishDiagnostics", on_diagnostics)
    self.server.on_notification("$/progress", on_ignored)

    log.info("Starting Pascal server process")
    self.server.start()
    initialize_params = self._get_initialize_params(self.repository_root_path)
    log.info("Sending initialize request from LSP client to LSP server and awaiting response")
    init_response = self.server.send.initialize(initialize_params)
    log.debug(f"Received initialize response from Pascal server: {init_response}")
    # Verify capabilities
    capabilities = init_response.get("capabilities", {})
    assert "textDocumentSync" in capabilities
    # Log which optional capabilities the server advertises
    for capability_key, feature in (
        ("completionProvider", "code completion"),
        ("definitionProvider", "go to definition"),
        ("referencesProvider", "find references"),
        ("documentSymbolProvider", "document symbols"),
    ):
        if capability_key in capabilities:
            log.info(f"Pascal server supports {feature}")
    self.server.notify.initialized({})
    # Wait for server readiness with timeout
    log.info("Waiting for Pascal language server to be ready...")
    if self.server_ready.wait(timeout=5.0):
        log.info("Pascal server initialization complete")
    else:
        # pasls may not send explicit ready signals, so we proceed after timeout
        log.info("Timeout waiting for Pascal server ready signal, assuming server is ready")
        self.server_ready.set()
def is_ignored_dirname(self, dirname: str) -> bool:
    """
    Return True if the directory should be skipped when scanning Pascal projects.

    Covers common Pascal/Lazarus build and backup directories plus the usual
    VCS/tooling directories; the comparison is case-insensitive.
    """
    return dirname.lower() in {
        "lib",
        "backup",
        "__history",
        "__recovery",
        "bin",
        ".git",
        ".svn",
        ".hg",
        "node_modules",
    }
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/pascal_server.py",
"license": "MIT License",
"lines": 796,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/solidlsp/pascal/test_pascal_basic.py | """
Basic integration tests for the Pascal language server functionality.
These tests validate the functionality of the language server APIs
like request_document_symbols using the Pascal test repository.
Uses genericptr/pascal-language-server which returns SymbolInformation[] format:
- Returns classes, structs, enums, typedefs, functions/procedures
- Uses correct SymbolKind values: Class=5, Function=12, Method=6, Struct=23
- Method names don't include parent class prefix; uses containerName instead
"""
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_types import SymbolKind
from test.conftest import language_tests_enabled
# Module-level marks applied to every test in this file: tag them as Pascal
# tests and skip the whole module when the Pascal toolchain is unavailable.
pytestmark = [
    pytest.mark.pascal,
    pytest.mark.skipif(not language_tests_enabled(Language.PASCAL), reason="Pascal tests are disabled (pasls/fpc not available)"),
]
@pytest.mark.pascal
class TestPascalLanguageServerBasics:
    """Test basic functionality of the Pascal language server.

    NOTE(review): the class-level ``pascal`` mark duplicates the module-level
    ``pytestmark``; harmless, but one of the two could be dropped.
    """

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_language_server_initialization(self, language_server: SolidLanguageServer) -> None:
        """Test that Pascal language server can be initialized successfully."""
        assert language_server is not None
        assert language_server.language == Language.PASCAL

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_request_document_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test request_document_symbols for Pascal files.
        genericptr pasls returns proper SymbolKind values:
        - Standalone functions: kind=12 (Function)
        - Classes: kind=5 (Class)
        """
        # Test getting symbols from main.pas
        all_symbols, _root_symbols = language_server.request_document_symbols("main.pas").get_all_symbols_and_roots()
        # Should have symbols
        assert len(all_symbols) > 0, "Should have symbols in main.pas"
        # Should detect standalone functions (SymbolKind.Function = 12)
        function_symbols = [s for s in all_symbols if s.get("kind") == SymbolKind.Function]
        function_names = [s["name"] for s in function_symbols]
        assert "CalculateSum" in function_names, "Should find CalculateSum function"
        assert "PrintMessage" in function_names, "Should find PrintMessage procedure"
        # Should detect classes (SymbolKind.Class = 5)
        class_symbols = [s for s in all_symbols if s.get("kind") == SymbolKind.Class]
        class_names = [s["name"] for s in class_symbols]
        assert "TUser" in class_names, "Should find TUser class"
        assert "TUserManager" in class_names, "Should find TUserManager class"

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_class_methods(self, language_server: SolidLanguageServer) -> None:
        """Test detection of class methods in Pascal files.
        pasls returns class methods with SymbolKind.Method (kind 6), not Function (kind 12).
        """
        all_symbols, _root_symbols = language_server.request_document_symbols("main.pas").get_all_symbols_and_roots()
        # Get all method symbols (pasls returns class methods as SymbolKind.Method = 6)
        method_symbols = [s for s in all_symbols if s.get("kind") == SymbolKind.Method]
        method_names = [s["name"] for s in method_symbols]
        # Should detect TUser methods
        expected_tuser_methods = ["Create", "Destroy", "GetInfo", "UpdateAge"]
        for method in expected_tuser_methods:
            found = method in method_names
            assert found, f"Should find method '{method}'"
        # Should detect TUserManager methods
        expected_manager_methods = ["Create", "Destroy", "AddUser", "GetUserCount", "FindUserByName"]
        for method in expected_manager_methods:
            found = method in method_names
            assert found, f"Should find method '{method}'"

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_helper_unit_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test function detection in Helper unit."""
        # Test with lib/helper.pas
        helper_all_symbols, _helper_root_symbols = language_server.request_document_symbols("lib/helper.pas").get_all_symbols_and_roots()
        # Should have symbols
        assert len(helper_all_symbols) > 0, "Helper unit should have symbols"
        # Extract function symbols
        function_symbols = [s for s in helper_all_symbols if s.get("kind") == SymbolKind.Function]
        function_names = [s["name"] for s in function_symbols]
        # Should detect standalone functions
        expected_functions = ["GetHelperMessage", "MultiplyNumbers", "LogMessage"]
        for func_name in expected_functions:
            assert func_name in function_names, f"Should find {func_name} function in Helper unit"
        # Should also detect THelper class methods (returned as SymbolKind.Method = 6)
        method_symbols = [s for s in helper_all_symbols if s.get("kind") == SymbolKind.Method]
        method_names = [s["name"] for s in method_symbols]
        assert "FormatString" in method_names, "Should find FormatString method"
        assert "IsEven" in method_names, "Should find IsEven method"

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_cross_file_references(self, language_server: SolidLanguageServer) -> None:
        """Test that Pascal LSP can handle cross-file references."""
        # main.pas uses Helper unit
        main_symbols, _main_roots = language_server.request_document_symbols("main.pas").get_all_symbols_and_roots()
        helper_symbols, _helper_roots = language_server.request_document_symbols("lib/helper.pas").get_all_symbols_and_roots()
        # Verify both files have symbols
        assert len(main_symbols) > 0, "main.pas should have symbols"
        assert len(helper_symbols) > 0, "helper.pas should have symbols"
        # Verify GetHelperMessage is in Helper unit
        helper_function_names = [s["name"] for s in helper_symbols if s.get("kind") == SymbolKind.Function]
        assert "GetHelperMessage" in helper_function_names, "Helper unit should export GetHelperMessage"

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_symbol_locations(self, language_server: SolidLanguageServer) -> None:
        """Test that symbols have correct location information.
        Note: genericptr pasls returns the interface declaration location (line ~41),
        not the implementation location (line ~115).
        """
        all_symbols, _root_symbols = language_server.request_document_symbols("main.pas").get_all_symbols_and_roots()
        # Find CalculateSum function
        calc_symbols = [s for s in all_symbols if s.get("name") == "CalculateSum"]
        assert len(calc_symbols) > 0, "Should find CalculateSum"
        calc_symbol = calc_symbols[0]
        # Verify it has location information (SymbolInformation format uses location.range)
        if "location" in calc_symbol:
            location = calc_symbol["location"]
            assert "range" in location, "Location should have range"
            assert "start" in location["range"], "Range should have start"
            assert "line" in location["range"]["start"], "Start should have line"
            line = location["range"]["start"]["line"]
        else:
            # DocumentSymbol format uses range directly
            assert "range" in calc_symbol, "Symbol should have range"
            assert "start" in calc_symbol["range"], "Range should have start"
            line = calc_symbol["range"]["start"]["line"]
        # CalculateSum is declared at line 41 in main.pas (0-indexed would be 40)
        # genericptr pasls returns interface declaration location
        assert 35 <= line <= 45, f"CalculateSum should be around line 41 (interface), got {line}"

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_namespace_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test that genericptr pasls returns Interface namespace symbol."""
        all_symbols, _root_symbols = language_server.request_document_symbols("main.pas").get_all_symbols_and_roots()
        # genericptr pasls adds an "Interface" namespace symbol
        symbol_names = [s["name"] for s in all_symbols]
        # The Interface section should be represented
        # Note: This depends on pasls configuration
        assert len(all_symbols) > 0, "Should have symbols"
        # Interface namespace may or may not be present depending on pasls configuration
        _ = symbol_names  # used for potential future assertions

    @pytest.mark.parametrize("language_server", [Language.PASCAL], indirect=True)
    def test_pascal_hover_with_doc_comments(self, language_server: SolidLanguageServer) -> None:
        """Test that hover returns documentation comments.
        CalculateSum has /// style doc comments that should appear in hover.
        """
        # CalculateSum is declared at line 46 (1-indexed), so line 45 (0-indexed)
        hover = language_server.request_hover("main.pas", 45, 12)
        assert hover is not None, "Hover should return a result"
        # Extract hover content - handle both dict and object formats
        if isinstance(hover, dict):
            contents = hover.get("contents", {})
            value = contents.get("value", "") if isinstance(contents, dict) else str(contents)
        else:
            value = hover.contents.value if hasattr(hover.contents, "value") else str(hover.contents)
        # Should contain the function signature
        assert "CalculateSum" in value, f"Hover should show function name. Got: {value[:500]}"
        # Should contain the doc comment
        assert "Calculates the sum" in value, f"Hover should include doc comment. Got: {value[:500]}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/pascal/test_pascal_basic.py",
"license": "MIT License",
"lines": 152,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/test_lsp_protocol_handler_server.py | """
Tests for JSON-RPC 2.0 params field handling in LSP protocol.
These tests verify the correct handling of the params field in LSP requests and notifications,
specifically ensuring:
- Void-type methods (shutdown, exit) omit params field entirely
- Methods with explicit params include them unchanged
- Methods with None params receive params: {} for Delphi/FPC compatibility
Reference: JSON-RPC 2.0 spec - params field is optional but must be object/array when present.
"""
from typing import Any
import pytest
from solidlsp.lsp_protocol_handler.server import make_notification, make_request
# =============================================================================
# Shared Assertion Helpers (DRY extraction per AI Panel recommendation)
# =============================================================================
def assert_jsonrpc_structure(
result: dict[str, Any],
expected_method: str,
expected_keys: set[str],
*,
expected_id: Any | None = None,
) -> None:
"""Verify JSON-RPC 2.0 structural requirements with 5-point error messages.
Args:
result: The dict returned by make_request/make_notification
expected_method: The method name that should be in the result
expected_keys: Exact set of keys that should be present
expected_id: If provided, verify the id field matches (for requests)
"""
# Verify jsonrpc field
assert "jsonrpc" in result, (
f"STRUCTURE ERROR: Missing required 'jsonrpc' field.\n"
f"Expected: jsonrpc='2.0'\n"
f"Actual keys: {list(result.keys())}\n"
f"GUIDANCE: All JSON-RPC 2.0 messages must include jsonrpc field."
)
assert result["jsonrpc"] == "2.0", (
f"STRUCTURE ERROR: Invalid jsonrpc version.\n"
f"Expected: '2.0'\n"
f"Actual: {result['jsonrpc']!r}\n"
f"GUIDANCE: JSON-RPC 2.0 requires jsonrpc='2.0' exactly."
)
# Verify method field
assert "method" in result, (
f"STRUCTURE ERROR: Missing required 'method' field.\n"
f"Expected: method='{expected_method}'\n"
f"Actual keys: {list(result.keys())}\n"
f"GUIDANCE: All requests/notifications must include method field."
)
assert result["method"] == expected_method, (
f"STRUCTURE ERROR: Method mismatch.\n"
f"Expected: '{expected_method}'\n"
f"Actual: {result['method']!r}\n"
f"GUIDANCE: Method field must match the requested method name."
)
# Verify id field if expected (requests only)
if expected_id is not None:
assert "id" in result, (
f"STRUCTURE ERROR: Missing required 'id' field for request.\n"
f"Expected: id={expected_id!r}\n"
f"Actual keys: {list(result.keys())}\n"
f"GUIDANCE: JSON-RPC 2.0 requests must include id field."
)
assert result["id"] == expected_id, (
f"STRUCTURE ERROR: Request ID mismatch.\n"
f"Expected: {expected_id!r}\n"
f"Actual: {result['id']!r}\n"
f"GUIDANCE: Request ID must be preserved exactly as provided."
)
# Verify exact key set
actual_keys = set(result.keys())
if actual_keys != expected_keys:
extra = sorted(actual_keys - expected_keys)
missing = sorted(expected_keys - actual_keys)
pytest.fail(
f"STRUCTURE ERROR: Key set mismatch for method '{expected_method}'.\n"
f"Expected keys: {sorted(expected_keys)}\n"
f"Actual keys: {sorted(actual_keys)}\n"
f"Extra keys: {extra}\n"
f"Missing keys: {missing}\n"
f"GUIDANCE: Verify key construction logic for Void-type vs normal methods."
)
def assert_params_omitted(result: dict[str, Any], method: str, req_id: str, input_params: Any = None) -> None:
    """Fail unless the params field is absent (required for Void-type methods).

    Args:
        result: The dict returned by make_request/make_notification
        method: Method name for error message context
        req_id: Requirement ID (e.g., 'REQ-1', 'REQ-AI-PANEL-GAP')
        input_params: If provided, shows what params were passed (for explicit params tests)
    """
    if "params" not in result:
        return  # correct: Void-type messages must omit params entirely
    if input_params is not None:
        input_note = f"\nInput params: {input_params}"
    else:
        input_note = ""
    pytest.fail(
        f"{req_id} VIOLATED: {method} method MUST omit params field entirely.{input_note}\n"
        f"Expected: No 'params' key in result\n"
        f"Actual: params={result.get('params')!r}\n"
        f"Actual keys: {list(result.keys())}\n"
        f"REASON: HLS/rust-analyzer Void types reject any params field (even empty object).\n"
        f"GUIDANCE: Void-type constraint takes precedence - implementation must omit params entirely."
    )
def assert_params_equal(result: dict[str, Any], expected_params: Any, req_id: str) -> None:
    """Fail unless the params field is present and equal to the expected value.

    Args:
        result: The dict returned by make_request/make_notification
        expected_params: The exact params value expected
        req_id: Requirement ID for error message context
    """
    if "params" not in result:
        pytest.fail(
            f"{req_id} VIOLATED: params field missing.\n"
            f"Expected: params={expected_params!r}\n"
            f"Actual keys: {list(result.keys())}\n"
            f"GUIDANCE: Non-Void methods must include params field."
        )
    actual = result["params"]
    if actual == expected_params:
        return
    pytest.fail(
        f"{req_id} VIOLATED: params value mismatch.\n"
        f"Expected: {expected_params!r}\n"
        f"Actual: {actual!r}\n"
        f"GUIDANCE: Params must be included exactly as provided (or {{}} for None)."
    )
class TestMakeNotificationParamsHandling:
    """Test make_notification() params field handling per JSON-RPC 2.0 spec.

    Contract under test: Void-type methods (shutdown/exit) omit params;
    explicit params pass through unchanged; None becomes params={} for
    Delphi/FPC compatibility.
    """

    def test_shutdown_method_omits_params_entirely(self) -> None:
        """REQ-1: Void-type method 'shutdown' MUST omit params field entirely."""
        result = make_notification("shutdown", None)
        assert_jsonrpc_structure(result, "shutdown", {"jsonrpc", "method"})
        assert_params_omitted(result, "shutdown", "REQ-1")

    def test_exit_method_omits_params_entirely(self) -> None:
        """REQ-1: Void-type method 'exit' MUST omit params field entirely."""
        result = make_notification("exit", None)
        assert_jsonrpc_structure(result, "exit", {"jsonrpc", "method"})
        assert_params_omitted(result, "exit", "REQ-1")

    def test_notification_with_explicit_params_dict(self) -> None:
        """REQ-2: Methods with explicit params MUST include them unchanged."""
        test_params = {"uri": "file:///test.py", "languageId": "python"}
        result = make_notification("textDocument/didOpen", test_params)
        assert_jsonrpc_structure(result, "textDocument/didOpen", {"jsonrpc", "method", "params"})
        assert_params_equal(result, test_params, "REQ-2")

    def test_notification_with_explicit_params_list(self) -> None:
        """REQ-2: Methods with explicit params (list) MUST include them unchanged."""
        test_params = ["arg1", "arg2", "arg3"]
        result = make_notification("custom/method", test_params)
        assert_jsonrpc_structure(result, "custom/method", {"jsonrpc", "method", "params"})
        assert_params_equal(result, test_params, "REQ-2")

    def test_notification_with_none_params_sends_empty_dict(self) -> None:
        """REQ-3: Methods with None params MUST send params: {} (Delphi/FPC compat)."""
        result = make_notification("textDocument/didChange", None)
        assert_jsonrpc_structure(result, "textDocument/didChange", {"jsonrpc", "method", "params"})
        assert_params_equal(result, {}, "REQ-3")

    def test_notification_with_empty_dict_params(self) -> None:
        """REQ-2: Explicit empty dict params MUST be included unchanged."""
        result = make_notification("custom/notify", {})
        assert_jsonrpc_structure(result, "custom/notify", {"jsonrpc", "method", "params"})
        assert_params_equal(result, {}, "REQ-2")
class TestMakeRequestParamsHandling:
    """Test make_request() params field handling per JSON-RPC 2.0 spec.

    Mirrors the notification tests, additionally verifying that the request
    id is preserved exactly (int and string ids).
    """

    def test_shutdown_request_omits_params_entirely(self) -> None:
        """REQ-1: Void-type method 'shutdown' MUST omit params field entirely (requests)."""
        result = make_request("shutdown", request_id=1, params=None)
        assert_jsonrpc_structure(result, "shutdown", {"jsonrpc", "method", "id"}, expected_id=1)
        assert_params_omitted(result, "shutdown", "REQ-1")

    def test_request_with_explicit_params_dict(self) -> None:
        """REQ-2: Requests with explicit params MUST include them unchanged."""
        test_params = {"textDocument": {"uri": "file:///test.py"}, "position": {"line": 10, "character": 5}}
        result = make_request("textDocument/hover", request_id=42, params=test_params)
        assert_jsonrpc_structure(result, "textDocument/hover", {"jsonrpc", "method", "id", "params"}, expected_id=42)
        assert_params_equal(result, test_params, "REQ-2")

    def test_request_with_none_params_sends_empty_dict(self) -> None:
        """REQ-3: Requests with None params MUST send params: {} (Delphi/FPC compat)."""
        result = make_request("workspace/configuration", request_id=100, params=None)
        assert_jsonrpc_structure(result, "workspace/configuration", {"jsonrpc", "method", "id", "params"}, expected_id=100)
        assert_params_equal(result, {}, "REQ-3")

    def test_request_id_preservation(self) -> None:
        """Verify request_id is correctly included in result (string ID)."""
        test_id = "unique-request-123"
        result = make_request("custom/request", request_id=test_id, params={"key": "value"})
        assert_jsonrpc_structure(result, "custom/request", {"jsonrpc", "method", "id", "params"}, expected_id=test_id)

    def test_request_with_explicit_params_list(self) -> None:
        """REQ-2: Requests with explicit params (list) MUST include them unchanged."""
        test_params = [1, 2, 3]
        result = make_request("custom/sum", request_id=99, params=test_params)
        assert_jsonrpc_structure(result, "custom/sum", {"jsonrpc", "method", "id", "params"}, expected_id=99)
        assert_params_equal(result, test_params, "REQ-2")
class TestVoidMethodsExhaustive:
    """Test all methods that should be treated as Void-type (no params).

    Also verifies the boundary: ONLY shutdown/exit are Void-type — every other
    LSP method must carry a params field (None becomes {}).
    """

    def test_shutdown_request_ignores_explicit_params_dict(self) -> None:
        """REQ-AI-PANEL-GAP: shutdown MUST omit params even when caller explicitly provides params."""
        explicit_params = {"key": "value", "another": "param"}
        result = make_request("shutdown", request_id=1, params=explicit_params)
        assert_jsonrpc_structure(result, "shutdown", {"jsonrpc", "method", "id"}, expected_id=1)
        assert_params_omitted(result, "shutdown", "REQ-AI-PANEL-GAP", input_params=explicit_params)

    def test_exit_notification_ignores_explicit_params(self) -> None:
        """REQ-AI-PANEL-GAP: exit MUST omit params even when caller explicitly provides params."""
        explicit_params = {"unexpected": "params"}
        result = make_notification("exit", explicit_params)
        assert_jsonrpc_structure(result, "exit", {"jsonrpc", "method"})
        assert_params_omitted(result, "exit", "REQ-AI-PANEL-GAP", input_params=explicit_params)

    def test_only_shutdown_and_exit_are_void_methods(self) -> None:
        """REQ-BOUNDARY: Verify EXACTLY shutdown/exit are Void-type - no more, no less."""
        # Positive verification: shutdown and exit MUST omit params
        shutdown_notif = make_notification("shutdown", None)
        exit_notif = make_notification("exit", None)
        shutdown_req = make_request("shutdown", 1, None)
        assert "params" not in shutdown_notif, "shutdown notification should omit params"
        assert "params" not in exit_notif, "exit notification should omit params"
        assert "params" not in shutdown_req, "shutdown request should omit params"
        # Negative verification: other methods MUST include params (even when None -> {})
        non_void_methods = [
            "initialize",
            "initialized",
            "textDocument/didOpen",
            "textDocument/didChange",
            "textDocument/didClose",
            "workspace/didChangeConfiguration",
            "workspace/didChangeWatchedFiles",
        ]
        for method in non_void_methods:
            result_notif = make_notification(method, None)
            result_req = make_request(method, 1, None)
            if "params" not in result_notif:
                pytest.fail(
                    f"BOUNDARY VIOLATION: '{method}' notification treated as Void-type.\n"
                    f"Expected: params field present (should be {{}})\n"
                    f"Actual keys: {list(result_notif.keys())}\n"
                    f"GUIDANCE: Only 'shutdown' and 'exit' should omit params field."
                )
            assert_params_equal(result_notif, {}, f"REQ-3 ({method} notification)")
            if "params" not in result_req:
                pytest.fail(
                    f"BOUNDARY VIOLATION: '{method}' request treated as Void-type.\n"
                    f"Expected: params field present (should be {{}})\n"
                    f"Actual keys: {list(result_req.keys())}\n"
                    f"GUIDANCE: Only 'shutdown' and 'exit' should omit params field."
                )
            assert_params_equal(result_req, {}, f"REQ-3 ({method} request)")
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/test_lsp_protocol_handler_server.py",
"license": "MIT License",
"lines": 236,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/rust/test_rust_analyzer_detection.py | """
Tests for rust-analyzer detection logic.
These tests describe the expected behavior of RustAnalyzer._ensure_rust_analyzer_installed():
1. Rustup should be checked FIRST (avoids picking up incorrect PATH aliases)
2. Common installation locations (Homebrew, cargo, Scoop) should be checked as fallback
3. System PATH should be checked last (can pick up incompatible versions)
4. Error messages should list all searched locations
5. Windows-specific paths should be checked on Windows
WHY these tests matter:
- Users install rust-analyzer via Homebrew, cargo, Scoop, or system packages - not just rustup
- macOS Homebrew installs to /opt/homebrew/bin (Apple Silicon) or /usr/local/bin (Intel)
- Windows users install via Scoop, Chocolatey, or cargo
- Detection failing means Serena is unusable for Rust, even when rust-analyzer is correctly installed
- Without these tests, the detection logic can silently break for non-rustup users
"""
import os
import pathlib
import sys
from unittest.mock import MagicMock, patch
import pytest
# Platform detection for skipping platform-specific tests.
# Evaluated once at import time and used in @pytest.mark.skipif guards below.
IS_WINDOWS = sys.platform == "win32"
IS_UNIX = sys.platform != "win32"  # NOTE(review): not referenced by the visible tests; kept for symmetry
class TestRustAnalyzerDetection:
    """
    Unit tests for rust-analyzer binary detection logic.

    Every test patches the global lookups (``shutil.which``, ``os.path.isfile``,
    ``os.access``) and/or the rustup helpers on ``RustAnalyzer.DependencyProvider``,
    so no real rust-analyzer installation is required to run them.
    """
    @pytest.mark.rust
    def test_detect_from_path_as_last_resort(self):
        """
        GIVEN rustup is not available
        AND rust-analyzer is NOT in common locations (Homebrew, cargo)
        AND rust-analyzer IS in system PATH
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should return the path from shutil.which as last resort
        WHY: PATH is checked last to avoid picking up incorrect aliases.
        Users with rust-analyzer in PATH but not via rustup/common locations
        should still work.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        # Mock rustup to be unavailable
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
            # Mock common locations to NOT exist
            # NOTE(review): this blanket isfile patch is shadowed by the inner
            # selective_isfile patch before detection runs; it is effectively inert.
            with patch("os.path.isfile", return_value=False):
                # Mock PATH to have rust-analyzer
                with patch("shutil.which") as mock_which:
                    mock_which.return_value = "/custom/bin/rust-analyzer"
                    with patch("os.access", return_value=True):
                        # Need isfile to return True for PATH result only
                        def selective_isfile(path):
                            return path == "/custom/bin/rust-analyzer"
                        with patch("os.path.isfile", side_effect=selective_isfile):
                            result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                            assert result == "/custom/bin/rust-analyzer"
                            mock_which.assert_called_with("rust-analyzer")
    @pytest.mark.rust
    @pytest.mark.skipif(IS_WINDOWS, reason="Homebrew paths only apply to macOS/Linux")
    def test_detect_from_homebrew_apple_silicon_path(self):
        """
        GIVEN rustup is NOT available
        AND rust-analyzer is installed via Homebrew on Apple Silicon Mac
        AND it is NOT in PATH (shutil.which returns None)
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should find /opt/homebrew/bin/rust-analyzer
        WHY: Apple Silicon Macs use /opt/homebrew/bin for Homebrew.
        This path should be checked as fallback when rustup is unavailable.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        def mock_isfile(path):
            return path == "/opt/homebrew/bin/rust-analyzer"
        def mock_access(path, mode):
            return path == "/opt/homebrew/bin/rust-analyzer"
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
            with patch("shutil.which", return_value=None):
                with patch("os.path.isfile", side_effect=mock_isfile):
                    with patch("os.access", side_effect=mock_access):
                        result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                        assert result == "/opt/homebrew/bin/rust-analyzer"
    @pytest.mark.rust
    @pytest.mark.skipif(IS_WINDOWS, reason="Homebrew paths only apply to macOS/Linux")
    def test_detect_from_homebrew_intel_path(self):
        """
        GIVEN rustup is NOT available
        AND rust-analyzer is installed via Homebrew on Intel Mac
        AND it is NOT in PATH
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should find /usr/local/bin/rust-analyzer
        WHY: Intel Macs use /usr/local/bin for Homebrew.
        Linux systems may also install to this location.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        def mock_isfile(path):
            return path == "/usr/local/bin/rust-analyzer"
        def mock_access(path, mode):
            return path == "/usr/local/bin/rust-analyzer"
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
            with patch("shutil.which", return_value=None):
                with patch("os.path.isfile", side_effect=mock_isfile):
                    with patch("os.access", side_effect=mock_access):
                        result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                        assert result == "/usr/local/bin/rust-analyzer"
    @pytest.mark.rust
    @pytest.mark.skipif(IS_WINDOWS, reason="Unix cargo path - Windows has separate test")
    def test_detect_from_cargo_install_path(self):
        """
        GIVEN rustup is NOT available
        AND rust-analyzer is installed via `cargo install rust-analyzer`
        AND it is NOT in PATH or Homebrew locations
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should find ~/.cargo/bin/rust-analyzer
        WHY: `cargo install rust-analyzer` is a common installation method.
        The binary lands in ~/.cargo/bin which may not be in PATH.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        cargo_path = os.path.expanduser("~/.cargo/bin/rust-analyzer")
        def mock_isfile(path):
            return path == cargo_path
        def mock_access(path, mode):
            return path == cargo_path
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
            with patch("shutil.which", return_value=None):
                with patch("os.path.isfile", side_effect=mock_isfile):
                    with patch("os.access", side_effect=mock_access):
                        result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                        assert result == cargo_path
    @pytest.mark.rust
    def test_detect_from_rustup_when_available(self):
        """
        GIVEN rustup has rust-analyzer installed
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should return the rustup path
        WHY: Rustup is checked FIRST to avoid picking up incorrect aliases from PATH.
        This ensures compatibility with the toolchain.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        with patch("shutil.which", return_value=None):
            with patch("os.path.isfile", return_value=False):
                with patch.object(
                    RustAnalyzer.DependencyProvider,
                    "_get_rust_analyzer_via_rustup",
                    return_value="/home/user/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/bin/rust-analyzer",
                ):
                    result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                    assert "rustup" in result or ".rustup" in result
    @pytest.mark.rust
    @pytest.mark.skipif(IS_WINDOWS, reason="Unix error messages - Windows has separate test")
    def test_error_message_lists_searched_locations_when_not_found(self):
        """
        GIVEN rust-analyzer is NOT installed anywhere
        AND rustup is NOT installed
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should raise RuntimeError with helpful message listing searched locations
        WHY: Users need to know WHERE Serena looked so they can fix their installation.
        The old error "Neither rust-analyzer nor rustup is installed" was unhelpful.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        with patch("shutil.which", return_value=None):
            with patch("os.path.isfile", return_value=False):
                with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
                    with patch.object(RustAnalyzer.DependencyProvider, "_get_rustup_version", return_value=None):
                        with pytest.raises(RuntimeError) as exc_info:
                            RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                        error_message = str(exc_info.value)
                        # Error should list the locations that were searched (Unix paths)
                        assert "/opt/homebrew/bin/rust-analyzer" in error_message or "Homebrew" in error_message
                        assert "cargo" in error_message.lower() or ".cargo/bin" in error_message
                        # Error should suggest installation methods
                        assert "rustup" in error_message.lower() or "Rustup" in error_message
    @pytest.mark.rust
    def test_detection_priority_prefers_rustup_over_path_and_common_locations(self):
        """
        GIVEN rust-analyzer is available via rustup
        AND rust-analyzer also exists in PATH and common locations
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should return the rustup version
        WHY: Rustup provides version management and ensures compatibility.
        Using PATH directly can pick up incorrect aliases or incompatible versions
        that cause LSP crashes (as discovered in CI failures).
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        rustup_path = "/home/user/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/bin/rust-analyzer"
        # Rustup has rust-analyzer, PATH also has it, common locations also exist
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=rustup_path):
            with patch("shutil.which", return_value="/custom/path/rust-analyzer"):
                with patch("os.path.isfile", return_value=True):
                    with patch("os.access", return_value=True):
                        result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                        # Should use rustup version, NOT PATH or common locations
                        assert result == rustup_path
    @pytest.mark.rust
    @pytest.mark.skipif(IS_WINDOWS, reason="Uses Unix paths - Windows has different behavior")
    def test_skips_nonexecutable_files(self):
        """
        GIVEN a file exists at a detection path but is NOT executable
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should skip that path and continue checking others
        WHY: A non-executable file (e.g., broken symlink, wrong permissions)
        should not be returned as a valid rust-analyzer path.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        # NOTE(review): mock_isfile below is never patched in — mock_isfile_for_cargo
        # is the one actually used; this local is dead code.
        def mock_isfile(path):
            # File exists at Homebrew location but not executable
            return path == "/opt/homebrew/bin/rust-analyzer"
        def mock_access(path, mode):
            # Homebrew location exists but not executable
            if path == "/opt/homebrew/bin/rust-analyzer":
                return False
            # Cargo location is executable
            if path == os.path.expanduser("~/.cargo/bin/rust-analyzer"):
                return True
            return False
        def mock_isfile_for_cargo(path):
            return path in ["/opt/homebrew/bin/rust-analyzer", os.path.expanduser("~/.cargo/bin/rust-analyzer")]
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
            with patch("shutil.which", return_value=None):
                with patch("os.path.isfile", side_effect=mock_isfile_for_cargo):
                    with patch("os.access", side_effect=mock_access):
                        result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                        # Should skip non-executable Homebrew and use cargo
                        assert result == os.path.expanduser("~/.cargo/bin/rust-analyzer")
    @pytest.mark.rust
    def test_detect_from_scoop_shims_path_on_windows(self):
        """
        GIVEN rustup is NOT available
        AND rust-analyzer is installed via Scoop on Windows
        AND it is NOT in PATH
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should find ~/scoop/shims/rust-analyzer.exe
        WHY: Scoop is a popular package manager for Windows.
        The binary lands in ~/scoop/shims which may not be in PATH.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        home = pathlib.Path.home()
        scoop_path = str(home / "scoop" / "shims" / "rust-analyzer.exe")
        def mock_isfile(path):
            return path == scoop_path
        def mock_access(path, mode):
            return path == scoop_path
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
            with patch("platform.system", return_value="Windows"):
                with patch("shutil.which", return_value=None):
                    with patch("os.path.isfile", side_effect=mock_isfile):
                        with patch("os.access", side_effect=mock_access):
                            result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                            assert result == scoop_path
    @pytest.mark.rust
    def test_detect_from_cargo_path_on_windows(self):
        """
        GIVEN rustup is NOT available
        AND rust-analyzer is installed via cargo on Windows
        AND it is NOT in PATH or Scoop locations
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should find ~/.cargo/bin/rust-analyzer.exe
        WHY: `cargo install rust-analyzer` works on Windows.
        The binary has .exe extension and lands in ~/.cargo/bin.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        home = pathlib.Path.home()
        cargo_path = str(home / ".cargo" / "bin" / "rust-analyzer.exe")
        def mock_isfile(path):
            return path == cargo_path
        def mock_access(path, mode):
            return path == cargo_path
        with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
            with patch("platform.system", return_value="Windows"):
                with patch("shutil.which", return_value=None):
                    with patch("os.path.isfile", side_effect=mock_isfile):
                        with patch("os.access", side_effect=mock_access):
                            result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                            assert result == cargo_path
    @pytest.mark.rust
    def test_windows_error_message_suggests_windows_package_managers(self):
        """
        GIVEN rust-analyzer is NOT installed anywhere on Windows
        AND rustup is NOT installed
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should raise RuntimeError with Windows-specific installation suggestions
        WHY: Windows users need Windows-specific package manager suggestions
        (Scoop, Chocolatey) instead of Homebrew/apt.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        with patch("platform.system", return_value="Windows"):
            with patch("shutil.which", return_value=None):
                with patch("os.path.isfile", return_value=False):
                    with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
                        with patch.object(RustAnalyzer.DependencyProvider, "_get_rustup_version", return_value=None):
                            with pytest.raises(RuntimeError) as exc_info:
                                RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                            error_message = str(exc_info.value)
                            # Error should suggest Windows-specific package managers
                            assert "Scoop" in error_message or "scoop" in error_message
                            assert "Chocolatey" in error_message or "choco" in error_message
                            # Should NOT suggest Homebrew on Windows
                            assert "Homebrew" not in error_message and "brew" not in error_message
    @pytest.mark.rust
    def test_auto_install_via_rustup_when_not_found(self):
        """
        GIVEN rust-analyzer is NOT installed anywhere
        AND rustup IS installed
        WHEN _ensure_rust_analyzer_installed is called
        AND rustup component add succeeds
        THEN it should return the rustup-installed path
        WHY: Serena should auto-install rust-analyzer via rustup when possible.
        This matches the original behavior and enables CI to work without pre-installing.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        with patch("shutil.which", return_value=None):
            with patch("os.path.isfile", return_value=False):
                with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup") as mock_rustup_path:
                    # First call returns None (not installed), second returns path (after install)
                    mock_rustup_path.side_effect = [None, "/home/user/.rustup/toolchains/stable/bin/rust-analyzer"]
                    with patch.object(RustAnalyzer.DependencyProvider, "_get_rustup_version", return_value="1.70.0"):
                        with patch("subprocess.run") as mock_run:
                            mock_run.return_value = MagicMock(returncode=0, stdout="", stderr="")
                            result = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                            assert result == "/home/user/.rustup/toolchains/stable/bin/rust-analyzer"
                            mock_run.assert_called_once()
                            assert mock_run.call_args[0][0] == ["rustup", "component", "add", "rust-analyzer"]
    @pytest.mark.rust
    def test_auto_install_failure_falls_through_to_common_paths(self):
        """
        GIVEN rust-analyzer is NOT installed anywhere
        AND rustup IS installed
        WHEN _ensure_rust_analyzer_installed is called
        AND rustup component add FAILS
        THEN it should fall through to common paths and eventually raise helpful error
        WHY: The new resilient behavior tries all fallback options before failing.
        When rustup auto-install fails, we try common paths (Homebrew, cargo, etc.)
        as a last resort. This is more robust than failing immediately.
        The error message should still help users install rust-analyzer.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        with patch("shutil.which", return_value=None):
            with patch("os.path.isfile", return_value=False):
                with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
                    with patch.object(RustAnalyzer.DependencyProvider, "_get_rustup_version", return_value="1.70.0"):
                        with patch("subprocess.run") as mock_run:
                            mock_run.return_value = MagicMock(
                                returncode=1, stdout="", stderr="error: component 'rust-analyzer' is not available"
                            )
                            with pytest.raises(RuntimeError) as exc_info:
                                RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                            error_message = str(exc_info.value)
                            # Error should provide helpful installation instructions
                            assert "rust-analyzer is not installed" in error_message.lower()
                            assert "rustup" in error_message.lower()  # Should suggest rustup installation
    @pytest.mark.rust
    def test_auto_install_success_but_binary_not_found_falls_through(self):
        """
        GIVEN rust-analyzer is NOT installed anywhere
        AND rustup IS installed
        WHEN _ensure_rust_analyzer_installed is called
        AND rustup component add SUCCEEDS
        BUT the binary is still not found after installation
        THEN it should fall through to common paths and eventually raise helpful error
        WHY: Even if rustup install reports success but binary isn't found,
        we try common paths as fallback. The final error provides installation
        guidance to help users resolve the issue.
        """
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer
        with patch("shutil.which", return_value=None):
            with patch("os.path.isfile", return_value=False):
                with patch.object(RustAnalyzer.DependencyProvider, "_get_rust_analyzer_via_rustup", return_value=None):
                    with patch.object(RustAnalyzer.DependencyProvider, "_get_rustup_version", return_value="1.70.0"):
                        with patch("subprocess.run") as mock_run:
                            mock_run.return_value = MagicMock(returncode=0, stdout="", stderr="")
                            with pytest.raises(RuntimeError) as exc_info:
                                RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
                            error_message = str(exc_info.value)
                            # Error should indicate rust-analyzer is not available and provide install instructions
                            assert "rust-analyzer is not installed" in error_message.lower()
                            assert "searched locations" in error_message.lower()  # Should show what was checked
class TestRustAnalyzerDetectionIntegration:
    """
    End-to-end tests exercising detection against the real host system.
    Automatically skipped when no rust-analyzer installation can be found.
    """

    @pytest.mark.rust
    def test_detection_finds_installed_rust_analyzer(self):
        """
        GIVEN rust-analyzer is installed on this system (via any method)
        WHEN _ensure_rust_analyzer_installed is called
        THEN it should return a valid path
        This test verifies the detection logic works end-to-end on the current system.
        """
        import shutil
        from solidlsp.language_servers.rust_analyzer import RustAnalyzer

        def _is_executable_file(candidate):
            # A usable binary must both exist and carry the execute bit.
            return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

        if not shutil.which("rust-analyzer"):
            # Not on PATH: look at the well-known install locations before skipping.
            fallback_candidates = (
                "/opt/homebrew/bin/rust-analyzer",
                "/usr/local/bin/rust-analyzer",
                os.path.expanduser("~/.cargo/bin/rust-analyzer"),
            )
            if not any(_is_executable_file(candidate) for candidate in fallback_candidates):
                pytest.skip("rust-analyzer not installed on this system")

        detected = RustAnalyzer.DependencyProvider._ensure_rust_analyzer_installed()
        assert detected is not None
        assert os.path.isfile(detected)
        assert os.access(detected, os.X_OK)
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/rust/test_rust_analyzer_detection.py",
"license": "MIT License",
"lines": 399,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:src/solidlsp/language_servers/matlab_language_server.py | """
MATLAB language server integration using the official MathWorks MATLAB Language Server.
Architecture:
This module uses the MathWorks MATLAB VS Code extension (mathworks.language-matlab)
which contains a Node.js-based language server. The extension is downloaded from the
VS Code Marketplace and extracted locally. The language server spawns a real MATLAB
process to provide code intelligence - it is NOT a standalone static analyzer.
Flow: Serena -> Node.js LSP Server -> MATLAB Process -> Code Analysis
Why MATLAB installation is required:
The language server launches an actual MATLAB session (via MatlabSession.js) to perform
code analysis, diagnostics, and other features. Without MATLAB, the LSP cannot function.
This is different from purely static analyzers that parse code without execution.
Requirements:
- MATLAB R2021b or later must be installed and licensed
- Node.js must be installed (for running the language server)
- MATLAB path can be specified via MATLAB_PATH environment variable or auto-detected
The MATLAB language server provides:
- Code diagnostics (publishDiagnostics)
- Code completions (completionProvider)
- Go to definition (definitionProvider)
- Find references (referencesProvider)
- Document symbols (documentSymbol)
- Document formatting (documentFormattingProvider)
- Function signature help (signatureHelpProvider)
- Symbol rename (renameProvider)
"""
import glob
import logging
import os
import pathlib
import platform
import shutil
import threading
import zipfile
from typing import Any, cast
import requests
from solidlsp.ls import LanguageServerDependencyProvider, LSPFileBuffer, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import DocumentSymbol, InitializeParams, SymbolInformation
from solidlsp.settings import SolidLSPSettings
# Module-level logger, named after this module's import path.
log = logging.getLogger(__name__)
# Environment variable for MATLAB installation path
MATLAB_PATH_ENV_VAR = "MATLAB_PATH"
# VS Code Marketplace URL for MATLAB extension
# (the gallery "latest/vspackage" endpoint serves the newest published .vsix archive)
MATLAB_EXTENSION_URL = (
    "https://marketplace.visualstudio.com/_apis/public/gallery/publishers/MathWorks/vsextensions/language-matlab/latest/vspackage"
)
class MatlabLanguageServer(SolidLanguageServer):
"""
Provides MATLAB specific instantiation of the LanguageServer class using the official
MathWorks MATLAB Language Server.
The MATLAB language server requires:
- MATLAB R2021b or later installed on the system
- Node.js for running the language server
The language server is automatically downloaded from the VS Code marketplace
(MathWorks.language-matlab extension) and extracted.
You can pass the following entries in ls_specific_settings["matlab"]:
- matlab_path: Path to MATLAB installation (overrides MATLAB_PATH env var)
"""
    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates a MatlabLanguageServer instance. This class is not meant to be instantiated directly.
        Use LanguageServer.create() instead.

        :param config: language-server configuration for this instance
        :param repository_root_path: absolute path of the repository to analyze
        :param solidlsp_settings: global SolidLSP settings (custom LS settings, resource dirs)
        """
        super().__init__(
            config,
            repository_root_path,
            None,
            "matlab",
            solidlsp_settings,
        )
        # The dependency provider is presumably created by the base constructor
        # via _create_dependency_provider(); the assert guards that assumption.
        assert isinstance(self._dependency_provider, self.DependencyProvider)
        # Resolve (and validate) the MATLAB installation eagerly so a missing
        # installation fails fast, before any server process is launched.
        self._matlab_path = self._dependency_provider.get_matlab_path()
        self.server_ready = threading.Event()  # set in _start_server when MATLAB reports ready
        self.initialize_searcher_command_available = threading.Event()
def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)
    class DependencyProvider(LanguageServerDependencyProvider):
        """
        Resolves the runtime dependencies needed to launch the MATLAB language
        server: the MathWorks VS Code extension (downloaded from the marketplace
        on demand), the Node.js runtime that executes it, and the local MATLAB
        installation the server attaches to.
        """
        def __init__(self, custom_settings: SolidLSPSettings.CustomLSSettings, ls_resources_dir: str):
            super().__init__(custom_settings, ls_resources_dir)
            # Cache for get_matlab_path(); populated on first successful lookup.
            self._matlab_path: str | None = None
        @classmethod
        def _download_matlab_extension(cls, url: str, target_dir: str) -> bool:
            """
            Download and extract the MATLAB extension from VS Code marketplace.
            The VS Code marketplace packages extensions as .vsix files (which are ZIP archives).
            This method downloads the VSIX file and extracts it to get the language server.
            Args:
                url: VS Code marketplace URL for the MATLAB extension
                target_dir: Directory where the extension will be extracted
            Returns:
                True if successful, False otherwise
            """
            try:
                log.info(f"Downloading MATLAB extension from {url}")
                # Create target directory for the extension
                os.makedirs(target_dir, exist_ok=True)
                # Download with proper headers to mimic VS Code marketplace client
                headers = {
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
                    "Accept": "application/octet-stream, application/vsix, */*",
                }
                response = requests.get(url, headers=headers, stream=True, timeout=300)
                response.raise_for_status()
                # Save to temporary VSIX file
                # NOTE(review): if extraction below raises, the temp .vsix is left
                # on disk (only the except branch runs) — consider try/finally cleanup.
                temp_file = os.path.join(target_dir, "matlab_extension_temp.vsix")
                total_size = int(response.headers.get("content-length", 0))
                log.info(f"Downloading {total_size / 1024 / 1024:.1f} MB...")
                with open(temp_file, "wb") as f:
                    downloaded = 0
                    for chunk in response.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)
                            downloaded += len(chunk)
                            # 10 MiB is an exact multiple of the 8 KiB chunk size,
                            # so this fires periodically (not "never").
                            if total_size > 0 and downloaded % (10 * 1024 * 1024) == 0:
                                progress = (downloaded / total_size) * 100
                                log.info(f"Download progress: {progress:.1f}%")
                log.info("Download complete, extracting...")
                # Extract VSIX file (VSIX files are ZIP archives)
                with zipfile.ZipFile(temp_file, "r") as zip_ref:
                    zip_ref.extractall(target_dir)
                # Clean up temp file
                os.remove(temp_file)
                log.info("MATLAB extension extracted successfully")
                return True
            except Exception as e:
                # Best-effort: failures are logged and signalled via the bool
                # return so the caller can raise one consolidated error.
                log.error(f"Error downloading/extracting MATLAB extension: {e}")
                return False
        def _find_matlab_extension(self) -> str | None:
            """
            Find MATLAB extension in various locations.
            Search order:
            1. Environment variable (MATLAB_EXTENSION_PATH)
            2. Default download location (~/.serena/ls_resources/matlab-extension)
            3. VS Code installed extensions
            Returns:
                Path to MATLAB extension directory or None if not found
            """
            # Check environment variable
            env_path = os.environ.get("MATLAB_EXTENSION_PATH")
            if env_path and os.path.exists(env_path):
                log.debug(f"Found MATLAB extension via MATLAB_EXTENSION_PATH: {env_path}")
                return env_path
            elif env_path:
                log.warning(f"MATLAB_EXTENSION_PATH set but directory not found: {env_path}")
            # Check default download location
            default_path = os.path.join(self._ls_resources_dir, "matlab-extension", "extension")
            if os.path.exists(default_path):
                log.debug(f"Found MATLAB extension in default location: {default_path}")
                return default_path
            # Search VS Code extensions
            vscode_extensions_dir = os.path.expanduser("~/.vscode/extensions")
            if os.path.exists(vscode_extensions_dir):
                for entry in os.listdir(vscode_extensions_dir):
                    if entry.startswith("mathworks.language-matlab"):
                        ext_path = os.path.join(vscode_extensions_dir, entry)
                        if os.path.isdir(ext_path):
                            log.debug(f"Found MATLAB extension in VS Code: {ext_path}")
                            return ext_path
            log.debug("MATLAB extension not found in any known location")
            return None
        def _download_and_install_matlab_extension(self) -> str | None:
            """
            Download and install MATLAB extension from VS Code marketplace.
            Returns:
                Path to installed extension or None if download failed
            """
            matlab_extension_dir = os.path.join(self._ls_resources_dir, "matlab-extension")
            log.info(f"Downloading MATLAB extension from: {MATLAB_EXTENSION_URL}")
            if self._download_matlab_extension(MATLAB_EXTENSION_URL, matlab_extension_dir):
                # The VSIX archive contains an "extension" subdirectory with the payload.
                extension_path = os.path.join(matlab_extension_dir, "extension")
                if os.path.exists(extension_path):
                    log.info("MATLAB extension downloaded and installed successfully")
                    return extension_path
                else:
                    log.error(f"Download completed but extension not found at: {extension_path}")
            else:
                log.error("Failed to download MATLAB extension from marketplace")
            return None
        @classmethod
        def _get_executable_path(cls, extension_path: str) -> str:
            """
            Get the path to the MATLAB language server executable based on platform.
            The language server is a Node.js script located in the extension's server directory.
            Raises RuntimeError if the script is found in neither known location.
            """
            # The MATLAB extension bundles the language server in the 'server' directory
            server_dir = os.path.join(extension_path, "server", "out")
            main_script = os.path.join(server_dir, "index.js")
            if os.path.exists(main_script):
                return main_script
            # Alternative location
            alt_script = os.path.join(extension_path, "out", "index.js")
            if os.path.exists(alt_script):
                return alt_script
            raise RuntimeError(f"MATLAB language server script not found in extension at {extension_path}")
        @staticmethod
        def _find_matlab_installation() -> str:
            """
            Find MATLAB installation path.
            Search order:
            1. MATLAB_PATH environment variable
            2. Common installation locations based on platform
            Returns:
                Path to MATLAB installation directory.
            Raises:
                RuntimeError: If MATLAB installation is not found.
            """
            # Check environment variable first
            matlab_path = os.environ.get(MATLAB_PATH_ENV_VAR)
            if matlab_path and os.path.isdir(matlab_path):
                log.info(f"Using MATLAB from environment variable {MATLAB_PATH_ENV_VAR}: {matlab_path}")
                return matlab_path
            system = platform.system()
            if system == "Darwin":  # macOS
                # Check common macOS locations
                search_patterns = [
                    "/Applications/MATLAB_*.app",
                    "/Volumes/*/Applications/MATLAB_*.app",
                    os.path.expanduser("~/Applications/MATLAB_*.app"),
                ]
                for pattern in search_patterns:
                    matches = sorted(glob.glob(pattern), reverse=True)  # Newest version first
                    for match in matches:
                        if os.path.isdir(match):
                            log.info(f"Found MATLAB installation: {match}")
                            return match
            elif system == "Windows":
                # Check common Windows locations
                search_patterns = [
                    "C:\\Program Files\\MATLAB\\R*",
                    "C:\\Program Files (x86)\\MATLAB\\R*",
                ]
                for pattern in search_patterns:
                    # Reverse lexicographic sort puts the newest release name first.
                    matches = sorted(glob.glob(pattern), reverse=True)
                    for match in matches:
                        if os.path.isdir(match):
                            log.info(f"Found MATLAB installation: {match}")
                            return match
            elif system == "Linux":
                # Check common Linux locations
                search_patterns = [
                    "/usr/local/MATLAB/R*",
                    "/opt/MATLAB/R*",
                    os.path.expanduser("~/MATLAB/R*"),
                ]
                for pattern in search_patterns:
                    matches = sorted(glob.glob(pattern), reverse=True)
                    for match in matches:
                        if os.path.isdir(match):
                            log.info(f"Found MATLAB installation: {match}")
                            return match
            raise RuntimeError(
                f"MATLAB installation not found. Set the {MATLAB_PATH_ENV_VAR} environment variable "
                "to your MATLAB installation directory (e.g., /Applications/MATLAB_R2024b.app on macOS, "
                "C:\\Program Files\\MATLAB\\R2024b on Windows, or /usr/local/MATLAB/R2024b on Linux)."
            )
        def get_matlab_path(self) -> str:
            """
            Get MATLAB path from settings or auto-detect.

            Resolution order: cached value, the "matlab_path" custom setting,
            then platform auto-detection. Raises RuntimeError when no valid
            installation directory can be resolved.
            """
            if self._matlab_path is not None:
                return self._matlab_path
            matlab_path = self._custom_settings.get("matlab_path")
            if not matlab_path:
                matlab_path = self._find_matlab_installation()  # Raises RuntimeError if not found
            # Verify MATLAB path exists
            if not os.path.isdir(matlab_path):
                raise RuntimeError(f"MATLAB installation directory does not exist: {matlab_path}")
            log.info(f"Using MATLAB installation: {matlab_path}")
            self._matlab_path = matlab_path
            return matlab_path
        def create_launch_command(self) -> list[str]:
            """
            Build the command line that launches the MATLAB language server
            (Node.js running the extension's server script in stdio mode).
            Raises RuntimeError if Node.js is missing or the extension cannot
            be located or downloaded.
            """
            # Verify node is installed
            node_path = shutil.which("node")
            if node_path is None:
                raise RuntimeError("Node.js is not installed or isn't in PATH. Please install Node.js and try again.")
            # Find existing extension or download if needed
            extension_path = self._find_matlab_extension()
            if extension_path is None:
                log.info("MATLAB extension not found on disk, attempting to download...")
                extension_path = self._download_and_install_matlab_extension()
            if extension_path is None:
                raise RuntimeError(
                    "Failed to locate or download MATLAB Language Server. Please either:\n"
                    "1. Set MATLAB_EXTENSION_PATH environment variable to the MATLAB extension directory\n"
                    "2. Install the MATLAB extension in VS Code (MathWorks.language-matlab)\n"
                    "3. Ensure internet connection for automatic download"
                )
            # Get the language server script path
            server_script = self._get_executable_path(extension_path)
            if not os.path.exists(server_script):
                raise RuntimeError(f"MATLAB Language Server script not found at: {server_script}")
            # Build the command to run the language server
            # The MATLAB language server is run via Node.js with the --stdio flag
            cmd = [node_path, server_script, "--stdio"]
            return cmd
        def create_launch_command_env(self) -> dict[str, str]:
            """
            Extra environment variables for the server process.
            MATLAB_INSTALL_PATH presumably tells the language server where the
            MATLAB installation lives — TODO confirm against the extension docs.
            """
            return {
                "MATLAB_INSTALL_PATH": self.get_matlab_path(),
            }
@staticmethod
def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
"""Return the initialize params for the MATLAB Language Server."""
root_uri = pathlib.Path(repository_absolute_path).as_uri()
initialize_params = {
"locale": "en",
"capabilities": {
"textDocument": {
"synchronization": {"didSave": True, "dynamicRegistration": True},
"completion": {
"dynamicRegistration": True,
"completionItem": {"snippetSupport": True},
},
"definition": {"dynamicRegistration": True},
"references": {"dynamicRegistration": True},
"documentSymbol": {
"dynamicRegistration": True,
"hierarchicalDocumentSymbolSupport": True,
"symbolKind": {"valueSet": list(range(1, 27))},
},
"hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
"signatureHelp": {"dynamicRegistration": True},
"codeAction": {"dynamicRegistration": True},
"formatting": {"dynamicRegistration": True},
"rename": {"dynamicRegistration": True, "prepareSupport": True},
"publishDiagnostics": {"relatedInformation": True},
},
"workspace": {
"workspaceFolders": True,
"didChangeConfiguration": {"dynamicRegistration": True},
"symbol": {"dynamicRegistration": True},
},
},
"processId": os.getpid(),
"rootPath": repository_absolute_path,
"rootUri": root_uri,
"workspaceFolders": [
{
"uri": root_uri,
"name": os.path.basename(repository_absolute_path),
}
],
}
return cast(InitializeParams, initialize_params)
def _start_server(self) -> None:
    """Start the MATLAB Language Server and wait for it to be ready.

    Registers all request/notification handlers BEFORE starting the process so no
    early server message is missed, sends `initialize`/`initialized`, and then
    waits (up to 60s) for a log message indicating MATLAB's MVM is attached.
    """
    root_uri = pathlib.Path(self.repository_root_path).as_uri()
    # Resolves the event once the server registers workspace/executeCommand.
    def register_capability_handler(params: dict) -> None:
        assert "registrations" in params
        for registration in params["registrations"]:
            if registration["method"] == "workspace/executeCommand":
                self.initialize_searcher_command_available.set()
        return
    def execute_client_command_handler(params: dict) -> list:
        # No client commands are implemented; reply with an empty result.
        return []
    def workspace_folders_handler(params: dict) -> list:
        """Handle workspace/workspaceFolders request from the server."""
        return [{"uri": root_uri, "name": os.path.basename(self.repository_root_path)}]
    def workspace_configuration_handler(params: dict) -> list:
        """Handle workspace/configuration request from the server."""
        items = params.get("items", [])
        result = []
        for item in items:
            section = item.get("section", "")
            if section == "MATLAB":
                # Return MATLAB configuration
                result.append({"installPath": self._matlab_path, "matlabConnectionTiming": "onStart"})
            else:
                # Sections we do not know get an empty configuration object.
                result.append({})
        return result
    def do_nothing(params: dict) -> None:
        return
    def window_log_message(msg: dict) -> None:
        log.info(f"LSP: window/logMessage: {msg}")
        message_text = msg.get("message", "")
        # Check for MATLAB language server ready signals
        # Wait for "MVM attach success" or "Adding workspace folder" which indicates MATLAB is fully ready
        # Note: "connected to" comes earlier but the server isn't fully ready at that point
        if "mvm attach success" in message_text.lower() or "adding workspace folder" in message_text.lower():
            log.info("MATLAB language server ready signal detected (MVM attached)")
            self.server_ready.set()
    self.server.on_request("client/registerCapability", register_capability_handler)
    self.server.on_notification("window/logMessage", window_log_message)
    self.server.on_request("workspace/executeClientCommand", execute_client_command_handler)
    self.server.on_request("workspace/workspaceFolders", workspace_folders_handler)
    self.server.on_request("workspace/configuration", workspace_configuration_handler)
    # Progress and diagnostics notifications are intentionally ignored.
    self.server.on_notification("$/progress", do_nothing)
    self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
    log.info("Starting MATLAB server process")
    self.server.start()
    initialize_params = self._get_initialize_params(self.repository_root_path)
    log.info("Sending initialize request from LSP client to LSP server and awaiting response")
    init_response = self.server.send.initialize(initialize_params)
    log.debug(f"Received initialize response from MATLAB server: {init_response}")
    # Verify basic capabilities
    capabilities = init_response.get("capabilities", {})
    assert capabilities.get("textDocumentSync") in [1, 2], "Expected Full or Incremental text sync"
    # Log available capabilities
    if "completionProvider" in capabilities:
        log.info("MATLAB server supports completions")
    if "definitionProvider" in capabilities:
        log.info("MATLAB server supports go-to-definition")
    if "referencesProvider" in capabilities:
        log.info("MATLAB server supports find-references")
    if "documentSymbolProvider" in capabilities:
        log.info("MATLAB server supports document symbols")
    if "documentFormattingProvider" in capabilities:
        log.info("MATLAB server supports document formatting")
    if "renameProvider" in capabilities:
        log.info("MATLAB server supports rename")
    self.server.notify.initialized({})
    # Wait for server readiness with timeout
    # MATLAB takes longer to start than most language servers (typically 10-30 seconds)
    log.info("Waiting for MATLAB language server to be ready (this may take up to 60 seconds)...")
    if not self.server_ready.wait(timeout=60.0):
        # Fallback: assume server is ready after timeout
        log.info("Timeout waiting for MATLAB server ready signal, proceeding anyway")
        self.server_ready.set()
    else:
        log.info("MATLAB server initialization complete")
def is_ignored_dirname(self, dirname: str) -> bool:
    """Ignore MATLAB/Simulink build artifacts in addition to the default ignore list."""
    matlab_generated = [
        "slprj",  # Simulink project files
        "codegen",  # Code generation output
        "sldemo_cache",  # Simulink demo cache
        "helperFiles",  # Common helper file directories
    ]
    if super().is_ignored_dirname(dirname):
        return True
    return dirname in matlab_generated
def _request_document_symbols(
    self, relative_file_path: str, file_data: LSPFileBuffer | None
) -> list[SymbolInformation] | list[DocumentSymbol] | None:
    """
    Fetch document symbols via the base class, then normalize MATLAB-specific quirks.

    The MATLAB LSP sometimes reports symbol names as one-element lists (notably for
    %% cell-mode section markers); normalization converts those names to strings so
    they fit the unified symbol format.
    """
    raw_symbols = super()._request_document_symbols(relative_file_path, file_data)
    if raw_symbols:
        self._normalize_matlab_symbols(raw_symbols)
    return raw_symbols
def _normalize_matlab_symbols(self, symbols: list[SymbolInformation] | list[DocumentSymbol]) -> None:
    """
    Convert list-valued symbol names (a MATLAB LSP spec violation) to plain strings, in place.

    The MATLAB LSP returns section names as lists like ["Section Name"]; this
    recursively rewrites them to strings, descending into any children.
    """
    for entry in symbols:
        # MATLAB LSP may return names as lists for script sections; cast via Any
        # because the runtime type deviates from the LSP spec.
        raw_name: Any = entry.get("name")
        if isinstance(raw_name, list):
            # Take the first element; an empty list becomes the empty string.
            entry["name"] = raw_name[0] if raw_name else ""
            log.debug("Normalized MATLAB symbol name from list to string")
        nested: Any = entry.get("children")
        if nested and isinstance(nested, list):
            self._normalize_matlab_symbols(nested)
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/matlab_language_server.py",
"license": "MIT License",
"lines": 457,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/solidlsp/matlab/test_matlab_basic.py | """
Basic integration tests for the MATLAB language server functionality.
These tests validate the functionality of the language server APIs
like request_document_symbols using the MATLAB test repository.
Requirements:
- MATLAB R2021b or later must be installed
- MATLAB_PATH environment variable should be set to MATLAB installation directory
- Node.js and npm must be installed
"""
import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
# Skip all tests if MATLAB is not available
# Mark every test in this module so it can be (de)selected with `-m matlab`.
pytestmark = pytest.mark.matlab
# Check if MATLAB is available: either MATLAB_PATH is set in the environment,
# or one of the known default install locations exists (macOS app bundles for
# R2024b/R2025b only — other platforms must set MATLAB_PATH explicitly).
MATLAB_AVAILABLE = os.environ.get("MATLAB_PATH") is not None or any(
    os.path.exists(p)
    for p in [
        "/Applications/MATLAB_R2024b.app",
        "/Applications/MATLAB_R2025b.app",
        "/Volumes/S1/Applications/MATLAB_R2024b.app",
        "/Volumes/S1/Applications/MATLAB_R2025b.app",
    ]
)
@pytest.mark.skipif(not MATLAB_AVAILABLE, reason="MATLAB installation not found")
class TestMatlabLanguageServerBasics:
    """Test basic functionality of the MATLAB language server."""

    @pytest.mark.parametrize("language_server", [Language.MATLAB], indirect=True)
    def test_matlab_language_server_initialization(self, language_server: SolidLanguageServer) -> None:
        """Test that MATLAB language server can be initialized successfully."""
        assert language_server is not None
        assert language_server.language == Language.MATLAB

    @pytest.mark.parametrize("language_server", [Language.MATLAB], indirect=True)
    def test_matlab_request_document_symbols_class(self, language_server: SolidLanguageServer) -> None:
        """Test request_document_symbols for MATLAB class file."""
        # Calculator.m is a classdef file in the test repository.
        symbols, _roots = language_server.request_document_symbols("Calculator.m").get_all_symbols_and_roots()
        # LSP SymbolKind 5 == Class
        class_names = {s["name"] for s in symbols if s.get("kind") == 5}
        assert "Calculator" in class_names, "Should find Calculator class"
        # LSP SymbolKind 6 == Method, 12 == Function
        method_names = {s["name"] for s in symbols if s.get("kind") in (6, 12)}
        for method in ("add", "subtract", "multiply", "divide"):
            assert method in method_names, f"Should find {method} method in Calculator class"

    @pytest.mark.parametrize("language_server", [Language.MATLAB], indirect=True)
    def test_matlab_request_document_symbols_function(self, language_server: SolidLanguageServer) -> None:
        """Test request_document_symbols for MATLAB function file."""
        # lib/mathUtils.m is a function file with several local functions.
        symbols, _roots = language_server.request_document_symbols("lib/mathUtils.m").get_all_symbols_and_roots()
        # LSP SymbolKind 12 == Function
        function_names = {s["name"] for s in symbols if s.get("kind") == 12}
        assert "mathUtils" in function_names, "Should find mathUtils function"
        for func in ("computeFactorial", "computeFibonacci", "checkPrime", "computeStats"):
            assert func in function_names, f"Should find {func} local function"

    @pytest.mark.parametrize("language_server", [Language.MATLAB], indirect=True)
    def test_matlab_request_document_symbols_script(self, language_server: SolidLanguageServer) -> None:
        """Test request_document_symbols for MATLAB script file."""
        # Scripts expose less structure (variables/sections); just verify the call succeeds.
        symbols, _roots = language_server.request_document_symbols("main.m").get_all_symbols_and_roots()
        assert symbols is not None
@pytest.mark.skipif(not MATLAB_AVAILABLE, reason="MATLAB installation not found")
class TestMatlabLanguageServerReferences:
    """Test find references functionality of the MATLAB language server."""

    @pytest.mark.parametrize("language_server", [Language.MATLAB], indirect=True)
    def test_matlab_find_references_within_file(self, language_server: SolidLanguageServer) -> None:
        """Test finding references within a single MATLAB file."""
        # Position of the 'result' variable inside Calculator.m's add method;
        # a basic smoke test that references resolve at all.
        refs = language_server.request_references("Calculator.m", 25, 12)
        assert refs is not None

    @pytest.mark.parametrize("language_server", [Language.MATLAB], indirect=True)
    def test_matlab_find_references_cross_file(self, language_server: SolidLanguageServer) -> None:
        """Test finding references across MATLAB files."""
        # 'Calculator' usage in main.m should resolve to references in both files.
        refs = language_server.request_references("main.m", 11, 8)
        assert refs is not None
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/matlab/test_matlab_basic.py",
"license": "MIT License",
"lines": 90,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:src/serena/util/gui.py | import os
import platform
def system_has_usable_display() -> bool:
    """Best-effort check whether a graphical display is available on this system.

    macOS and native Windows are assumed to always have a display; on other
    (Unix-like) systems the X11/Wayland environment variables decide.
    """
    current_os = platform.system()
    if current_os in ("Darwin", "Windows"):
        # Desktop operating systems: assume a display exists.
        return True
    # Unix-like (Linux, FreeBSD, Cygwin/MSYS, ...): users may run headless,
    # so treat a non-empty DISPLAY or WAYLAND_DISPLAY as "display usable".
    return bool(os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"))
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/util/gui.py",
"license": "MIT License",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/solidlsp/language_servers/fsharp_language_server.py | """
Provides F# specific instantiation of the LanguageServer class.
"""
import logging
import os
import pathlib
import shutil
import threading
from pathlib import Path
from overrides import override
from serena.util.dotnet import DotNETUtil
from solidlsp.language_servers.common import RuntimeDependency, RuntimeDependencyCollection
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_exceptions import SolidLSPException
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class FSharpLanguageServer(SolidLanguageServer):
    """
    Provides F# specific instantiation of the LanguageServer class using Ionide LSP (FsAutoComplete).
    Contains various configurations and settings specific to F# development.
    """

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates an FSharpLanguageServer instance. This class is not meant to be instantiated directly.
        Use LanguageServer.create() instead.

        :param config: language server configuration
        :param repository_root_path: absolute path of the repository to serve
        :param solidlsp_settings: global SolidLSP settings (used to locate the LS resources directory)
        """
        # Ensures FsAutoComplete is installed (installing on demand via `dotnet tool install`)
        # and returns the full launch command string.
        fsharp_lsp_executable_path = self._setup_runtime_dependencies(config, solidlsp_settings)
        super().__init__(
            config,
            repository_root_path,
            ProcessLaunchInfo(cmd=fsharp_lsp_executable_path, cwd=repository_root_path),
            "fsharp",
            solidlsp_settings,
        )
        # Readiness events; note they are created here but the visible code never sets them.
        self.server_ready = threading.Event()
        self.initialize_searcher_command_available = threading.Event()

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # .NET build outputs and package caches that should never be indexed.
        return super().is_ignored_dirname(dirname) or dirname in [
            "bin",
            "obj",
            "packages",
            ".paket",
            "paket-files",
            ".fake",
            ".ionide",
        ]

    @classmethod
    def _setup_runtime_dependencies(cls, config: LanguageServerConfig, solidlsp_settings: SolidLSPSettings) -> str:
        """
        Setup runtime dependencies for F# Language Server and return the command to start the server.

        :raises RuntimeError: if the `dotnet tool install` of FsAutoComplete fails
        :raises FileNotFoundError: if the executable is still missing after installation
        """
        dotnet_exe = DotNETUtil("8.0", allow_higher_version=True).get_dotnet_path_or_raise()
        # NOTE(review): this RuntimeDependencyCollection is constructed and immediately
        # discarded; the actual installation happens via subprocess below. Looks like
        # dead code — confirm before removing.
        RuntimeDependencyCollection(
            [
                RuntimeDependency(
                    id="fsautocomplete",
                    description="FsAutoComplete (Ionide F# Language Server)",
                    command="dotnet tool install --tool-path ./ fsautocomplete",
                    platform_id="any",
                ),
            ]
        )
        # Install FsAutoComplete if not already installed
        fsharp_ls_dir = os.path.join(cls.ls_resources_dir(solidlsp_settings), "fsharp-lsp")
        fsautocomplete_path = os.path.join(fsharp_ls_dir, "fsautocomplete")
        # Handle Windows executable extension
        if os.name == "nt":
            fsautocomplete_path += ".exe"
        if not os.path.exists(fsautocomplete_path):
            log.info(f"FsAutoComplete executable not found at {fsautocomplete_path}. Installing...")
            # Ensure the directory exists
            os.makedirs(fsharp_ls_dir, exist_ok=True)
            # Install FsAutoComplete using dotnet tool install
            try:
                import subprocess
                result = subprocess.run(
                    [dotnet_exe, "tool", "install", "--tool-path", fsharp_ls_dir, "fsautocomplete"],
                    cwd=fsharp_ls_dir,
                    capture_output=True,
                    text=True,
                    check=True,
                )
                log.info("FsAutoComplete installed successfully")
                log.debug(f"Installation output: {result.stdout}")
            except subprocess.CalledProcessError as e:
                log.error(f"Failed to install FsAutoComplete: {e.stderr}")
                raise RuntimeError(f"Failed to install FsAutoComplete: {e.stderr}")
        if not os.path.exists(fsautocomplete_path):
            raise FileNotFoundError(
                f"FsAutoComplete executable not found at {fsautocomplete_path}, something went wrong with the installation."
            )
        # FsAutoComplete uses --lsp flag for LSP mode
        return f"{fsautocomplete_path} --adaptive-lsp-server-enabled --project-graph-enabled --use-fcs-transparent-compiler"

    def _get_initialize_params(self) -> InitializeParams:
        """
        Returns the initialize params for the F# Language Server.
        """
        root_uri = pathlib.Path(self.repository_root_path).as_uri()
        initialize_params = {
            "processId": os.getpid(),
            "rootPath": self.repository_root_path,
            "rootUri": root_uri,
            "workspaceFolders": [{"name": "workspace", "uri": root_uri}],
            "capabilities": {
                "workspace": {
                    "applyEdit": True,
                    "workspaceEdit": {"documentChanges": True},
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "didChangeWatchedFiles": {"dynamicRegistration": True},
                    "symbol": {"dynamicRegistration": True},
                    "executeCommand": {"dynamicRegistration": True},
                    "configuration": True,
                    "workspaceFolders": True,
                },
                "textDocument": {
                    "synchronization": {
                        "dynamicRegistration": True,
                        "willSave": True,
                        "willSaveWaitUntil": True,
                        "didSave": True,
                    },
                    "completion": {
                        "dynamicRegistration": True,
                        "contextSupport": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                        },
                    },
                    "hover": {
                        "dynamicRegistration": True,
                        "contentFormat": ["markdown", "plaintext"],
                    },
                    "signatureHelp": {
                        "dynamicRegistration": True,
                        "signatureInformation": {"documentationFormat": ["markdown", "plaintext"]},
                    },
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentHighlight": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 26))},  # All SymbolKind values
                        "hierarchicalDocumentSymbolSupport": True,
                    },
                    "codeAction": {
                        "dynamicRegistration": True,
                        "codeActionLiteralSupport": {
                            "codeActionKind": {
                                "valueSet": [
                                    "",
                                    "quickfix",
                                    "refactor",
                                    "refactor.extract",
                                    "refactor.inline",
                                    "refactor.rewrite",
                                    "source",
                                    "source.organizeImports",
                                ]
                            }
                        },
                    },
                    "codeLens": {"dynamicRegistration": True},
                    "formatting": {"dynamicRegistration": True},
                    "rangeFormatting": {"dynamicRegistration": True},
                    "onTypeFormatting": {"dynamicRegistration": True},
                    "rename": {"dynamicRegistration": True},
                    "documentLink": {"dynamicRegistration": True},
                    "publishDiagnostics": {
                        "relatedInformation": True,
                        "versionSupport": False,
                        "tagSupport": {"valueSet": [1, 2]},
                    },
                    "implementation": {"dynamicRegistration": True},
                    "typeDefinition": {"dynamicRegistration": True},
                    "colorProvider": {"dynamicRegistration": True},
                    "foldingRange": {
                        "dynamicRegistration": True,
                        "rangeLimit": 5000,
                        "lineFoldingOnly": True,
                    },
                    "declaration": {"dynamicRegistration": True},
                    "selectionRange": {"dynamicRegistration": True},
                },
                "window": {
                    "workDoneProgress": True,
                },
            },
            "initializationOptions": {
                # F# specific initialization options
                "automaticWorkspaceInit": True,
                "abstractClassStubGeneration": True,
                "abstractClassStubGenerationObjectIdentifier": "this",
                "abstractClassStubGenerationMethodBody": 'failwith "Not Implemented"',
                "addFsiWatcher": False,
                "codeLenses": {"signature": {"enabled": True}, "references": {"enabled": True}},
                "disableInMemoryProjectReferences": False,
                "dotNetRoot": self._get_dotnet_root(),
                "enableMSBuildProjectGraph": False,
                "excludeProjectDirectories": ["paket-files"],
                "externalAutocomplete": False,
                "fsac": {"attachDebugger": False, "silencedLogs": [], "conserveMemory": False, "netCoreDllPath": ""},
                "fsiExtraParameters": [],
                "generateBinlog": False,
                "interfaceStubGeneration": True,
                "interfaceStubGenerationObjectIdentifier": "this",
                "interfaceStubGenerationMethodBody": 'failwith "Not Implemented"',
                "keywordsAutocomplete": True,
                "linter": True,
                "pipelineHints": {"enabled": True},
                "recordStubGeneration": True,
                "recordStubGenerationBody": 'failwith "Not Implemented"',
                "resolveNamespaces": True,
                "saveOnlyOpenFiles": False,
                "showProjectExplorerIn": ["ionide", "solution"],
                "simplifyNameAnalyzer": True,
                "smartIndent": False,
                "suggestGitignore": True,
                "suggestSdkScripts": True,
                "unionCaseStubGeneration": True,
                "unionCaseStubGenerationBody": 'failwith "Not Implemented"',
                "unusedDeclarationsAnalyzer": True,
                "unusedOpensAnalyzer": True,
                "verboseLogging": False,
                "workspaceModePeekDeepLevel": 2,
                "workspacePath": self.repository_root_path,
            },
            "trace": "off",
        }
        return initialize_params  # type: ignore

    def _get_dotnet_root(self) -> str:
        """
        Get the .NET root directory.

        Parses `dotnet --info` for the SDK base path and returns its grandparent;
        falls back to the directory containing the `dotnet` executable, or "" if
        dotnet is not on PATH.
        """
        dotnet_exe = shutil.which("dotnet")
        if dotnet_exe:
            # Try to get the installation path
            try:
                import subprocess
                result = subprocess.run([dotnet_exe, "--info"], capture_output=True, text=True, check=True)
                lines = result.stdout.split("\n")
                for line in lines:
                    if "Base Path:" in line or "Base path:" in line:
                        base_path = line.split(":", 1)[1].strip()
                        # Get the parent directory (remove 'sdk/version' part)
                        return str(Path(base_path).parent.parent)
            # NOTE(review): CalledProcessError is redundant here since Exception
            # already covers it — intent appears to be "best effort, never raise".
            except (subprocess.CalledProcessError, Exception):
                pass
        # Fallback: use the directory containing dotnet executable
        if dotnet_exe:
            return str(Path(dotnet_exe).parent)
        return ""

    def _start_server(self) -> None:
        """
        Start the F# Language Server with custom handlers.

        Handlers are registered before the process starts so no early server
        message is missed; then initialize/initialized are exchanged.
        """
        def handle_window_log_message(params: dict) -> None:
            """Handle window/logMessage from the LSP server."""
            message = params.get("message", "")
            message_type = params.get("type", 1)
            # Map LSP log levels to Python logging levels
            level_map = {1: logging.ERROR, 2: logging.WARNING, 3: logging.INFO, 4: logging.DEBUG}
            level = level_map.get(message_type, logging.INFO)
            log.log(level, f"FsAutoComplete: {message}")
        def handle_window_show_message(params: dict) -> None:
            """Handle window/showMessage from the LSP server."""
            message = params.get("message", "")
            message_type = params.get("type", 1)
            # Map LSP message types to Python logging levels
            level_map = {1: logging.ERROR, 2: logging.WARNING, 3: logging.INFO, 4: logging.DEBUG}
            level = level_map.get(message_type, logging.INFO)
            log.log(level, f"FsAutoComplete Message: {message}")
        def handle_workspace_configuration(params: dict) -> list:
            """Handle workspace/configuration requests from the LSP server."""
            # Return empty configuration for now
            items = params.get("items", [])
            return [None] * len(items)
        def handle_client_register_capability(params: dict) -> None:
            """Handle client/registerCapability requests from the LSP server."""
            # For now, just acknowledge the registration
            return
        def handle_client_unregister_capability(params: dict) -> None:
            """Handle client/unregisterCapability requests from the LSP server."""
            # For now, just acknowledge the unregistration
            return
        def handle_work_done_progress_create(params: dict) -> None:
            """Handle window/workDoneProgress/create requests from the LSP server."""
            # Just acknowledge the request - we don't need to track progress for now
            return
        # Register custom handlers
        self.server.on_notification("window/logMessage", handle_window_log_message)
        self.server.on_notification("window/showMessage", handle_window_show_message)
        self.server.on_request("workspace/configuration", handle_workspace_configuration)
        self.server.on_request("client/registerCapability", handle_client_register_capability)
        self.server.on_request("client/unregisterCapability", handle_client_unregister_capability)
        self.server.on_request("window/workDoneProgress/create", handle_work_done_progress_create)
        log.info("Starting FsAutoComplete F# language server process")
        try:
            self.server.start()
        except Exception as e:
            log.error(f"Failed to start F# language server process: {e}")
            raise SolidLSPException(f"Failed to start F# language server: {e}")
        # Send initialization
        initialize_params = self._get_initialize_params()
        log.info("Sending initialize request to F# language server")
        try:
            self.server.send.initialize(initialize_params)
            log.debug("Received initialize response from F# language server")
        except Exception as e:
            raise SolidLSPException(f"Failed to initialize F# language server for {self.repository_root_path}: {e}") from e
        # Complete initialization
        self.server.notify.initialized({})
        log.info("F# language server initialized successfully")

    @override
    def _get_wait_time_for_cross_file_referencing(self) -> float:
        """
        F# projects can be large and may need more time for cross-file analysis.
        """
        return 15.0  # 15 seconds should be sufficient for most F# projects
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/fsharp_language_server.py",
"license": "MIT License",
"lines": 325,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/groovy_language_server.py | """
Provides Groovy specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Groovy.
"""
import dataclasses
import logging
import os
import pathlib
import shlex
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import Language, LanguageServerConfig
from solidlsp.ls_utils import FileUtils, PlatformUtils
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
@dataclasses.dataclass
class GroovyRuntimeDependencyPaths:
    """
    Stores the paths to the runtime dependencies of Groovy Language Server
    """

    # Path to the `java` executable used to launch the language server JAR.
    java_path: str
    # JAVA_HOME directory passed in the server process environment.
    java_home_path: str
    # Absolute path to the Groovy Language Server JAR.
    ls_jar_path: str
    # Optional Groovy installation root; unused by the code visible here.
    groovy_home_path: str | None = None
class GroovyLanguageServer(SolidLanguageServer):
"""
Provides Groovy specific instantiation of the LanguageServer class.
Contains various configurations and settings specific to Groovy.
"""
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
    """
    Creates a Groovy Language Server instance. This class is not meant to be instantiated directly. Use LanguageServer.create() instead.

    :param config: language server configuration
    :param repository_root_path: absolute path of the repository to serve
    :param solidlsp_settings: global SolidLSP settings (source of the Groovy-specific
        `ls_jar_options` and JAR/Java paths)
    """
    # Resolve Java and the language-server JAR (downloading a bundled JRE if needed).
    runtime_dependency_paths = self._setup_runtime_dependencies(solidlsp_settings)
    self.runtime_dependency_paths = runtime_dependency_paths
    # Get jar options from configuration
    ls_jar_options = []
    if solidlsp_settings.ls_specific_settings:
        groovy_settings = solidlsp_settings.get_ls_specific_settings(Language.GROOVY)
        jar_options_str = groovy_settings.get("ls_jar_options", "")
        if jar_options_str:
            # shlex.split honors shell-style quoting, so options containing spaces work.
            ls_jar_options = shlex.split(jar_options_str)
            log.info(f"Using Groovy LS JAR options from configuration: {jar_options_str}")
    # Create command to execute the Groovy Language Server
    cmd = [self.runtime_dependency_paths.java_path, "-jar", self.runtime_dependency_paths.ls_jar_path]
    cmd.extend(ls_jar_options)
    # Set environment variables including JAVA_HOME
    proc_env = {"JAVA_HOME": self.runtime_dependency_paths.java_home_path}
    super().__init__(
        config,
        repository_root_path,
        ProcessLaunchInfo(cmd=cmd, env=proc_env, cwd=repository_root_path),
        "groovy",
        solidlsp_settings,
    )
    log.info(f"Starting Groovy Language Server with jar options: {ls_jar_options}")
@classmethod
def _setup_runtime_dependencies(cls, solidlsp_settings: SolidLSPSettings) -> GroovyRuntimeDependencyPaths:
    """
    Setup runtime dependencies for Groovy Language Server and return paths.

    Uses a user-configured Java home if present in the Groovy-specific settings;
    otherwise downloads a bundled JRE (from the vscode-java release artifacts)
    into the LS resources directory on first use.

    :raises RuntimeError: (via _find_groovy_ls_jar) if no language-server JAR is configured
    """
    platform_id = PlatformUtils.get_platform_id()
    # Verify platform support
    assert (
        platform_id.value.startswith("win-") or platform_id.value.startswith("linux-") or platform_id.value.startswith("osx-")
    ), "Only Windows, Linux and macOS platforms are supported for Groovy in multilspy at the moment"
    # Check if user specified custom Java home path
    java_home_path = None
    java_path = None
    if solidlsp_settings and solidlsp_settings.ls_specific_settings:
        groovy_settings = solidlsp_settings.get_ls_specific_settings(Language.GROOVY)
        custom_java_home = groovy_settings.get("ls_java_home_path")
        if custom_java_home:
            log.info(f"Using custom Java home path from configuration: {custom_java_home}")
            java_home_path = custom_java_home
            # Determine java executable path based on platform
            if platform_id.value.startswith("win-"):
                java_path = os.path.join(java_home_path, "bin", "java.exe")
            else:
                java_path = os.path.join(java_home_path, "bin", "java")
    # If no custom Java home path, download and use bundled Java
    if java_home_path is None:
        # Runtime dependency information
        runtime_dependencies = {
            "java": {
                "win-x64": {
                    "url": "https://github.com/redhat-developer/vscode-java/releases/download/v1.42.0/java-win32-x64-1.42.0-561.vsix",
                    "archiveType": "zip",
                    "java_home_path": "extension/jre/21.0.7-win32-x86_64",
                    "java_path": "extension/jre/21.0.7-win32-x86_64/bin/java.exe",
                },
                "linux-x64": {
                    "url": "https://github.com/redhat-developer/vscode-java/releases/download/v1.42.0/java-linux-x64-1.42.0-561.vsix",
                    "archiveType": "zip",
                    "java_home_path": "extension/jre/21.0.7-linux-x86_64",
                    "java_path": "extension/jre/21.0.7-linux-x86_64/bin/java",
                },
                "linux-arm64": {
                    "url": "https://github.com/redhat-developer/vscode-java/releases/download/v1.42.0/java-linux-arm64-1.42.0-561.vsix",
                    "archiveType": "zip",
                    "java_home_path": "extension/jre/21.0.7-linux-aarch64",
                    "java_path": "extension/jre/21.0.7-linux-aarch64/bin/java",
                },
                "osx-x64": {
                    "url": "https://github.com/redhat-developer/vscode-java/releases/download/v1.42.0/java-darwin-x64-1.42.0-561.vsix",
                    "archiveType": "zip",
                    "java_home_path": "extension/jre/21.0.7-macosx-x86_64",
                    "java_path": "extension/jre/21.0.7-macosx-x86_64/bin/java",
                },
                "osx-arm64": {
                    "url": "https://github.com/redhat-developer/vscode-java/releases/download/v1.42.0/java-darwin-arm64-1.42.0-561.vsix",
                    "archiveType": "zip",
                    "java_home_path": "extension/jre/21.0.7-macosx-aarch64",
                    "java_path": "extension/jre/21.0.7-macosx-aarch64/bin/java",
                },
            },
        }
        java_dependency = runtime_dependencies["java"][platform_id.value]
        static_dir = os.path.join(cls.ls_resources_dir(solidlsp_settings), "groovy_language_server")
        os.makedirs(static_dir, exist_ok=True)
        java_dir = os.path.join(static_dir, "java")
        os.makedirs(java_dir, exist_ok=True)
        java_home_path = os.path.join(java_dir, java_dependency["java_home_path"])
        java_path = os.path.join(java_dir, java_dependency["java_path"])
        if not os.path.exists(java_path):
            log.info(f"Downloading Java for {platform_id.value}...")
            FileUtils.download_and_extract_archive(java_dependency["url"], java_dir, java_dependency["archiveType"])
            # The extracted java binary must be executable on POSIX systems.
            if not platform_id.value.startswith("win-"):
                os.chmod(java_path, 0o755)
    assert java_path and os.path.exists(java_path), f"Java executable not found at {java_path}"
    ls_jar_path = cls._find_groovy_ls_jar(solidlsp_settings)
    return GroovyRuntimeDependencyPaths(java_path=java_path, java_home_path=java_home_path, ls_jar_path=ls_jar_path)
@classmethod
def _find_groovy_ls_jar(cls, solidlsp_settings: SolidLSPSettings) -> str:
    """Locate the Groovy Language Server JAR via the path configured in the Serena settings.

    :raises RuntimeError: if no JAR path is configured or the configured file does not exist
    """
    if solidlsp_settings and solidlsp_settings.ls_specific_settings:
        configured_path = solidlsp_settings.get_ls_specific_settings(Language.GROOVY).get("ls_jar_path")
        if configured_path and os.path.exists(configured_path):
            log.info(f"Using Groovy LS JAR from configuration: {configured_path}")
            return configured_path
    # if JAR not found
    raise RuntimeError(
        "Groovy Language Server JAR not found. To use Groovy language support:\n"
        "Set 'ls_jar_path' in groovy settings in serena_config.yml:\n"
        " ls_specific_settings:\n"
        " groovy:\n"
        " ls_jar_path: '/path/to/groovy-language-server.jar'\n"
        " Ensure the JAR file is available at the configured path\n"
    )
@staticmethod
def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
    """Build the LSP ``initialize`` request parameters for the Groovy Language Server.

    Relative paths are resolved to absolute ones before deriving the file:// URI.
    """
    if not os.path.isabs(repository_absolute_path):
        repository_absolute_path = os.path.abspath(repository_absolute_path)
    root_uri = pathlib.Path(repository_absolute_path).as_uri()
    # Per-feature client capabilities; dynamic registration is enabled throughout.
    text_document_capabilities = {
        "synchronization": {"dynamicRegistration": True, "didSave": True},
        "completion": {"dynamicRegistration": True},
        "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
        "definition": {"dynamicRegistration": True},
        "references": {"dynamicRegistration": True},
        "documentSymbol": {"dynamicRegistration": True},
        "workspaceSymbol": {"dynamicRegistration": True},
        "signatureHelp": {"dynamicRegistration": True},
        "rename": {"dynamicRegistration": True},
    }
    # Server-side Groovy settings: empty classpath, diagnostics and completion on.
    groovy_settings = {
        "classpath": [],
        "diagnostics": {"enabled": True},
        "completion": {"enabled": True},
    }
    params = {
        "clientInfo": {"name": "Serena Groovy Client", "version": "1.0.0"},
        "rootPath": repository_absolute_path,
        "rootUri": root_uri,
        "capabilities": {
            "textDocument": text_document_capabilities,
            "workspace": {"workspaceFolders": True},
        },
        "initializationOptions": {"settings": {"groovy": groovy_settings}},
        "processId": os.getpid(),
        "workspaceFolders": [
            {"uri": root_uri, "name": os.path.basename(repository_absolute_path)}
        ],
    }
    return params  # type: ignore
def _start_server(self) -> None:
    """
    Starts the Groovy Language Server: registers handlers for server-initiated
    requests/notifications, launches the process, performs the LSP initialize
    handshake and verifies the capabilities Serena depends on.
    """

    def execute_client_command_handler(params: dict) -> list:
        # Client-side commands are not executed; reply with an empty result.
        return []

    def do_nothing(params: dict) -> None:
        return

    def window_log_message(msg: dict) -> None:
        # Forward server log messages into our own log.
        log.info(f"LSP: window/logMessage: {msg}")

    # Ignore registrations/notifications we take no action on.
    self.server.on_request("client/registerCapability", do_nothing)
    self.server.on_notification("language/status", do_nothing)
    self.server.on_notification("window/logMessage", window_log_message)
    self.server.on_request("workspace/executeClientCommand", execute_client_command_handler)
    self.server.on_notification("$/progress", do_nothing)
    self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
    self.server.on_notification("language/actionableNotification", do_nothing)
    log.info("Starting Groovy server process")
    self.server.start()
    initialize_params = self._get_initialize_params(self.repository_root_path)
    log.info("Sending initialize request from LSP client to LSP server and awaiting response")
    init_response = self.server.send.initialize(initialize_params)
    # Fail fast if the server lacks any capability Serena relies on.
    capabilities = init_response["capabilities"]
    assert "textDocumentSync" in capabilities, "Server must support textDocumentSync"
    assert "hoverProvider" in capabilities, "Server must support hover"
    assert "completionProvider" in capabilities, "Server must support code completion"
    assert "signatureHelpProvider" in capabilities, "Server must support signature help"
    assert "definitionProvider" in capabilities, "Server must support go to definition"
    assert "referencesProvider" in capabilities, "Server must support find references"
    assert "documentSymbolProvider" in capabilities, "Server must support document symbols"
    assert "workspaceSymbolProvider" in capabilities, "Server must support workspace symbols"
    self.server.notify.initialized({})
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/groovy_language_server.py",
"license": "MIT License",
"lines": 230,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/powershell_language_server.py | """
Provides PowerShell specific instantiation of the LanguageServer class using PowerShell Editor Services.
Contains various configurations and settings specific to PowerShell scripting.
"""
import logging
import os
import pathlib
import platform
import shutil
import tempfile
import threading
import zipfile
from pathlib import Path
import requests
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
# Module-level logger for this language-server integration.
log = logging.getLogger(__name__)

# PowerShell Editor Services (PSES) release version to download from GitHub.
PSES_VERSION = "4.4.0"
class PowerShellLanguageServer(SolidLanguageServer):
    """
    Provides PowerShell specific instantiation of the LanguageServer class using PowerShell Editor Services.
    Contains various configurations and settings specific to PowerShell scripting.
    """

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For PowerShell projects, ignore common build/output directories
        return super().is_ignored_dirname(dirname) or dirname in [
            "bin",
            "obj",
            ".vscode",
            "TestResults",
            "Output",
        ]

    @staticmethod
    def _get_pwsh_path() -> str | None:
        """Get the path to PowerShell Core (pwsh) executable.

        Checks PATH first, then well-known per-platform install locations.
        Returns None when pwsh cannot be found.
        """
        # Check if pwsh is in PATH
        pwsh = shutil.which("pwsh")
        if pwsh:
            return pwsh
        # Check common installation locations
        home = Path.home()
        system = platform.system()
        possible_paths: list[Path] = []
        if system == "Windows":
            possible_paths = [
                Path(os.environ.get("PROGRAMFILES", "C:\\Program Files")) / "PowerShell" / "7" / "pwsh.exe",
                Path(os.environ.get("PROGRAMFILES", "C:\\Program Files")) / "PowerShell" / "7-preview" / "pwsh.exe",
                home / "AppData" / "Local" / "Microsoft" / "PowerShell" / "pwsh.exe",
            ]
        elif system == "Darwin":
            possible_paths = [
                Path("/usr/local/bin/pwsh"),
                Path("/opt/homebrew/bin/pwsh"),
                home / ".dotnet" / "tools" / "pwsh",
            ]
        else:  # Linux
            possible_paths = [
                Path("/usr/bin/pwsh"),
                Path("/usr/local/bin/pwsh"),
                Path("/opt/microsoft/powershell/7/pwsh"),
                home / ".dotnet" / "tools" / "pwsh",
            ]
        for path in possible_paths:
            if path.exists():
                return str(path)
        return None

    @classmethod
    def _get_pses_path(cls, solidlsp_settings: SolidLSPSettings) -> str | None:
        """Get the path to PowerShell Editor Services installation.

        Returns the path to Start-EditorServices.ps1 when PSES is already
        installed under the resources directory, else None.
        """
        install_dir = Path(cls.ls_resources_dir(solidlsp_settings)) / "powershell"
        start_script = install_dir / "PowerShellEditorServices" / "Start-EditorServices.ps1"
        if start_script.exists():
            return str(start_script)
        return None

    @classmethod
    def _download_pses(cls, solidlsp_settings: SolidLSPSettings) -> str:
        """Download and install PowerShell Editor Services.

        Fetches the PSES_VERSION release zip from GitHub, extracts it into
        the resources directory, and returns the Start-EditorServices.ps1 path.

        Raises:
            RuntimeError: if the start script is missing after extraction.
        """
        download_url = (
            f"https://github.com/PowerShell/PowerShellEditorServices/releases/download/v{PSES_VERSION}/PowerShellEditorServices.zip"
        )
        # Create installation directory
        install_dir = Path(cls.ls_resources_dir(solidlsp_settings)) / "powershell"
        install_dir.mkdir(parents=True, exist_ok=True)
        # Download the file
        log.info(f"Downloading PowerShell Editor Services from {download_url}...")
        response = requests.get(download_url, stream=True, timeout=120)
        response.raise_for_status()
        # Save the zip file
        zip_path = install_dir / "PowerShellEditorServices.zip"
        with open(zip_path, "wb") as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)
        log.info(f"Extracting PowerShell Editor Services to {install_dir}...")
        # NOTE(review): extractall() without member-path validation — unlike
        # TaploServer._download_taplo there is no zip-slip check here; consider
        # validating archive member paths before extraction.
        with zipfile.ZipFile(zip_path, "r") as zip_ref:
            zip_ref.extractall(install_dir)
        # Clean up zip file
        zip_path.unlink()
        start_script = install_dir / "PowerShellEditorServices" / "Start-EditorServices.ps1"
        if not start_script.exists():
            raise RuntimeError(f"Failed to find Start-EditorServices.ps1 after extraction at {start_script}")
        log.info(f"PowerShell Editor Services installed at: {install_dir}")
        return str(start_script)

    @classmethod
    def _setup_runtime_dependency(cls, solidlsp_settings: SolidLSPSettings) -> tuple[str, str, str]:
        """
        Check if required PowerShell runtime dependencies are available.
        Downloads PowerShell Editor Services if not present.

        Returns:
            tuple: (pwsh_path, start_script_path, bundled_modules_path)

        Raises:
            RuntimeError: if PowerShell Core (pwsh) itself is not installed.
        """
        # Check for PowerShell Core
        pwsh_path = cls._get_pwsh_path()
        if not pwsh_path:
            raise RuntimeError(
                "PowerShell Core (pwsh) is not installed or not in PATH. "
                "Please install PowerShell 7+ from https://github.com/PowerShell/PowerShell"
            )
        # Check for PowerShell Editor Services
        pses_path = cls._get_pses_path(solidlsp_settings)
        if not pses_path:
            log.info("PowerShell Editor Services not found. Downloading...")
            pses_path = cls._download_pses(solidlsp_settings)
        # The bundled modules path is the directory containing PowerShellEditorServices
        bundled_modules_path = str(Path(pses_path).parent)
        return pwsh_path, pses_path, bundled_modules_path

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        pwsh_path, pses_path, bundled_modules_path = self._setup_runtime_dependency(solidlsp_settings)
        # Create a temp directory for PSES logs and session details
        pses_temp_dir = Path(tempfile.gettempdir()) / "solidlsp_pses"
        pses_temp_dir.mkdir(parents=True, exist_ok=True)
        log_path = pses_temp_dir / "pses.log"
        session_details_path = pses_temp_dir / "session.json"
        # Build the command to start PowerShell Editor Services in stdio mode
        # PSES requires several parameters beyond just -Stdio
        # Using list format for robust argument handling - the PowerShell command
        # after -Command must be a single string element
        # NOTE(review): paths are single-quoted into the command string; a path
        # containing an apostrophe would break quoting — confirm acceptable.
        pses_command = (
            f"& '{pses_path}' "
            f"-HostName 'SolidLSP' "
            f"-HostProfileId 'solidlsp' "
            f"-HostVersion '1.0.0' "
            f"-BundledModulesPath '{bundled_modules_path}' "
            f"-LogPath '{log_path}' "
            f"-LogLevel 'Information' "
            f"-SessionDetailsPath '{session_details_path}' "
            f"-Stdio"
        )
        cmd: list[str] = [
            pwsh_path,
            "-NoLogo",
            "-NoProfile",
            "-Command",
            pses_command,
        ]
        super().__init__(
            config,
            repository_root_path,
            ProcessLaunchInfo(cmd=cmd, cwd=repository_root_path),
            "powershell",
            solidlsp_settings,
        )
        # Set once PSES signals readiness (see _start_server).
        self.server_ready = threading.Event()

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the PowerShell Editor Services.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                        },
                    },
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        # All LSP SymbolKind values (1..26).
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "signatureHelp": {
                        "dynamicRegistration": True,
                        "signatureInformation": {
                            "documentationFormat": ["markdown", "plaintext"],
                            "parameterInformation": {"labelOffsetSupport": True},
                        },
                    },
                    "codeAction": {"dynamicRegistration": True},
                    "formatting": {"dynamicRegistration": True},
                    "rangeFormatting": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "configuration": True,
                    "symbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore[return-value]

    def _start_server(self) -> None:
        """
        Starts the PowerShell Editor Services, waits for the server to be ready.
        """
        # Methods PSES registered dynamically after `initialized`.
        self._dynamic_capabilities: set[str] = set()

        def register_capability_handler(params: dict) -> None:
            """Handle dynamic capability registration from PSES."""
            registrations = params.get("registrations", [])
            for reg in registrations:
                method = reg.get("method", "")
                log.info(f"PSES registered dynamic capability: {method}")
                self._dynamic_capabilities.add(method)
                # Mark server ready when we get document symbol registration
                if method == "textDocument/documentSymbol":
                    self.server_ready.set()
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")
            # Check for PSES ready signals
            message_text = msg.get("message", "")
            if "started" in message_text.lower() or "ready" in message_text.lower():
                log.info("PowerShell Editor Services ready signal detected")
                self.server_ready.set()

        def do_nothing(params: dict) -> None:
            return

        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        self.server.on_notification("powerShell/executionStatusChanged", do_nothing)
        log.info("Starting PowerShell Editor Services process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        log.info(f"Received initialize response from PowerShell server: {init_response}")
        # Verify server capabilities - PSES uses dynamic capability registration
        # so we check for either static or dynamic capabilities
        capabilities = init_response.get("capabilities", {})
        log.info(f"Server capabilities: {capabilities}")
        # Send initialized notification to trigger dynamic capability registration
        self.server.notify.initialized({})
        # Wait for server readiness with timeout
        log.info("Waiting for PowerShell Editor Services to be ready...")
        if not self.server_ready.wait(timeout=10.0):
            # Fallback: assume server is ready after timeout
            log.info("Timeout waiting for PSES ready signal, proceeding anyway")
            self.server_ready.set()
        else:
            log.info("PowerShell Editor Services initialization complete")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/powershell_language_server.py",
"license": "MIT License",
"lines": 278,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/taplo_server.py | """
Provides TOML specific instantiation of the LanguageServer class using Taplo.
Contains various configurations and settings specific to TOML files.
"""
import gzip
import hashlib
import logging
import os
import platform
import shutil
import socket
import stat
import urllib.request
from typing import Any
# Download timeout in seconds (prevents indefinite hangs); applied via
# socket.setdefaulttimeout() around the urlretrieve call in _download_taplo.
DOWNLOAD_TIMEOUT_SECONDS = 120
from solidlsp.ls import LanguageServerDependencyProvider, LanguageServerDependencyProviderSinglePath, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_utils import PathUtils
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
# Module-level logger for this language-server integration.
log = logging.getLogger(__name__)

# Taplo release version and download URLs
TAPLO_VERSION = "0.10.0"
TAPLO_DOWNLOAD_BASE = f"https://github.com/tamasfe/taplo/releases/download/{TAPLO_VERSION}"

# SHA256 checksums for Taplo releases (verified from official GitHub releases)
# Source: https://github.com/tamasfe/taplo/releases/tag/0.10.0
# To update: download each release file and run: sha256sum <filename>
# Keys are the archive basenames produced by _get_taplo_download_url().
TAPLO_SHA256_CHECKSUMS: dict[str, str] = {
    "taplo-windows-x86_64.zip": "1615eed140039bd58e7089109883b1c434de5d6de8f64a993e6e8c80ca57bdf9",
    "taplo-windows-x86.zip": "b825701daab10dcfc0251e6d668cd1a9c0e351e7f6762dd20844c3f3f3553aa0",
    "taplo-darwin-x86_64.gz": "898122cde3a0b1cd1cbc2d52d3624f23338218c91b5ddb71518236a4c2c10ef2",
    "taplo-darwin-aarch64.gz": "713734314c3e71894b9e77513c5349835eefbd52908445a0d73b0c7dc469347d",
    "taplo-linux-x86_64.gz": "8fe196b894ccf9072f98d4e1013a180306e17d244830b03986ee5e8eabeb6156",
    "taplo-linux-aarch64.gz": "033681d01eec8376c3fd38fa3703c79316f5e14bb013d859943b60a07bccdcc3",
    "taplo-linux-armv7.gz": "6b728896afe2573522f38b8e668b1ff40eb5928fd9d6d0c253ecae508274d417",
}
def _verify_sha256(file_path: str, expected_hash: str) -> bool:
"""Verify SHA256 checksum of a downloaded file."""
sha256_hash = hashlib.sha256()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(8192), b""):
sha256_hash.update(chunk)
actual_hash = sha256_hash.hexdigest()
return actual_hash.lower() == expected_hash.lower()
def _get_taplo_download_url() -> tuple[str, str]:
    """
    Get the appropriate Taplo download URL for the current platform.

    Returns:
        Tuple of (download_url, executable_name), where the URL points at the
        platform-specific release archive and the executable name is the
        binary expected after extraction ("taplo" or "taplo.exe").
    """
    system = platform.system().lower()
    machine = platform.machine().lower()
    # Map machine architecture to Taplo naming convention
    arch_map = {
        "x86_64": "x86_64",
        "amd64": "x86_64",
        "x86": "x86",
        "i386": "x86",
        "i686": "x86",
        "aarch64": "aarch64",
        "arm64": "aarch64",
        "armv7l": "armv7",
    }
    arch = arch_map.get(machine, "x86_64")  # Default to x86_64
    if system == "windows":
        filename = f"taplo-windows-{arch}.zip"
        executable = "taplo.exe"
    elif system == "darwin":
        filename = f"taplo-darwin-{arch}.gz"
        executable = "taplo"
    else:  # Linux and others
        filename = f"taplo-linux-{arch}.gz"
        executable = "taplo"
    # BUG FIX: the URL previously ended in the literal "(unknown)" instead of
    # the computed archive name, so downloads could never resolve and the
    # checksum table (keyed by the URL basename) never matched.
    return f"{TAPLO_DOWNLOAD_BASE}/{filename}", executable
class TaploServer(SolidLanguageServer):
    """
    Provides TOML specific instantiation of the LanguageServer class using Taplo.
    Taplo is a TOML toolkit with LSP support for validation, formatting, and schema support.
    """

    @staticmethod
    def _determine_log_level(line: str) -> int:
        """Classify Taplo stderr output to avoid false-positive errors."""
        line_lower = line.lower()
        # Known informational messages from Taplo
        if any(
            [
                "schema" in line_lower and "not found" in line_lower,
                "warning" in line_lower,
            ]
        ):
            return logging.DEBUG
        return SolidLanguageServer._determine_log_level(line)

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates a TaploServer instance. This class is not meant to be instantiated directly.
        Use LanguageServer.create() instead.
        """
        # Launch info is None: the DependencyProvider supplies the command lazily.
        super().__init__(
            config,
            repository_root_path,
            None,
            "toml",
            solidlsp_settings,
        )

    def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
        # Resolves (or downloads) the taplo binary and builds the launch command.
        return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)

    class DependencyProvider(LanguageServerDependencyProviderSinglePath):
        def _get_or_install_core_dependency(self) -> str:
            """
            Setup runtime dependencies for Taplo and return the command to start the server.

            Resolution order: system-wide `taplo` on PATH, then a previously
            cached download, then a fresh download.
            """
            # First check if taplo is already installed system-wide
            system_taplo = shutil.which("taplo")
            if system_taplo:
                log.info(f"Using system-installed Taplo at: {system_taplo}")
                return system_taplo
            # Setup local installation directory
            taplo_dir = os.path.join(self._ls_resources_dir, "taplo")
            os.makedirs(taplo_dir, exist_ok=True)
            _, executable_name = _get_taplo_download_url()
            taplo_executable = os.path.join(taplo_dir, executable_name)
            if os.path.exists(taplo_executable) and os.access(taplo_executable, os.X_OK):
                log.info(f"Using cached Taplo at: {taplo_executable}")
                return taplo_executable
            # Download and install Taplo
            log.info(f"Taplo not found. Downloading version {TAPLO_VERSION}...")
            self._download_taplo(taplo_dir, taplo_executable)
            if not os.path.exists(taplo_executable):
                raise FileNotFoundError(
                    f"Taplo executable not found at {taplo_executable}. "
                    "Installation may have failed. Try installing manually: cargo install taplo-cli --locked"
                )
            return taplo_executable

        def _create_launch_command(self, core_path: str) -> list[str]:
            # Run taplo as an LSP server communicating over stdio.
            return [core_path, "lsp", "stdio"]

        @classmethod
        def _download_taplo(cls, install_dir: str, executable_path: str) -> None:
            """Download and extract Taplo binary with SHA256 verification."""
            # TODO: consider using existing download utilities in SolidLSP instead of the custom logic here
            download_url, _ = _get_taplo_download_url()
            archive_filename = os.path.basename(download_url)
            try:
                log.info(f"Downloading Taplo from: {download_url}")
                archive_path = os.path.join(install_dir, archive_filename)
                # Download the archive with timeout to prevent indefinite hangs
                # NOTE(review): setdefaulttimeout is process-global; other
                # threads doing network I/O during the download inherit it.
                old_timeout = socket.getdefaulttimeout()
                try:
                    socket.setdefaulttimeout(DOWNLOAD_TIMEOUT_SECONDS)
                    urllib.request.urlretrieve(download_url, archive_path)
                finally:
                    socket.setdefaulttimeout(old_timeout)
                # Verify SHA256 checksum
                expected_hash = TAPLO_SHA256_CHECKSUMS.get(archive_filename)
                if expected_hash:
                    if not _verify_sha256(archive_path, expected_hash):
                        os.remove(archive_path)
                        raise RuntimeError(
                            f"SHA256 checksum verification failed for {archive_filename}. "
                            "The downloaded file may be corrupted or tampered with. "
                            "Try installing manually: cargo install taplo-cli --locked"
                        )
                    log.info(f"SHA256 checksum verified for {archive_filename}")
                else:
                    log.warning(
                        f"No SHA256 checksum available for {archive_filename}. "
                        "Skipping verification - consider installing manually: cargo install taplo-cli --locked"
                    )
                # Extract based on format
                if archive_path.endswith(".gz") and not archive_path.endswith(".tar.gz"):
                    # Single file gzip
                    with gzip.open(archive_path, "rb") as f_in:
                        with open(executable_path, "wb") as f_out:
                            f_out.write(f_in.read())
                elif archive_path.endswith(".zip"):
                    import zipfile

                    with zipfile.ZipFile(archive_path, "r") as zip_ref:
                        # Security: Validate paths to prevent zip slip vulnerability
                        for member in zip_ref.namelist():
                            member_path = os.path.normpath(os.path.join(install_dir, member))
                            if not member_path.startswith(os.path.normpath(install_dir)):
                                raise RuntimeError(f"Zip slip detected: {member} attempts to escape install directory")
                        zip_ref.extractall(install_dir)
                # Make executable on Unix systems
                if os.name != "nt":
                    os.chmod(executable_path, os.stat(executable_path).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
                # Clean up archive
                os.remove(archive_path)
                log.info(f"Taplo installed successfully at: {executable_path}")
            except Exception as e:
                log.error(f"Failed to download Taplo: {e}")
                raise RuntimeError(
                    f"Failed to download Taplo from {download_url}. Try installing manually: cargo install taplo-cli --locked"
                ) from e

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Taplo Language Server.
        """
        root_uri = PathUtils.path_to_uri(repository_absolute_path)
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        # All LSP SymbolKind values (1..26).
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "codeAction": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {"dynamicRegistration": True},
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore

    def _start_server(self) -> None:
        """
        Starts the Taplo Language Server and initializes it.
        """

        def register_capability_handler(params: Any) -> None:
            return

        def do_nothing(params: Any) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        # Ignore server-initiated messages we take no action on.
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Taplo server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request to Taplo server")
        init_response = self.server.send.initialize(initialize_params)
        log.debug(f"Received initialize response from Taplo: {init_response}")
        # Verify document symbol support (warn only; do not fail startup).
        capabilities = init_response.get("capabilities", {})
        if capabilities.get("documentSymbolProvider"):
            log.info("Taplo server supports document symbols")
        else:
            log.warning("Taplo server may have limited document symbol support")
        self.server.notify.initialized({})
        log.info("Taplo server initialization complete")

    def is_ignored_dirname(self, dirname: str) -> bool:
        """Define TOML-specific directories to ignore."""
        return super().is_ignored_dirname(dirname) or dirname in ["target", ".cargo", "node_modules"]
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/taplo_server.py",
"license": "MIT License",
"lines": 264,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/vue_language_server.py | """
Vue Language Server implementation using @vue/language-server (Volar) with companion TypeScript LS.
Operates in hybrid mode: Vue LS handles .vue files, TypeScript LS handles .ts/.js files.
"""
import logging
import os
import pathlib
import shutil
import threading
from pathlib import Path
from time import sleep
from typing import Any
from overrides import override
from solidlsp import ls_types
from solidlsp.language_servers.common import RuntimeDependency, RuntimeDependencyCollection
from solidlsp.language_servers.typescript_language_server import (
TypeScriptLanguageServer,
prefer_non_node_modules_definition,
)
from solidlsp.ls import LSPFileBuffer, SolidLanguageServer
from solidlsp.ls_config import Language, LanguageServerConfig
from solidlsp.ls_exceptions import SolidLSPException
from solidlsp.ls_types import Location
from solidlsp.ls_utils import PathUtils
from solidlsp.lsp_protocol_handler import lsp_types
from solidlsp.lsp_protocol_handler.lsp_types import DocumentSymbol, ExecuteCommandParams, InitializeParams, SymbolInformation
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class VueTypeScriptServer(TypeScriptLanguageServer):
    """TypeScript LS configured with @vue/typescript-plugin for Vue file support."""

    @classmethod
    @override
    def get_language_enum_instance(cls) -> Language:
        """Return TYPESCRIPT since this is a TypeScript language server variant.

        Note: VueTypeScriptServer is a companion server that uses TypeScript's language server
        with the Vue TypeScript plugin. It reports as TYPESCRIPT to maintain compatibility
        with the TypeScript language server infrastructure.
        """
        return Language.TYPESCRIPT

    class DependencyProvider(TypeScriptLanguageServer.DependencyProvider):
        # Class-level injection point: __init__ below sets this to a
        # pre-resolved typescript-language-server executable path so the
        # provider skips its own install logic.
        override_ts_ls_executable: str | None = None

        def _get_or_install_core_dependency(self) -> str:
            if self.override_ts_ls_executable is not None:
                return self.override_ts_ls_executable
            return super()._get_or_install_core_dependency()

    @override
    def _get_language_id_for_file(self, relative_file_path: str) -> str:
        """Return the correct language ID for files.

        Vue files must be opened with language ID "vue" for the @vue/typescript-plugin
        to process them correctly. The plugin is configured with "languages": ["vue"]
        in the initialization options.
        """
        ext = os.path.splitext(relative_file_path)[1].lower()
        if ext == ".vue":
            return "vue"
        elif ext in (".ts", ".tsx", ".mts", ".cts"):
            return "typescript"
        elif ext in (".js", ".jsx", ".mjs", ".cjs"):
            return "javascript"
        else:
            return "typescript"

    def __init__(
        self,
        config: LanguageServerConfig,
        repository_root_path: str,
        solidlsp_settings: SolidLSPSettings,
        vue_plugin_path: str,
        tsdk_path: str,
        ts_ls_executable_path: str,
    ):
        # Stash plugin/tsdk locations consumed by _get_initialize_params.
        self._vue_plugin_path = vue_plugin_path
        self._custom_tsdk_path = tsdk_path
        # The executable path is passed to the DependencyProvider via a class
        # attribute set only for the duration of super().__init__().
        # NOTE(review): this is not safe if multiple instances are constructed
        # concurrently — confirm construction is single-threaded.
        VueTypeScriptServer.DependencyProvider.override_ts_ls_executable = ts_ls_executable_path
        super().__init__(config, repository_root_path, solidlsp_settings)
        VueTypeScriptServer.DependencyProvider.override_ts_ls_executable = None

    @override
    def _get_initialize_params(self, repository_absolute_path: str) -> InitializeParams:
        params = super()._get_initialize_params(repository_absolute_path)
        # Register the Vue TypeScript plugin and point tsserver at our tsdk.
        params["initializationOptions"] = {
            "plugins": [
                {
                    "name": "@vue/typescript-plugin",
                    "location": self._vue_plugin_path,
                    "languages": ["vue"],
                }
            ],
            "tsserver": {
                "path": self._custom_tsdk_path,
            },
        }
        if "workspace" in params["capabilities"]:
            params["capabilities"]["workspace"]["executeCommand"] = {"dynamicRegistration": True}
        return params

    @override
    def _start_server(self) -> None:
        def workspace_configuration_handler(params: dict) -> list:
            # Reply with one empty configuration object per requested item.
            items = params.get("items", [])
            return [{} for _ in items]

        self.server.on_request("workspace/configuration", workspace_configuration_handler)
        super()._start_server()
class VueLanguageServer(SolidLanguageServer):
    """
    Language server for Vue Single File Components using @vue/language-server (Volar) with companion TypeScript LS.

    You can pass the following entries in ls_specific_settings["vue"]:
    - vue_language_server_version: Version of @vue/language-server to install (default: "3.1.5")

    Note: TypeScript versions are configured via ls_specific_settings["typescript"]:
    - typescript_version: Version of TypeScript to install (default: "5.9.3")
    - typescript_language_server_version: Version of typescript-language-server to install (default: "5.1.3")
    """

    # Seconds to wait for the companion TypeScript server to become ready.
    TS_SERVER_READY_TIMEOUT = 5.0
    # Seconds to wait for the Vue language server itself to become ready.
    VUE_SERVER_READY_TIMEOUT = 3.0
    # Seconds to pause after bulk-opening .vue files on the TS server.
    # Windows requires more time due to slower I/O and process operations.
    VUE_INDEXING_WAIT_TIME = 4.0 if os.name == "nt" else 2.0
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
    # Resolve the Vue LS executable plus tsdk path and companion TS LS command
    # before the base class spawns the Vue server process.
    vue_lsp_executable_path, self.tsdk_path, self._ts_ls_cmd = self._setup_runtime_dependencies(config, solidlsp_settings)
    self._vue_ls_dir = os.path.join(self.ls_resources_dir(solidlsp_settings), "vue-lsp")
    super().__init__(
        config,
        repository_root_path,
        ProcessLaunchInfo(cmd=vue_lsp_executable_path, cwd=repository_root_path),
        "vue",
        solidlsp_settings,
    )
    self.server_ready = threading.Event()
    self.initialize_searcher_command_available = threading.Event()
    # Companion TypeScript server (created later) and .vue indexing state.
    self._ts_server: VueTypeScriptServer | None = None
    self._ts_server_started = False
    self._vue_files_indexed = False
    self._indexed_vue_file_uris: list[str] = []
@override
def is_ignored_dirname(self, dirname: str) -> bool:
    """Ignore Vue build artifacts and dependency directories in addition to the defaults."""
    if super().is_ignored_dirname(dirname):
        return True
    vue_specific_ignores = (
        "node_modules",
        "dist",
        "build",
        "coverage",
        ".nuxt",
        ".output",
    )
    return dirname in vue_specific_ignores
@override
def _get_language_id_for_file(self, relative_file_path: str) -> str:
ext = os.path.splitext(relative_file_path)[1].lower()
if ext == ".vue":
return "vue"
elif ext in (".ts", ".tsx", ".mts", ".cts"):
return "typescript"
elif ext in (".js", ".jsx", ".mjs", ".cjs"):
return "javascript"
else:
return "vue"
def _is_typescript_file(self, file_path: str) -> bool:
ext = os.path.splitext(file_path)[1].lower()
return ext in (".ts", ".tsx", ".mts", ".cts", ".js", ".jsx", ".mjs", ".cjs")
def _find_all_vue_files(self) -> list[str]:
vue_files = []
repo_path = Path(self.repository_root_path)
for vue_file in repo_path.rglob("*.vue"):
try:
relative_path = str(vue_file.relative_to(repo_path))
if "node_modules" not in relative_path and not relative_path.startswith("."):
vue_files.append(relative_path)
except Exception as e:
log.debug(f"Error processing Vue file {vue_file}: {e}")
return vue_files
def _ensure_vue_files_indexed_on_ts_server(self) -> None:
    """Open every .vue file on the companion TypeScript server (at most once)
    so cross-file references into .vue components can be resolved.

    No-op after the first successful run; requires the companion TS server
    to have been created already.
    """
    if self._vue_files_indexed:
        return
    assert self._ts_server is not None
    log.info("Indexing .vue files on TypeScript server for cross-file references")
    vue_files = self._find_all_vue_files()
    log.debug(f"Found {len(vue_files)} .vue files to index")
    for vue_file in vue_files:
        try:
            with self._ts_server.open_file(vue_file) as file_buffer:
                # Bump the ref count before the context manager exits —
                # presumably to keep the buffer open on the TS server after
                # this loop; confirm against LSPFileBuffer semantics.
                file_buffer.ref_count += 1
                self._indexed_vue_file_uris.append(file_buffer.uri)
        except Exception as e:
            # Best-effort: a file that fails to open is skipped, not fatal.
            log.debug(f"Failed to open {vue_file} on TS server: {e}")
    self._vue_files_indexed = True
    log.info("Vue file indexing on TypeScript server complete")
    # Give the TS server time to process the newly opened files before any
    # reference requests are issued (longer on Windows).
    sleep(self._get_vue_indexing_wait_time())
    log.debug("Wait period after Vue file indexing complete")
def _get_vue_indexing_wait_time(self) -> float:
return self.VUE_INDEXING_WAIT_TIME
    def _send_references_request(self, relative_file_path: str, line: int, column: int) -> list[lsp_types.Location] | None:
        """Send a textDocument/references request to the Vue language server itself.

        line/column are zero-based LSP positions; the declaration itself is excluded
        from the results.
        """
        uri = PathUtils.path_to_uri(os.path.join(self.repository_root_path, relative_file_path))
        request_params = {
            "textDocument": {"uri": uri},
            "position": {"line": line, "character": column},
            "context": {"includeDeclaration": False},
        }
        return self.server.send.references(request_params)  # type: ignore[arg-type]
    def _send_ts_references_request(self, relative_file_path: str, line: int, column: int) -> list[ls_types.Location]:
        """Query the companion TypeScript server for references at a zero-based position.

        The declaration is included. Results outside the repository or in ignored
        paths are dropped; remaining locations are enriched with absolutePath and
        relativePath fields.
        """
        assert self._ts_server is not None
        uri = PathUtils.path_to_uri(os.path.join(self.repository_root_path, relative_file_path))
        request_params = {
            "textDocument": {"uri": uri},
            "position": {"line": line, "character": column},
            "context": {"includeDeclaration": True},
        }
        with self._ts_server.open_file(relative_file_path):
            response = self._ts_server.handler.send.references(request_params)  # type: ignore[arg-type]
        result: list[ls_types.Location] = []
        if response is not None:
            for item in response:
                abs_path = PathUtils.uri_to_path(item["uri"])
                # Drop references that point outside the repository (e.g. into libraries).
                if not Path(abs_path).is_relative_to(self.repository_root_path):
                    log.debug(f"Found reference outside repository: {abs_path}, skipping")
                    continue
                rel_path = Path(abs_path).relative_to(self.repository_root_path)
                if self.is_ignored_path(str(rel_path)):
                    log.debug(f"Ignoring reference in {rel_path}")
                    continue
                # Copy the raw LSP location and add Serena's path fields.
                new_item: dict = {}
                new_item.update(item)  # type: ignore[arg-type]
                new_item["absolutePath"] = str(abs_path)
                new_item["relativePath"] = str(rel_path)
                result.append(ls_types.Location(**new_item))  # type: ignore
        return result
    def request_file_references(self, relative_file_path: str) -> list:
        """Find all locations that reference the given file as a whole (e.g. component imports).

        Uses the non-standard ``volar/client/findFileReference`` request supported by
        the Vue language server. Returns an empty list when nothing is found, the
        response has an unexpected shape, or any error occurs.

        :raises SolidLSPException: if the language server has not been started
        """
        if not self.server_started:
            log.error("request_file_references called before Language Server started")
            raise SolidLSPException("Language Server not started")
        absolute_file_path = os.path.join(self.repository_root_path, relative_file_path)
        uri = PathUtils.path_to_uri(absolute_file_path)
        request_params = {"textDocument": {"uri": uri}}
        log.info(f"Sending volar/client/findFileReference request for {relative_file_path}")
        log.info(f"Request URI: {uri}")
        log.info(f"Request params: {request_params}")
        try:
            with self.open_file(relative_file_path):
                log.debug(f"Sending volar/client/findFileReference for {relative_file_path}")
                log.debug(f"Request params: {request_params}")
                response = self.server.send_request("volar/client/findFileReference", request_params)
                log.debug(f"Received response type: {type(response)}")
                log.info(f"Received file references response: {response}")
                log.info(f"Response type: {type(response)}")
                if response is None:
                    log.debug(f"No file references found for {relative_file_path}")
                    return []
                # Response should be an array of Location objects
                if not isinstance(response, list):
                    log.warning(f"Unexpected response format from volar/client/findFileReference: {type(response)}")
                    return []
                ret: list[Location] = []
                for item in response:
                    if not isinstance(item, dict) or "uri" not in item:
                        log.debug(f"Skipping invalid location item: {item}")
                        continue
                    abs_path = PathUtils.uri_to_path(item["uri"])  # type: ignore[arg-type]
                    # Drop references that resolve outside the repository root.
                    if not Path(abs_path).is_relative_to(self.repository_root_path):
                        log.warning(f"Found file reference outside repository: {abs_path}, skipping")
                        continue
                    rel_path = Path(abs_path).relative_to(self.repository_root_path)
                    if self.is_ignored_path(str(rel_path)):
                        log.debug(f"Ignoring file reference in {rel_path}")
                        continue
                    # Copy the raw LSP location and add Serena's path fields.
                    new_item: dict = {}
                    new_item.update(item)  # type: ignore[arg-type]
                    new_item["absolutePath"] = str(abs_path)
                    new_item["relativePath"] = str(rel_path)
                    ret.append(Location(**new_item))  # type: ignore
                log.debug(f"Found {len(ret)} file references for {relative_file_path}")
                return ret
        except Exception as e:
            # Best-effort: file-level references are an optional enrichment, so errors degrade to [].
            log.warning(f"Error requesting file references for {relative_file_path}: {e}")
            return []
    @override
    def request_references(self, relative_file_path: str, line: int, column: int) -> list[ls_types.Location]:
        """Find references to the symbol at the given zero-based position.

        Symbol-level references are resolved via the companion TypeScript server.
        For .vue files, file-level references (component usages) are additionally
        merged in, deduplicated by (uri, start line, start character).

        :raises SolidLSPException: if the language server has not been started
        """
        if not self.server_started:
            log.error("request_references called before Language Server started")
            raise SolidLSPException("Language Server not started")
        # One-time warm-up: give the servers time to finish cross-file indexing.
        if not self._has_waited_for_cross_file_references:
            sleep(self._get_wait_time_for_cross_file_referencing())
            self._has_waited_for_cross_file_references = True
        self._ensure_vue_files_indexed_on_ts_server()
        symbol_refs = self._send_ts_references_request(relative_file_path, line=line, column=column)
        if relative_file_path.endswith(".vue"):
            log.info(f"Attempting to find file-level references for Vue component {relative_file_path}")
            file_refs = self.request_file_references(relative_file_path)
            log.info(f"file_refs result: {len(file_refs)} references found")
            # Deduplicate: record every symbol ref's position, then only append
            # file refs at positions not already present.
            seen = set()
            for ref in symbol_refs:
                key = (ref["uri"], ref["range"]["start"]["line"], ref["range"]["start"]["character"])
                seen.add(key)
            for file_ref in file_refs:
                key = (file_ref["uri"], file_ref["range"]["start"]["line"], file_ref["range"]["start"]["character"])
                if key not in seen:
                    symbol_refs.append(file_ref)
                    seen.add(key)
            log.info(f"Total references for {relative_file_path}: {len(symbol_refs)} (symbol refs + file refs, deduplicated)")
        return symbol_refs
    @override
    def request_definition(self, relative_file_path: str, line: int, column: int) -> list[ls_types.Location]:
        """Resolve the definition of the symbol at the given zero-based position.

        Delegated to the companion TypeScript server, which understands .vue files
        through the Vue TypeScript plugin.

        :raises SolidLSPException: if the language server has not been started
        """
        if not self.server_started:
            log.error("request_definition called before Language Server started")
            raise SolidLSPException("Language Server not started")
        assert self._ts_server is not None
        with self._ts_server.open_file(relative_file_path):
            return self._ts_server.request_definition(relative_file_path, line, column)
    @override
    def request_rename_symbol_edit(self, relative_file_path: str, line: int, column: int, new_name: str) -> ls_types.WorkspaceEdit | None:
        """Compute the workspace edit for renaming the symbol at the given zero-based position.

        Delegated to the companion TypeScript server; returns None if the rename is
        not possible at that position.

        :raises SolidLSPException: if the language server has not been started
        """
        if not self.server_started:
            log.error("request_rename_symbol_edit called before Language Server started")
            raise SolidLSPException("Language Server not started")
        assert self._ts_server is not None
        with self._ts_server.open_file(relative_file_path):
            return self._ts_server.request_rename_symbol_edit(relative_file_path, line, column, new_name)
@classmethod
def _setup_runtime_dependencies(cls, config: LanguageServerConfig, solidlsp_settings: SolidLSPSettings) -> tuple[list[str], str, str]:
is_node_installed = shutil.which("node") is not None
assert is_node_installed, "node is not installed or isn't in PATH. Please install NodeJS and try again."
is_npm_installed = shutil.which("npm") is not None
assert is_npm_installed, "npm is not installed or isn't in PATH. Please install npm and try again."
# Get TypeScript version settings from TypeScript language server settings
typescript_config = solidlsp_settings.get_ls_specific_settings(Language.TYPESCRIPT)
typescript_version = typescript_config.get("typescript_version", "5.9.3")
typescript_language_server_version = typescript_config.get("typescript_language_server_version", "5.1.3")
vue_config = solidlsp_settings.get_ls_specific_settings(Language.VUE)
vue_language_server_version = vue_config.get("vue_language_server_version", "3.1.5")
deps = RuntimeDependencyCollection(
[
RuntimeDependency(
id="vue-language-server",
description="Vue language server package (Volar)",
command=["npm", "install", "--prefix", "./", f"@vue/language-server@{vue_language_server_version}"],
platform_id="any",
),
RuntimeDependency(
id="typescript",
description="TypeScript (required for tsdk)",
command=["npm", "install", "--prefix", "./", f"typescript@{typescript_version}"],
platform_id="any",
),
RuntimeDependency(
id="typescript-language-server",
description="TypeScript language server (for Vue LS 3.x tsserver forwarding)",
command=[
"npm",
"install",
"--prefix",
"./",
f"typescript-language-server@{typescript_language_server_version}",
],
platform_id="any",
),
]
)
vue_ls_dir = os.path.join(cls.ls_resources_dir(solidlsp_settings), "vue-lsp")
vue_executable_path = os.path.join(vue_ls_dir, "node_modules", ".bin", "vue-language-server")
ts_ls_executable_path = os.path.join(vue_ls_dir, "node_modules", ".bin", "typescript-language-server")
if os.name == "nt":
vue_executable_path += ".cmd"
ts_ls_executable_path += ".cmd"
tsdk_path = os.path.join(vue_ls_dir, "node_modules", "typescript", "lib")
# Check if installation is needed based on executables AND version
version_file = os.path.join(vue_ls_dir, ".installed_version")
expected_version = f"{vue_language_server_version}_{typescript_version}_{typescript_language_server_version}"
needs_install = False
if not os.path.exists(vue_executable_path) or not os.path.exists(ts_ls_executable_path):
log.info("Vue/TypeScript Language Server executables not found.")
needs_install = True
elif os.path.exists(version_file):
with open(version_file) as f:
installed_version = f.read().strip()
if installed_version != expected_version:
log.info(
f"Vue Language Server version mismatch: installed={installed_version}, expected={expected_version}. Reinstalling..."
)
needs_install = True
else:
# No version file exists, assume old installation needs refresh
log.info("Vue Language Server version file not found. Reinstalling to ensure correct version...")
needs_install = True
if needs_install:
log.info("Installing Vue/TypeScript Language Server dependencies...")
deps.install(vue_ls_dir)
# Write version marker file
with open(version_file, "w") as f:
f.write(expected_version)
log.info("Vue language server dependencies installed successfully")
if not os.path.exists(vue_executable_path):
raise FileNotFoundError(
f"vue-language-server executable not found at {vue_executable_path}, something went wrong with the installation."
)
if not os.path.exists(ts_ls_executable_path):
raise FileNotFoundError(
f"typescript-language-server executable not found at {ts_ls_executable_path}, something went wrong with the installation."
)
return [vue_executable_path, "--stdio"], tsdk_path, ts_ls_executable_path
    def _get_initialize_params(self, repository_absolute_path: str) -> InitializeParams:
        """Build the LSP initialize params for the Vue language server.

        Notable initialization options: ``hybridMode=True`` (tsserver queries are
        forwarded to the companion TypeScript server via tsserver/request
        notifications) and the tsdk path of the managed TypeScript installation.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
                    "definition": {"dynamicRegistration": True, "linkSupport": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        # Advertise all LSP SymbolKind values (1..26)
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "signatureHelp": {"dynamicRegistration": True},
                    "codeAction": {"dynamicRegistration": True},
                    "rename": {"dynamicRegistration": True, "prepareSupport": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {"dynamicRegistration": True},
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
            "initializationOptions": {
                "vue": {
                    # Hybrid mode: the Vue LS delegates TypeScript work to an external tsserver
                    "hybridMode": True,
                },
                "typescript": {
                    "tsdk": self.tsdk_path,
                },
            },
        }
        return initialize_params  # type: ignore
    def _start_typescript_server(self) -> None:
        """Create and start the companion TypeScript server used for tsserver forwarding.

        The TS server is configured with the @vue/typescript-plugin so it understands
        .vue files. On failure the partial state is cleared and the exception re-raised.

        :raises Exception: whatever the underlying server startup raises
        """
        try:
            vue_ts_plugin_path = os.path.join(self._vue_ls_dir, "node_modules", "@vue", "typescript-plugin")
            ts_config = LanguageServerConfig(
                code_language=Language.TYPESCRIPT,
                trace_lsp_communication=False,
            )
            log.info("Creating companion VueTypeScriptServer")
            self._ts_server = VueTypeScriptServer(
                config=ts_config,
                repository_root_path=self.repository_root_path,
                solidlsp_settings=self._solidlsp_settings,
                vue_plugin_path=vue_ts_plugin_path,
                tsdk_path=self.tsdk_path,
                ts_ls_executable_path=self._ts_ls_cmd,
            )
            log.info("Starting companion TypeScript server")
            self._ts_server.start()
            log.info("Waiting for companion TypeScript server to be ready...")
            if not self._ts_server.server_ready.wait(timeout=self.TS_SERVER_READY_TIMEOUT):
                log.warning(
                    f"Timeout waiting for companion TypeScript server to be ready after {self.TS_SERVER_READY_TIMEOUT} seconds, proceeding anyway"
                )
                # Proceed optimistically; later requests may still succeed.
                self._ts_server.server_ready.set()
            self._ts_server_started = True
            log.info("Companion TypeScript server ready")
        except Exception as e:
            log.error(f"Error starting TypeScript server: {e}")
            self._ts_server = None
            self._ts_server_started = False
            raise
    def _forward_tsserver_request(self, method: str, params: dict) -> Any:
        """Forward a raw tsserver request to the companion TypeScript server.

        Uses the typescript-language-server ``typescript.tsserverRequest`` workspace
        command. Returns the response (unwrapped from its "body" field when present),
        or None if the TS server is unavailable or the request fails.
        """
        if self._ts_server is None:
            log.error("Cannot forward tsserver request - TypeScript server not started")
            return None
        try:
            execute_params: ExecuteCommandParams = {
                "command": "typescript.tsserverRequest",
                "arguments": [method, params, {"isAsync": True, "lowPriority": True}],
            }
            result = self._ts_server.handler.send.execute_command(execute_params)
            log.debug(f"TypeScript server raw response for {method}: {result}")
            # tsserver wraps its payload in a "body" field; unwrap when present.
            if isinstance(result, dict) and "body" in result:
                return result["body"]
            return result
        except Exception as e:
            log.error(f"Error forwarding tsserver request {method}: {e}")
            return None
    def _cleanup_indexed_vue_files(self) -> None:
        """Release the .vue buffers opened for indexing on the companion TS server.

        Decrements each tracked buffer's ref count and closes the document once no
        other holder remains. Safe to call repeatedly; errors per file are logged
        at debug level and skipped.
        """
        if not self._indexed_vue_file_uris or self._ts_server is None:
            return
        log.debug(f"Cleaning up {len(self._indexed_vue_file_uris)} indexed Vue files")
        for uri in self._indexed_vue_file_uris:
            try:
                if uri in self._ts_server.open_file_buffers:
                    file_buffer = self._ts_server.open_file_buffers[uri]
                    file_buffer.ref_count -= 1
                    # Only close when we were the last holder of the buffer.
                    if file_buffer.ref_count == 0:
                        self._ts_server.server.notify.did_close_text_document({"textDocument": {"uri": uri}})
                        del self._ts_server.open_file_buffers[uri]
                        log.debug(f"Closed indexed Vue file: {uri}")
            except Exception as e:
                log.debug(f"Error closing indexed Vue file {uri}: {e}")
        self._indexed_vue_file_uris.clear()
    def _stop_typescript_server(self) -> None:
        """Stop the companion TypeScript server, swallowing shutdown errors, and reset state."""
        if self._ts_server is not None:
            try:
                log.info("Stopping companion TypeScript server")
                self._ts_server.stop()
            except Exception as e:
                log.warning(f"Error stopping TypeScript server: {e}")
            finally:
                # Always clear the references, even if stop() failed.
                self._ts_server = None
                self._ts_server_started = False
    @override
    def _start_server(self) -> None:
        """Start the companion TypeScript server, then launch and initialize the Vue language server.

        Registers handlers for the non-standard ``tsserver/request`` notifications
        that Vue LS 3.x (hybrid mode) uses to route tsserver queries to an external
        TypeScript server.
        """
        self._start_typescript_server()

        def register_capability_handler(params: dict) -> None:
            # Flags that workspace/executeCommand became available.
            assert "registrations" in params
            for registration in params["registrations"]:
                if registration["method"] == "workspace/executeCommand":
                    self.initialize_searcher_command_available.set()
                    return

        def configuration_handler(params: dict) -> list:
            # Reply with one empty config section per requested item.
            items = params.get("items", [])
            return [{} for _ in items]

        def do_nothing(params: dict) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")
            # Heuristic readiness detection based on the server's log output.
            message_text = msg.get("message", "")
            if "initialized" in message_text.lower() or "ready" in message_text.lower():
                log.info("Vue language server ready signal detected")
                self.server_ready.set()

        def tsserver_request_notification_handler(params: list) -> None:
            # Expected params shape: [[request_id, method, method_params?]]
            try:
                if params and len(params) > 0 and len(params[0]) >= 2:
                    request_id = params[0][0]
                    method = params[0][1]
                    method_params = params[0][2] if len(params[0]) > 2 else {}
                    log.debug(f"Received tsserver/request: id={request_id}, method={method}")
                    if method == "_vue:projectInfo":
                        # Answered locally: map the file to its governing tsconfig.json.
                        file_path = method_params.get("file", "")
                        tsconfig_path = self._find_tsconfig_for_file(file_path)
                        result = {"configFileName": tsconfig_path} if tsconfig_path else None
                        response = [[request_id, result]]
                        self.server.notify.send_notification("tsserver/response", response)
                        log.debug(f"Sent tsserver/response for projectInfo: {tsconfig_path}")
                    else:
                        # Everything else is forwarded to the companion TypeScript server.
                        result = self._forward_tsserver_request(method, method_params)
                        response = [[request_id, result]]
                        self.server.notify.send_notification("tsserver/response", response)
                        log.debug(f"Forwarded tsserver/response for {method}: {result}")
                else:
                    log.warning(f"Unexpected tsserver/request params format: {params}")
            except Exception as e:
                log.error(f"Error handling tsserver/request: {e}")

        # Handlers must be registered before the process starts to avoid missing messages.
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_request("workspace/configuration", configuration_handler)
        self.server.on_notification("tsserver/request", tsserver_request_notification_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Vue server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        log.debug(f"Received initialize response from Vue server: {init_response}")
        # Sanity check: the server must offer full (1) or incremental (2) document sync.
        assert init_response["capabilities"]["textDocumentSync"] in [1, 2]
        self.server.notify.initialized({})
        log.info("Waiting for Vue language server to be ready...")
        if not self.server_ready.wait(timeout=self.VUE_SERVER_READY_TIMEOUT):
            # Proceed optimistically if the readiness heuristic never fired.
            log.info("Timeout waiting for Vue server ready signal, proceeding anyway")
            self.server_ready.set()
        else:
            log.info("Vue server initialization complete")
def _find_tsconfig_for_file(self, file_path: str) -> str | None:
if not file_path:
tsconfig_path = os.path.join(self.repository_root_path, "tsconfig.json")
return tsconfig_path if os.path.exists(tsconfig_path) else None
current_dir = os.path.dirname(file_path)
repo_root = os.path.abspath(self.repository_root_path)
while current_dir and current_dir.startswith(repo_root):
tsconfig_path = os.path.join(current_dir, "tsconfig.json")
if os.path.exists(tsconfig_path):
return tsconfig_path
parent = os.path.dirname(current_dir)
if parent == current_dir:
break
current_dir = parent
tsconfig_path = os.path.join(repo_root, "tsconfig.json")
return tsconfig_path if os.path.exists(tsconfig_path) else None
@override
def _get_wait_time_for_cross_file_referencing(self) -> float:
return 5.0
    @override
    def stop(self, shutdown_timeout: float = 5.0) -> None:
        """Stop the server: release indexed Vue buffers, stop the companion TS server, then the base server."""
        self._cleanup_indexed_vue_files()
        self._stop_typescript_server()
        super().stop(shutdown_timeout)
    @override
    def _get_preferred_definition(self, definitions: list[ls_types.Location]) -> ls_types.Location:
        # Prefer project sources over declarations living inside node_modules.
        return prefer_non_node_modules_definition(definitions)
    @override
    def _request_document_symbols(
        self, relative_file_path: str, file_data: LSPFileBuffer | None
    ) -> list[SymbolInformation] | list[DocumentSymbol] | None:
        """
        Override to filter out shorthand property references in Vue files.

        In Vue, when using shorthand syntax in defineExpose like `defineExpose({ pressCount })`,
        the Vue LSP returns both:

        - The Variable definition (e.g., `const pressCount = ref(0)`)
        - A Property symbol for the shorthand reference (e.g., `pressCount` in defineExpose)

        This causes duplicate symbols with the same name, which breaks symbol lookup.
        We filter out Property symbols that have a matching Variable with the same name
        at a different location (the definition), keeping only the definition.
        """
        symbols = super()._request_document_symbols(relative_file_path, file_data)
        if symbols is None or len(symbols) == 0:
            return symbols
        # Only process DocumentSymbol format (hierarchical symbols with a "range");
        # the flat SymbolInformation format doesn't exhibit the duplication issue.
        if not isinstance(symbols[0], dict) or "range" not in symbols[0]:
            return symbols
        return self._filter_shorthand_property_duplicates(symbols)
def _filter_shorthand_property_duplicates(
self, symbols: list[DocumentSymbol] | list[SymbolInformation]
) -> list[DocumentSymbol] | list[SymbolInformation]:
"""
Filter out Property symbols that have a matching Variable symbol with the same name.
This handles Vue's shorthand property syntax in defineExpose, where the same
identifier appears as both a Variable definition and a Property reference.
"""
VARIABLE_KIND = 13 # SymbolKind.Variable
PROPERTY_KIND = 7 # SymbolKind.Property
def filter_symbols(syms: list[dict]) -> list[dict]:
# Collect all Variable symbol names with their line numbers
variable_names: dict[str, set[int]] = {}
for sym in syms:
if sym.get("kind") == VARIABLE_KIND:
name = sym.get("name", "")
line = sym.get("range", {}).get("start", {}).get("line", -1)
if name not in variable_names:
variable_names[name] = set()
variable_names[name].add(line)
# Filter: keep symbols that are either:
# 1. Not a Property, or
# 2. A Property without a matching Variable name at a different location
filtered = []
for sym in syms:
name = sym.get("name", "")
kind = sym.get("kind")
line = sym.get("range", {}).get("start", {}).get("line", -1)
# If it's a Property with a matching Variable name at a DIFFERENT line, skip it
if kind == PROPERTY_KIND and name in variable_names:
# Check if there's a Variable definition at a different line
var_lines = variable_names[name]
if any(var_line != line for var_line in var_lines):
# This is a shorthand reference, skip it
log.debug(
f"Filtering shorthand property reference '{name}' at line {line} "
f"(Variable definition exists at line(s) {var_lines})"
)
continue
# Recursively filter children
children = sym.get("children", [])
if children:
sym = dict(sym) # Create a copy to avoid mutating the original
sym["children"] = filter_symbols(children)
filtered.append(sym)
return filtered
return filter_symbols(list(symbols)) # type: ignore
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/vue_language_server.py",
"license": "MIT License",
"lines": 669,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/solidlsp/fsharp/test_fsharp_basic.py | import os
import threading
from typing import Any
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_utils import SymbolUtils
from test.conftest import is_ci
@pytest.mark.fsharp
class TestFSharpLanguageServer:
    """Integration tests for the F# language server (FsAutoComplete) against the F# test repo.

    The ``language_server`` fixture (parametrized indirectly with ``Language.FSHARP``)
    provides a started server rooted at the F# test repository.
    """

    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test finding symbols in the full symbol tree."""
        symbols = language_server.request_full_symbol_tree()
        # Check for main program module symbols
        assert SymbolUtils.symbol_tree_contains_name(symbols, "Program"), "Program module not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "main"), "main function not found in symbol tree"
        # Check for Calculator module symbols
        assert SymbolUtils.symbol_tree_contains_name(symbols, "Calculator"), "Calculator module not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "add"), "add function not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "CalculatorClass"), "CalculatorClass not found in symbol tree"

    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_get_document_symbols_program(self, language_server: SolidLanguageServer) -> None:
        """Test getting document symbols from the main Program.fs file."""
        file_path = os.path.join("Program.fs")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()[0]
        # Look for expected functions and modules
        symbol_names = [s.get("name") for s in symbols]
        assert "main" in symbol_names, "main function not found in Program.fs symbols"

    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_get_document_symbols_calculator(self, language_server: SolidLanguageServer) -> None:
        """Test getting document symbols from Calculator.fs file."""
        file_path = os.path.join("Calculator.fs")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()[0]
        # Look for expected functions
        symbol_names = [s.get("name") for s in symbols]
        expected_symbols = ["add", "subtract", "multiply", "divide", "square", "factorial", "CalculatorClass"]
        for expected in expected_symbols:
            assert expected in symbol_names, f"{expected} function not found in Calculator.fs symbols"

    @pytest.mark.xfail(is_ci, reason="Test is flaky")  # TODO: Re-enable if the LS can be made more reliable #1040
    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_find_referencing_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test finding references using symbol selection range."""
        file_path = os.path.join("Calculator.fs")
        symbols = language_server.request_document_symbols(file_path)
        # Find the 'add' function symbol
        add_symbol = None
        for sym in symbols.iter_symbols():
            if sym.get("name") == "add":
                add_symbol = sym
                break
        assert add_symbol is not None, "Could not find 'add' function symbol in Calculator.fs"
        # Try to find references to the add function
        sel_start = add_symbol["selectionRange"]["start"]
        refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"] + 1)
        # The add function should be referenced in Program.fs
        assert any("Program.fs" in ref.get("relativePath", "") for ref in refs), "Program.fs should reference add function"

    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_nested_module_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test getting symbols from nested Models namespace."""
        file_path = os.path.join("Models", "Person.fs")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()[0]
        # Check for expected types and modules
        symbol_names = [s.get("name") for s in symbols]
        expected_symbols = ["Person", "PersonModule", "Address", "Employee"]
        for expected in expected_symbols:
            assert expected in symbol_names, f"{expected} not found in Person.fs symbols"

    @pytest.mark.xfail(is_ci, reason="Test is flaky")  # TODO: Re-enable if the LS can be made more reliable #1040
    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_find_referencing_symbols_across_files(self, language_server: SolidLanguageServer) -> None:
        """Test finding references to Calculator functions across files."""
        # Find the subtract function in Calculator.fs
        file_path = os.path.join("Calculator.fs")
        symbols = language_server.request_document_symbols(file_path)
        subtract_symbol = None
        for sym in symbols.iter_symbols():
            if sym.get("name") == "subtract":
                subtract_symbol = sym
                break
        assert subtract_symbol is not None, "Could not find 'subtract' function symbol"
        # Find references to subtract function
        sel_start = subtract_symbol["selectionRange"]["start"]
        refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"] + 1)
        # The subtract function should be referenced in Program.fs
        assert any("Program.fs" in ref.get("relativePath", "") for ref in refs), "Program.fs should reference subtract function"

    @pytest.mark.xfail(is_ci, reason="Test is flaky")  # TODO: Re-enable if the LS can be made more reliable #1040
    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_go_to_definition(self, language_server: SolidLanguageServer) -> None:
        """Test go-to-definition functionality."""
        # Test going to definition of 'add' function from Program.fs
        program_file = os.path.join("Program.fs")
        # Try to find definition of 'add' function used in Program.fs
        # This would typically be at the line where 'add 5 3' is called
        definitions = language_server.request_definition(program_file, 10, 20)  # Approximate position
        # The request must return a list (possibly empty for complex cases).
        # NOTE: the previous `len(definitions) >= 0` assertion was vacuous (always true).
        assert isinstance(definitions, list), "Should get a list of definitions (even if empty for complex cases)"

    @pytest.mark.xfail(is_ci, reason="Test is flaky")  # TODO: Re-enable if the LS can be made more reliable #1040
    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_hover_information(self, language_server: SolidLanguageServer) -> None:
        """Test hover information functionality."""
        file_path = os.path.join("Calculator.fs")
        # Try to get hover information for a function
        hover_info = language_server.request_hover(file_path, 5, 10)  # Approximate position of a function
        # Hover info might be None or contain information
        # This is acceptable as it depends on the LSP server's capabilities and timing
        assert hover_info is None or isinstance(hover_info, dict), "Hover info should be None or dict"

    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_completion(self, language_server: SolidLanguageServer) -> None:
        """Test code completion functionality."""
        file_path = os.path.join("Program.fs")
        # Use threading for cross-platform timeout (signal.SIGALRM is Unix-only)
        result: dict[str, Any] = dict(value=None)
        exception: dict[str, Any] = dict(value=None)

        def run_completion():
            try:
                result["value"] = language_server.request_completions(file_path, 15, 10)
            except Exception as e:
                exception["value"] = e

        thread = threading.Thread(target=run_completion, daemon=True)
        thread.start()
        thread.join(timeout=5)  # 5 second timeout
        if thread.is_alive():
            # Completion timed out, but this is acceptable for F# in some cases
            # The important thing is that the language server doesn't crash
            return
        if exception["value"]:
            raise exception["value"]
        assert isinstance(result["value"], list), "Completions should be a list"

    @pytest.mark.parametrize("language_server", [Language.FSHARP], indirect=True)
    def test_diagnostics(self, language_server: SolidLanguageServer) -> None:
        """Test getting diagnostics (errors, warnings) from F# files."""
        file_path = os.path.join("Program.fs")
        # FsAutoComplete uses publishDiagnostics notifications instead of textDocument/diagnostic requests
        # So we'll test that the language server can handle files without crashing
        # In real usage, diagnostics would come through the publishDiagnostics notification handler
        # Test that we can at least work with the file (open/close cycle)
        with language_server.open_file(file_path) as _:
            # If we can open and close the file without errors, basic diagnostics support is working
            pass
        # Reaching this point without an exception is the success criterion.
        assert True, "F# language server can handle files successfully"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/fsharp/test_fsharp_basic.py",
"license": "MIT License",
"lines": 141,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/groovy/test_groovy_basic.py | import os
from pathlib import Path
import pytest
from serena.constants import SERENA_MANAGED_DIR_NAME
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language, LanguageServerConfig
from solidlsp.ls_utils import SymbolUtils
from solidlsp.settings import SolidLSPSettings
@pytest.mark.groovy
class TestGroovyLanguageServer:
language_server: SolidLanguageServer | None = None
test_repo_path: Path = Path(__file__).parent.parent.parent / "resources" / "repos" / "groovy" / "test_repo"
    @classmethod
    def setup_class(cls):
        """
        Set up the test class: start a Groovy language server over the bundled test repository.

        Skips all tests if the repository or the Groovy Language Server JAR
        (GROOVY_LS_JAR_PATH) is not available.
        """
        if not cls.test_repo_path.exists():
            pytest.skip("Groovy test repository not found")
        # Use JAR path from environment variable
        ls_jar_path = os.environ.get("GROOVY_LS_JAR_PATH")
        if not ls_jar_path or not os.path.exists(ls_jar_path):
            pytest.skip(
                "Groovy Language Server JAR not found. Set GROOVY_LS_JAR_PATH environment variable to run tests.",
                allow_module_level=True,
            )
        # Get JAR options from environment variable
        ls_jar_options = os.environ.get("GROOVY_LS_JAR_OPTIONS", "")
        ls_java_home_path = os.environ.get("GROOVY_LS_JAVA_HOME_PATH")
        groovy_settings = {"ls_jar_path": ls_jar_path, "ls_jar_options": ls_jar_options}
        if ls_java_home_path:
            # Only pass the Java home through when explicitly configured.
            groovy_settings["ls_java_home_path"] = ls_java_home_path
        # Create language server directly with Groovy-specific settings
        repo_path = str(cls.test_repo_path)
        config = LanguageServerConfig(code_language=Language.GROOVY, ignored_paths=[], trace_lsp_communication=False)
        project_data_path = os.path.join(repo_path, SERENA_MANAGED_DIR_NAME)
        solidlsp_settings = SolidLSPSettings(
            solidlsp_dir=str(Path.home() / ".serena"),
            project_data_path=project_data_path,
            ls_specific_settings={Language.GROOVY: groovy_settings},
        )
        cls.language_server = SolidLanguageServer.create(config, repo_path, solidlsp_settings=solidlsp_settings)
        cls.language_server.start()
@classmethod
def teardown_class(cls):
"""
Clean up language server.
"""
if cls.language_server is not None:
cls.language_server.stop()
def test_find_symbol(self) -> None:
assert self.language_server is not None
symbols = self.language_server.request_full_symbol_tree()
assert SymbolUtils.symbol_tree_contains_name(symbols, "Main"), "Main class not found in symbol tree"
assert SymbolUtils.symbol_tree_contains_name(symbols, "Utils"), "Utils class not found in symbol tree"
assert SymbolUtils.symbol_tree_contains_name(symbols, "Model"), "Model class not found in symbol tree"
assert SymbolUtils.symbol_tree_contains_name(symbols, "ModelUser"), "ModelUser class not found in symbol tree"
def test_find_referencing_class_symbols(self) -> None:
assert self.language_server is not None
file_path = os.path.join("src", "main", "groovy", "com", "example", "Utils.groovy")
refs = self.language_server.request_references(file_path, 3, 6)
assert any("Main.groovy" in ref.get("relativePath", "") for ref in refs), "Utils should be referenced from Main.groovy"
file_path = os.path.join("src", "main", "groovy", "com", "example", "Model.groovy")
symbols = self.language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
model_symbol = None
for sym in symbols[0]:
if sym.get("name") == "com.example.Model" and sym.get("kind") == 5:
model_symbol = sym
break
assert model_symbol is not None, "Could not find 'Model' class symbol in Model.groovy"
if "selectionRange" in model_symbol:
sel_start = model_symbol["selectionRange"]["start"]
else:
sel_start = model_symbol["range"]["start"]
refs = self.language_server.request_references(file_path, sel_start["line"], sel_start["character"])
main_refs = [ref for ref in refs if "Main.groovy" in ref.get("relativePath", "")]
assert len(main_refs) >= 2, f"Model should be referenced from Main.groovy at least 2 times, found {len(main_refs)}"
model_user_refs = [ref for ref in refs if "ModelUser.groovy" in ref.get("relativePath", "")]
assert len(model_user_refs) >= 1, f"Model should be referenced from ModelUser.groovy at least 1 time, found {len(model_user_refs)}"
def test_overview_methods(self) -> None:
assert self.language_server is not None
symbols = self.language_server.request_full_symbol_tree()
assert SymbolUtils.symbol_tree_contains_name(symbols, "Main"), "Main missing from overview"
assert SymbolUtils.symbol_tree_contains_name(symbols, "Utils"), "Utils missing from overview"
assert SymbolUtils.symbol_tree_contains_name(symbols, "Model"), "Model missing from overview"
assert SymbolUtils.symbol_tree_contains_name(symbols, "ModelUser"), "ModelUser missing from overview"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/groovy/test_groovy_basic.py",
"license": "MIT License",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/powershell/test_powershell_basic.py | """
Basic integration tests for the PowerShell language server functionality.
These tests validate the functionality of the language server APIs
like request_document_symbols using the PowerShell test repository.
"""
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
@pytest.mark.powershell
class TestPowerShellLanguageServerBasics:
    """Test basic functionality of the PowerShell language server.

    All tests obtain the server via the parametrized ``language_server``
    fixture. PSES (PowerShell Editor Services) reports function symbols with
    LSP kind 12 and names in the form ``"function FuncName ()"``, hence the
    substring-based ``has_function`` helpers below.
    """

    @pytest.mark.parametrize("language_server", [Language.POWERSHELL], indirect=True)
    def test_powershell_language_server_initialization(self, language_server: SolidLanguageServer) -> None:
        """Test that PowerShell language server can be initialized successfully."""
        assert language_server is not None
        assert language_server.language == Language.POWERSHELL

    @pytest.mark.parametrize("language_server", [Language.POWERSHELL], indirect=True)
    def test_powershell_request_document_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test request_document_symbols for PowerShell files."""
        # Test getting symbols from main.ps1
        all_symbols, _root_symbols = language_server.request_document_symbols("main.ps1").get_all_symbols_and_roots()
        # Extract function symbols (LSP Symbol Kind 12)
        function_symbols = [symbol for symbol in all_symbols if symbol.get("kind") == 12]
        function_names = [symbol["name"] for symbol in function_symbols]

        # PSES returns function names in format "function FuncName ()" - check for function name substring
        def has_function(name: str) -> bool:
            return any(name in fn for fn in function_names)

        # Should detect the main functions from main.ps1
        assert has_function("Greet-User"), f"Should find Greet-User function in {function_names}"
        assert has_function("Process-Items"), f"Should find Process-Items function in {function_names}"
        assert has_function("Main"), f"Should find Main function in {function_names}"
        assert len(function_symbols) >= 3, f"Should find at least 3 functions, found {len(function_symbols)}"

    @pytest.mark.parametrize("language_server", [Language.POWERSHELL], indirect=True)
    def test_powershell_utils_functions(self, language_server: SolidLanguageServer) -> None:
        """Test function detection in utils.ps1 file."""
        # Test with utils.ps1
        utils_all_symbols, _utils_root_symbols = language_server.request_document_symbols("utils.ps1").get_all_symbols_and_roots()
        utils_function_symbols = [symbol for symbol in utils_all_symbols if symbol.get("kind") == 12]
        utils_function_names = [symbol["name"] for symbol in utils_function_symbols]

        # PSES returns function names in format "function FuncName ()" - check for function name substring
        def has_function(name: str) -> bool:
            return any(name in fn for fn in utils_function_names)

        # Should detect functions from utils.ps1
        expected_utils_functions = [
            "Convert-ToUpperCase",
            "Convert-ToLowerCase",
            "Remove-Whitespace",
            "Backup-File",
            "Test-ArrayContains",
            "Write-LogMessage",
            "Test-ValidEmail",
            "Test-IsNumber",
        ]
        for func_name in expected_utils_functions:
            assert has_function(func_name), f"Should find {func_name} function in utils.ps1, got {utils_function_names}"
        assert len(utils_function_symbols) >= 8, f"Should find at least 8 functions in utils.ps1, found {len(utils_function_symbols)}"

    @pytest.mark.parametrize("language_server", [Language.POWERSHELL], indirect=True)
    def test_powershell_function_with_parameters(self, language_server: SolidLanguageServer) -> None:
        """Test that functions with CmdletBinding and parameters are detected correctly."""
        all_symbols, _root_symbols = language_server.request_document_symbols("main.ps1").get_all_symbols_and_roots()
        function_symbols = [symbol for symbol in all_symbols if symbol.get("kind") == 12]
        # Find Greet-User function which has parameters
        # PSES returns function names in format "function FuncName ()"
        greet_user_symbol = next((sym for sym in function_symbols if "Greet-User" in sym["name"]), None)
        assert greet_user_symbol is not None, f"Should find Greet-User function in {[s['name'] for s in function_symbols]}"
        # Find Process-Items function which has array parameter
        process_items_symbol = next((sym for sym in function_symbols if "Process-Items" in sym["name"]), None)
        assert process_items_symbol is not None, f"Should find Process-Items function in {[s['name'] for s in function_symbols]}"

    @pytest.mark.parametrize("language_server", [Language.POWERSHELL], indirect=True)
    def test_powershell_all_function_detection(self, language_server: SolidLanguageServer) -> None:
        """Test that all expected functions are detected across both files."""
        # Get symbols from main.ps1
        main_all_symbols, _main_root_symbols = language_server.request_document_symbols("main.ps1").get_all_symbols_and_roots()
        main_functions = [symbol for symbol in main_all_symbols if symbol.get("kind") == 12]
        main_function_names = [func["name"] for func in main_functions]
        # Get symbols from utils.ps1
        utils_all_symbols, _utils_root_symbols = language_server.request_document_symbols("utils.ps1").get_all_symbols_and_roots()
        utils_functions = [symbol for symbol in utils_all_symbols if symbol.get("kind") == 12]
        utils_function_names = [func["name"] for func in utils_functions]

        # PSES returns function names in format "function FuncName ()" - check for function name substring
        def has_main_function(name: str) -> bool:
            return any(name in fn for fn in main_function_names)

        def has_utils_function(name: str) -> bool:
            return any(name in fn for fn in utils_function_names)

        # Verify main.ps1 functions
        expected_main = ["Greet-User", "Process-Items", "Main"]
        for expected_func in expected_main:
            assert has_main_function(expected_func), f"Should detect {expected_func} function in main.ps1, got {main_function_names}"
        # Verify utils.ps1 functions
        expected_utils = [
            "Convert-ToUpperCase",
            "Convert-ToLowerCase",
            "Remove-Whitespace",
            "Backup-File",
            "Test-ArrayContains",
            "Write-LogMessage",
            "Test-ValidEmail",
            "Test-IsNumber",
        ]
        for expected_func in expected_utils:
            assert has_utils_function(expected_func), f"Should detect {expected_func} function in utils.ps1, got {utils_function_names}"
        # Verify total counts
        assert len(main_functions) >= 3, f"Should find at least 3 functions in main.ps1, found {len(main_functions)}"
        assert len(utils_functions) >= 8, f"Should find at least 8 functions in utils.ps1, found {len(utils_functions)}"

    @pytest.mark.parametrize("language_server", [Language.POWERSHELL], indirect=True)
    def test_powershell_find_references_within_file(self, language_server: SolidLanguageServer) -> None:
        """Test finding references to a function within the same file."""
        main_path = "main.ps1"
        # Get symbols to find the Greet-User function which is called from Main
        all_symbols, _root_symbols = language_server.request_document_symbols(main_path).get_all_symbols_and_roots()
        # Find Greet-User function definition
        function_symbols = [s for s in all_symbols if s.get("kind") == 12]
        greet_user_symbol = next((s for s in function_symbols if "Greet-User" in s["name"]), None)
        assert greet_user_symbol is not None, f"Should find Greet-User function in {[s['name'] for s in function_symbols]}"
        # Find references to Greet-User (should be called from Main function at line 91)
        sel_start = greet_user_symbol["selectionRange"]["start"]
        refs = language_server.request_references(main_path, sel_start["line"], sel_start["character"])
        # Should find at least the call site in Main function
        assert refs is not None and len(refs) >= 1, f"Should find references to Greet-User, got {refs}"
        # References may carry either a 'uri' or a 'relativePath' key.
        assert any(
            "main.ps1" in ref.get("uri", ref.get("relativePath", "")) for ref in refs
        ), f"Should find reference in main.ps1, got {refs}"

    @pytest.mark.parametrize("language_server", [Language.POWERSHELL], indirect=True)
    def test_powershell_find_definition_across_files(self, language_server: SolidLanguageServer) -> None:
        """Test finding definition of functions across files (main.ps1 -> utils.ps1)."""
        # main.ps1 calls Convert-ToUpperCase from utils.ps1 at line 99 (0-indexed: 98)
        # The call is: $upperName = Convert-ToUpperCase -InputString $User
        # We'll request definition from the call site in main.ps1
        main_path = "main.ps1"
        # Find definition of Convert-ToUpperCase from its usage in main.ps1
        # Line 99 (1-indexed) = line 98 (0-indexed), character position ~16 for "Convert-ToUpperCase"
        definition_locations = language_server.request_definition(main_path, 98, 18)
        # Should find the definition in utils.ps1
        assert (
            definition_locations is not None and len(definition_locations) >= 1
        ), f"Should find definition of Convert-ToUpperCase, got {definition_locations}"
        assert any(
            "utils.ps1" in loc.get("uri", "") for loc in definition_locations
        ), f"Should find definition in utils.ps1, got {definition_locations}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/powershell/test_powershell_basic.py",
"license": "MIT License",
"lines": 139,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/toml/test_toml_basic.py | """
Basic integration tests for the TOML language server functionality.
These tests validate the functionality of the Taplo language server APIs
like request_document_symbols using the TOML test repository.
"""
from pathlib import Path
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
@pytest.mark.toml
class TestTomlLanguageServerBasics:
    """Test basic functionality of the TOML language server (Taplo).

    Taplo maps TOML value types onto LSP SymbolKind values as exercised
    throughout this suite: 15 = string, 16 = number, 17 = boolean,
    18 = array, 19 = object (table / inline table). All line/character
    positions asserted here are 0-indexed LSP coordinates.
    """

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_toml_language_server_initialization(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that TOML language server can be initialized successfully."""
        assert language_server is not None
        assert language_server.language == Language.TOML
        assert language_server.is_running()
        # The workspace root must match the repo_path fixture exactly.
        assert Path(language_server.language_server.repository_root_path).resolve() == repo_path.resolve()

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_toml_cargo_file_symbols(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test document symbols detection in Cargo.toml with specific symbol verification."""
        all_symbols, root_symbols = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        assert all_symbols is not None, "Should return symbols for Cargo.toml"
        assert len(all_symbols) > 0, f"Should find symbols in Cargo.toml, found {len(all_symbols)}"
        # Verify specific top-level table names are detected
        symbol_names = [sym.get("name") for sym in all_symbols]
        assert "package" in symbol_names, "Should detect 'package' table in Cargo.toml"
        assert "dependencies" in symbol_names, "Should detect 'dependencies' table in Cargo.toml"
        assert "dev-dependencies" in symbol_names, "Should detect 'dev-dependencies' table in Cargo.toml"
        assert "features" in symbol_names, "Should detect 'features' table in Cargo.toml"
        assert "workspace" in symbol_names, "Should detect 'workspace' table in Cargo.toml"
        # Verify nested symbols exist (keys under 'package')
        assert "name" in symbol_names, "Should detect nested 'name' key"
        assert "version" in symbol_names, "Should detect nested 'version' key"
        assert "edition" in symbol_names, "Should detect nested 'edition' key"
        # Check symbol kind for tables - Taplo uses kind 19 (object) for TOML tables
        package_symbol = next((s for s in all_symbols if s.get("name") == "package"), None)
        assert package_symbol is not None, "Should find 'package' symbol"
        assert package_symbol.get("kind") == 19, "Top-level table should have kind 19 (object)"
        dependencies_symbol = next((s for s in all_symbols if s.get("name") == "dependencies"), None)
        assert dependencies_symbol is not None, "Should find 'dependencies' symbol"
        assert dependencies_symbol.get("kind") == 19, "'dependencies' table should have kind 19 (object)"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_toml_pyproject_file_symbols(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test document symbols detection in pyproject.toml."""
        all_symbols, root_symbols = language_server.request_document_symbols("pyproject.toml").get_all_symbols_and_roots()
        assert all_symbols is not None, "Should return symbols for pyproject.toml"
        assert len(all_symbols) > 0, f"Should find symbols in pyproject.toml, found {len(all_symbols)}"
        # Verify specific top-level table names
        symbol_names = [sym.get("name") for sym in all_symbols]
        assert "project" in symbol_names, "Should detect 'project' table"
        assert "build-system" in symbol_names, "Should detect 'build-system' table"
        # Verify tool sections (nested tables)
        # These could appear as 'tool' or 'tool.ruff' depending on Taplo's parsing
        has_tool_section = any("tool" in name for name in symbol_names if name)
        assert has_tool_section, "Should detect tool sections"
        # Verify nested keys under project
        assert "name" in symbol_names, "Should detect 'name' under project"
        assert "version" in symbol_names, "Should detect 'version' under project"
        assert "requires-python" in symbol_names or "dependencies" in symbol_names, "Should detect project dependencies"
        # Check symbol kind for tables - Taplo uses kind 19 (object) for TOML tables
        project_symbol = next((s for s in all_symbols if s.get("name") == "project"), None)
        assert project_symbol is not None, "Should find 'project' symbol"
        assert project_symbol.get("kind") == 19, "'project' table should have kind 19 (object)"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_toml_symbol_kinds(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that TOML symbols have appropriate LSP kinds for different value types."""
        all_symbols, root_symbols = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        assert all_symbols is not None
        assert len(all_symbols) > 0
        # Check boolean symbol kind (lto = true at line 22)
        # LSP kind 17 = boolean
        lto_symbol = next((s for s in all_symbols if s.get("name") == "lto"), None)
        assert lto_symbol is not None, "Should find 'lto' boolean symbol"
        assert lto_symbol.get("kind") == 17, "'lto' should have kind 17 (boolean)"
        # Check number symbol kind (opt-level = 3 at line 23)
        # LSP kind 16 = number
        opt_level_symbol = next((s for s in all_symbols if s.get("name") == "opt-level"), None)
        assert opt_level_symbol is not None, "Should find 'opt-level' number symbol"
        assert opt_level_symbol.get("kind") == 16, "'opt-level' should have kind 16 (number)"
        # Check string symbol kind (name = "test_project" at line 2)
        # LSP kind 15 = string
        name_symbols = [s for s in all_symbols if s.get("name") == "name"]
        assert len(name_symbols) > 0, "Should find 'name' symbols"
        # At least one should be a string
        string_name_symbol = next((s for s in name_symbols if s.get("kind") == 15), None)
        assert string_name_symbol is not None, "Should find 'name' with kind 15 (string)"
        # Check array symbol kind (default = ["feature1"] at line 17)
        # LSP kind 18 = array
        default_symbol = next((s for s in all_symbols if s.get("name") == "default"), None)
        assert default_symbol is not None, "Should find 'default' array symbol"
        assert default_symbol.get("kind") == 18, "'default' should have kind 18 (array)"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_toml_symbols_with_body(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test request_document_symbols with body extraction."""
        all_symbols, root_symbols = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        assert all_symbols is not None, "Should return symbols for Cargo.toml"
        assert len(all_symbols) > 0, "Should have symbols"
        # Find the 'package' symbol and verify its body
        package_symbol = next((s for s in all_symbols if s.get("name") == "package"), None)
        assert package_symbol is not None, "Should find 'package' symbol"
        # Check that body exists and contains expected content
        # Note: Taplo includes the section header in the body
        assert "body" in package_symbol, "'package' symbol should have body"
        package_body = package_symbol["body"].get_text()
        assert 'name = "test_project"' in package_body, "Body should contain 'name' field"
        assert 'version = "0.1.0"' in package_body, "Body should contain 'version' field"
        assert 'edition = "2021"' in package_body, "Body should contain 'edition' field"
        # Find the dependencies symbol and check its body
        deps_symbol = next((s for s in all_symbols if s.get("name") == "dependencies"), None)
        assert deps_symbol is not None, "Should find 'dependencies' symbol"
        assert "body" in deps_symbol, "'dependencies' symbol should have body"
        deps_body = deps_symbol["body"].get_text()
        assert "serde" in deps_body, "Body should contain serde dependency"
        assert "tokio" in deps_body, "Body should contain tokio dependency"
        # Find the top-level [features] section (not the nested 'features' in serde dependency)
        # The [features] section should be kind 19 (object) and at line 15 (0-indexed)
        features_symbols = [s for s in all_symbols if s.get("name") == "features"]
        # Find the top-level one - should be kind 19 (object) with children
        features_symbol = next(
            (s for s in features_symbols if s.get("kind") == 19 and s.get("children")),
            None,
        )
        assert features_symbol is not None, "Should find top-level 'features' table symbol"
        assert "body" in features_symbol, "'features' symbol should have body"
        features_body = features_symbol["body"].get_text()
        assert "default" in features_body, "Body should contain 'default' feature"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_toml_symbol_ranges(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that symbols have proper range information."""
        all_symbols, root_symbols = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        assert all_symbols is not None
        assert len(all_symbols) > 0
        # Check the 'package' symbol range - should start at line 0 (0-indexed, actual line 1)
        package_symbol = next((s for s in all_symbols if s.get("name") == "package"), None)
        assert package_symbol is not None, "Should find 'package' symbol"
        assert "range" in package_symbol, "'package' symbol should have range"
        package_range = package_symbol["range"]
        assert "start" in package_range, "Range should have start"
        assert "end" in package_range, "Range should have end"
        assert package_range["start"]["line"] == 0, "'package' should start at line 0 (0-indexed, actual line 1)"
        # Package block spans from line 1 to line 7 in file (1-indexed)
        # In 0-indexed LSP coordinates: line 0 (start) to line 6 or 7 (end)
        assert package_range["end"]["line"] >= 6, "'package' should end at or after line 6 (0-indexed)"
        # Check a nested symbol range - 'name' under package at line 2 (1-indexed), line 1 (0-indexed)
        name_symbols = [s for s in all_symbols if s.get("name") == "name"]
        assert len(name_symbols) > 0, "Should find 'name' symbols"
        # Find the one under 'package' (should be at line 1 in 0-indexed)
        package_name = next((s for s in name_symbols if s["range"]["start"]["line"] == 1), None)
        assert package_name is not None, "Should find 'name' under 'package'"
        # Check the dependencies range - starts at line 9 (1-indexed), line 8 (0-indexed)
        deps_symbol = next((s for s in all_symbols if s.get("name") == "dependencies"), None)
        assert deps_symbol is not None, "Should find 'dependencies' symbol"
        deps_range = deps_symbol["range"]
        assert deps_range["start"]["line"] == 8, "'dependencies' should start at line 8 (0-indexed, actual line 9)"
        # Check that range includes line and character positions
        assert "line" in package_range["start"], "Start should have line"
        assert "character" in package_range["start"], "Start should have character"
        assert "line" in package_range["end"], "End should have line"
        assert "character" in package_range["end"], "End should have character"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_toml_nested_table_symbols(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test detection of nested table symbols like profile.release and tool.ruff."""
        # Test Cargo.toml for profile.release
        cargo_symbols, _ = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        assert cargo_symbols is not None
        symbol_names = [sym.get("name") for sym in cargo_symbols]
        # Should detect profile.release or profile section
        has_profile = any("profile" in name for name in symbol_names if name)
        assert has_profile, "Should detect profile section in Cargo.toml"
        # Test pyproject.toml for tool sections
        pyproject_symbols, _ = language_server.request_document_symbols("pyproject.toml").get_all_symbols_and_roots()
        assert pyproject_symbols is not None
        pyproject_names = [sym.get("name") for sym in pyproject_symbols]
        # Should detect tool.ruff, tool.mypy sections
        has_ruff = any("ruff" in name for name in pyproject_names if name)
        has_mypy = any("mypy" in name for name in pyproject_names if name)
        assert has_ruff or has_mypy, "Should detect tool sections in pyproject.toml"
        # Verify pyproject has expected boolean: strict = true
        # Conditional because 'strict' is only asserted when the server reports it.
        strict_symbol = next((s for s in pyproject_symbols if s.get("name") == "strict"), None)
        if strict_symbol:
            assert strict_symbol.get("kind") == 17, "'strict' should have kind 17 (boolean)"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/toml/test_toml_basic.py",
"license": "MIT License",
"lines": 190,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/toml/test_toml_edge_cases.py | """
Tests for TOML language server edge cases and advanced features.
These tests cover:
- Inline tables
- Multiline strings
- Arrays of tables
- Nested tables
- Various TOML data types
"""
from pathlib import Path
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
pytestmark = pytest.mark.toml
class TestTomlEdgeCases:
"""Test TOML language server handling of edge cases and advanced features."""
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_inline_table_detection(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
"""Test that inline tables are properly detected."""
all_symbols, root_symbols = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
assert all_symbols is not None
assert len(all_symbols) > 0
symbol_names = [sym.get("name") for sym in all_symbols]
# The inline table 'endpoint' should be detected
assert "endpoint" in symbol_names, "Should detect 'endpoint' inline table"
# Find the endpoint symbol and check its properties
endpoint_symbol = next((s for s in all_symbols if s.get("name") == "endpoint"), None)
assert endpoint_symbol is not None
# Inline tables should be kind 19 (object)
assert endpoint_symbol.get("kind") == 19, "Inline table should have kind 19 (object)"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_nested_table_detection(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
"""Test that deeply nested tables are properly detected."""
all_symbols, root_symbols = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
symbol_names = [sym.get("name") for sym in all_symbols]
# Should detect nested tables like server.ssl and database.pool
has_ssl = any("ssl" in str(name).lower() for name in symbol_names if name)
has_pool = any("pool" in str(name).lower() for name in symbol_names if name)
assert has_ssl, f"Should detect 'server.ssl' nested table, got: {symbol_names}"
assert has_pool, f"Should detect 'database.pool' nested table, got: {symbol_names}"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_array_of_tables_detection(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
"""Test that [[array_of_tables]] syntax is properly detected."""
all_symbols, root_symbols = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
symbol_names = [sym.get("name") for sym in all_symbols]
# Should detect [[endpoints]] array of tables
assert "endpoints" in symbol_names, f"Should detect '[[endpoints]]' array of tables, got: {symbol_names}"
# Find the endpoints symbol
endpoints_symbol = next((s for s in all_symbols if s.get("name") == "endpoints"), None)
assert endpoints_symbol is not None
# Array of tables should be kind 18 (array)
assert endpoints_symbol.get("kind") == 18, "Array of tables should have kind 18 (array)"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_multiline_string_handling(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
"""Test that multiline strings are handled correctly."""
all_symbols, root_symbols = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
symbol_names = [sym.get("name") for sym in all_symbols]
# Should detect connection_string and multiline fields
assert "connection_string" in symbol_names, "Should detect 'connection_string' with multiline value"
assert "multiline" in symbol_names, "Should detect 'multiline' literal string"
# Find connection_string and verify it's a string type
conn_symbol = next((s for s in all_symbols if s.get("name") == "connection_string"), None)
assert conn_symbol is not None
# String type should be kind 15
assert conn_symbol.get("kind") == 15, "Multiline string should have kind 15 (string)"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_array_value_detection(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
"""Test that array values are properly detected."""
all_symbols, root_symbols = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
symbol_names = [sym.get("name") for sym in all_symbols]
# Should detect 'outputs' and 'methods' arrays
assert "outputs" in symbol_names, "Should detect 'outputs' array"
assert "methods" in symbol_names, "Should detect 'methods' array"
# Find outputs array and verify kind
outputs_symbol = next((s for s in all_symbols if s.get("name") == "outputs"), None)
assert outputs_symbol is not None
# Arrays should have kind 18
assert outputs_symbol.get("kind") == 18, "'outputs' should have kind 18 (array)"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_float_value_detection(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
"""Test that float values are properly detected."""
all_symbols, root_symbols = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
symbol_names = [sym.get("name") for sym in all_symbols]
# Should detect 'timeout' which has a float value (30.5)
assert "timeout" in symbol_names, "Should detect 'timeout' float value"
# Find timeout and verify it's a number
timeout_symbol = next((s for s in all_symbols if s.get("name") == "timeout"), None)
assert timeout_symbol is not None
# Numbers should have kind 16
assert timeout_symbol.get("kind") == 16, "'timeout' should have kind 16 (number)"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_datetime_value_detection(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
"""Test that datetime values are detected."""
all_symbols, root_symbols = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
symbol_names = [sym.get("name") for sym in all_symbols]
# Should detect metadata section with datetime values
assert "metadata" in symbol_names, "Should detect 'metadata' section"
assert "created" in symbol_names, "Should detect 'created' datetime field"
assert "updated" in symbol_names, "Should detect 'updated' datetime field"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_symbol_body_with_inline_table(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
    """Verify that a symbol's body text includes inline-table content."""
    symbols, _roots = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
    # Locate the 'endpoint' symbol, which is defined as an inline table
    endpoint = None
    for entry in symbols:
        if entry.get("name") == "endpoint":
            endpoint = entry
            break
    assert endpoint is not None
    # Only inspect the body text when the server attached one to the symbol
    if "body" in endpoint:
        body = endpoint["body"].get_text()
        # The inline table defines url/version keys, so at least one must appear
        assert "url" in body or "version" in body, f"Body should contain inline table contents, got: {body}"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_symbol_ranges_in_config(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
    """Verify that symbol ranges in config.toml are plausible."""
    symbols, _roots = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
    # Locate the [server] table symbol
    server_entry = None
    for entry in symbols:
        if entry.get("name") == "server":
            server_entry = entry
            break
    assert server_entry is not None
    assert "range" in server_entry
    # [server] appears near the top of the file and spans several key/value lines
    start_line = server_entry["range"]["start"]["line"]
    end_line = server_entry["range"]["end"]["line"]
    assert start_line >= 0, "Server should start at or near the beginning"
    assert end_line > start_line, "Server block should span multiple lines"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_comment_handling(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
    """Verify that comments do not interfere with symbol detection."""
    symbols, _roots = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
    names = {entry.get("name") for entry in symbols}
    # The file is heavily commented; most section tables must still be found
    expected_sections = {"server", "database", "logging", "endpoints", "metadata", "messages"}
    found_sections = expected_sections & names
    assert len(found_sections) >= 4, f"Should find most sections despite comments, found: {found_sections}"
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
@pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
def test_special_characters_in_strings(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
    """Verify that strings containing escape sequences are handled."""
    symbols, _roots = language_server.request_document_symbols("config.toml").get_all_symbols_and_roots()
    names = {entry.get("name") for entry in symbols}
    # [messages] holds strings with escapes; its keys must still be detected
    assert "messages" in names, "Should detect 'messages' section"
    assert "with_escapes" in names, "Should detect 'with_escapes' field"
    assert "welcome" in names, "Should detect 'welcome' field"
class TestTomlDependencyTables:
    """Test handling of dependency-style tables common in Cargo.toml and pyproject.toml."""

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_complex_dependency_inline_table(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test detection of complex inline table dependencies like serde = { version = "1.0", features = ["derive"] }."""
        symbols, _roots = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        names = {entry.get("name") for entry in symbols}
        # Both inline-table dependencies from the fixture must be present
        assert "serde" in names, "Should detect 'serde' dependency"
        assert "tokio" in names, "Should detect 'tokio' dependency"
        # An inline-table dependency is reported as an object (kind 19)
        serde_entry = None
        for entry in symbols:
            if entry.get("name") == "serde":
                serde_entry = entry
                break
        assert serde_entry is not None
        assert serde_entry.get("kind") == 19, "Complex dependency should have kind 19 (object)"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_simple_dependency_string(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test detection of simple string dependencies like proptest = "1.0"."""
        symbols, _roots = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        names = {entry.get("name") for entry in symbols}
        # proptest lives under [dev-dependencies] as a bare version string
        assert "proptest" in names, "Should detect 'proptest' dependency"
        proptest_entry = None
        for entry in symbols:
            if entry.get("name") == "proptest":
                proptest_entry = entry
                break
        assert proptest_entry is not None
        # A bare version string is reported as a string (kind 15)
        assert proptest_entry.get("kind") == 15, "Simple string dependency should have kind 15 (string)"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_pyproject_dependencies_array(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test detection of pyproject.toml dependencies array."""
        symbols, _roots = language_server.request_document_symbols("pyproject.toml").get_all_symbols_and_roots()
        names = {entry.get("name") for entry in symbols}
        assert "dependencies" in names, "Should detect 'dependencies' array"
        deps_entry = None
        for entry in symbols:
            if entry.get("name") == "dependencies":
                deps_entry = entry
                break
        assert deps_entry is not None
        # A TOML array is reported with kind 18
        assert deps_entry.get("kind") == 18, "Dependencies array should have kind 18 (array)"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_optional_dependencies_table(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test detection of optional-dependencies in pyproject.toml."""
        symbols, _roots = language_server.request_document_symbols("pyproject.toml").get_all_symbols_and_roots()
        symbol_names = [entry.get("name") for entry in symbols]
        # Accept either the optional-dependencies table itself or its 'dev' group
        has_optional_deps = any("optional" in str(name).lower() for name in symbol_names if name)
        has_dev = "dev" in symbol_names
        assert has_optional_deps or has_dev, f"Should detect optional-dependencies or dev group, got: {symbol_names}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/toml/test_toml_edge_cases.py",
"license": "MIT License",
"lines": 203,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/toml/test_toml_ignored_dirs.py | """
Tests for TOML language server directory ignoring functionality.
These tests validate that the Taplo language server correctly ignores
TOML-specific directories like target, .cargo, and node_modules.
"""
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
pytestmark = pytest.mark.toml
@pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
class TestTomlIgnoredDirectories:
    """Test TOML-specific directory ignoring behavior."""

    def test_default_ignored_directories(self, language_server: SolidLanguageServer) -> None:
        """Test that default TOML directories are ignored."""
        # TOML/Rust/Node-specific build and cache directories are ignored by
        # default; the base class additionally ignores every dot-directory.
        for dirname in ("target", ".cargo", "node_modules", ".git", ".venv"):
            assert language_server.is_ignored_dirname(dirname), f"{dirname} should be ignored"

    def test_important_directories_not_ignored(self, language_server: SolidLanguageServer) -> None:
        """Test that important directories are not ignored."""
        # Regular project source/test/config directories must remain visible
        for dirname in ("src", "crates", "lib", "tests", "config"):
            assert not language_server.is_ignored_dirname(dirname), f"{dirname} should not be ignored"

    def test_cargo_related_directories(self, language_server: SolidLanguageServer) -> None:
        """Test Cargo/Rust-related directory handling."""
        # Rust build output and cargo state are ignored...
        assert language_server.is_ignored_dirname("target"), "target (Rust build) should be ignored"
        assert language_server.is_ignored_dirname(".cargo"), ".cargo should be ignored"
        # ...but benchmark and example sources are not
        for dirname in ("benches", "examples"):
            assert not language_server.is_ignored_dirname(dirname), f"{dirname} should not be ignored"

    def test_various_cache_directories(self, language_server: SolidLanguageServer) -> None:
        """Test various cache and temporary directories are ignored."""
        # Dot-directories (caches and IDE state) are ignored by the base class
        for dirname in (".cache", ".idea", ".vscode"):
            assert language_server.is_ignored_dirname(dirname), f"{dirname} should be ignored"
        # __pycache__ is a Python artifact; only Python servers ignore it
        assert not language_server.is_ignored_dirname("__pycache__"), "__pycache__ is not TOML-specific"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/toml/test_toml_ignored_dirs.py",
"license": "MIT License",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/toml/test_toml_symbol_retrieval.py | """
Tests for TOML language server symbol retrieval functionality.
These tests focus on advanced symbol operations:
- request_containing_symbol
- request_document_overview
- request_full_symbol_tree
- request_dir_overview
"""
from pathlib import Path
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
pytestmark = pytest.mark.toml
class TestTomlSymbolRetrieval:
    """Test advanced symbol retrieval functionality for TOML files."""

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_request_containing_symbol_behavior(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test request_containing_symbol behavior for TOML files.

        Note: Taplo LSP doesn't support definition/containing symbol lookups for TOML files
        since TOML is a configuration format, not code. This test verifies the behavior.
        """
        # Line 2 (0-indexed: 1) is inside the [package] table
        containing_symbol = language_server.request_containing_symbol("Cargo.toml", 1, 5)
        # Taplo doesn't support containing symbol lookup - returns None
        # This is expected behavior for a configuration file format
        assert containing_symbol is None, "TOML LSP doesn't support containing symbol lookup"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_request_document_overview_cargo(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test request_document_overview for Cargo.toml."""
        overview = language_server.request_document_overview("Cargo.toml")
        assert overview is not None
        assert len(overview) > 0
        # Get symbol names from overview
        symbol_names = {symbol.get("name") for symbol in overview if "name" in symbol}
        # Verify expected top-level tables appear
        expected_tables = {"package", "dependencies", "dev-dependencies", "features", "workspace"}
        assert expected_tables.issubset(symbol_names), f"Missing expected tables in overview: {expected_tables - symbol_names}"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_request_document_overview_pyproject(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test request_document_overview for pyproject.toml."""
        overview = language_server.request_document_overview("pyproject.toml")
        assert overview is not None
        assert len(overview) > 0
        # Get symbol names from overview
        symbol_names = {symbol.get("name") for symbol in overview if "name" in symbol}
        # Verify expected top-level tables appear
        assert "project" in symbol_names, "Should detect 'project' table"
        assert "build-system" in symbol_names, "Should detect 'build-system' table"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_request_full_symbol_tree(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test request_full_symbol_tree returns TOML files."""
        symbol_tree = language_server.request_full_symbol_tree()
        assert symbol_tree is not None
        assert len(symbol_tree) > 0
        # The root should be test_repo
        root = symbol_tree[0]
        assert root["name"] == "test_repo"
        assert "children" in root
        # Children should include TOML files
        child_names = {child["name"] for child in root.get("children", [])}
        # Note: File names are stripped of extension in some cases
        assert (
            "Cargo" in child_names or "Cargo.toml" in child_names or any("cargo" in name.lower() for name in child_names)
        ), f"Should find Cargo.toml in tree, got: {child_names}"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_request_dir_overview(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test request_dir_overview returns symbols for TOML files."""
        overview = language_server.request_dir_overview(".")
        assert overview is not None
        assert len(overview) > 0
        # Should have entries for both Cargo.toml and pyproject.toml
        # (overview keys are file paths relative to the repo root)
        file_paths = list(overview.keys())
        assert any("Cargo.toml" in path for path in file_paths), f"Should find Cargo.toml in overview, got: {file_paths}"
        assert any("pyproject.toml" in path for path in file_paths), f"Should find pyproject.toml in overview, got: {file_paths}"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_symbol_hierarchy_in_cargo(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that symbol hierarchy is properly preserved in Cargo.toml."""
        all_symbols, root_symbols = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        # Find the 'package' table
        package_symbol = next((s for s in root_symbols if s.get("name") == "package"), None)
        assert package_symbol is not None, "Should find 'package' as root symbol"
        # Verify it has children (nested keys)
        assert "children" in package_symbol, "'package' should have children"
        child_names = {child.get("name") for child in package_symbol.get("children", [])}
        # Package should have name, version, edition at minimum
        assert "name" in child_names, "'package' should have 'name' child"
        assert "version" in child_names, "'package' should have 'version' child"
        assert "edition" in child_names, "'package' should have 'edition' child"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_symbol_hierarchy_in_pyproject(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that symbol hierarchy is properly preserved in pyproject.toml."""
        all_symbols, root_symbols = language_server.request_document_symbols("pyproject.toml").get_all_symbols_and_roots()
        # Find the 'project' table
        project_symbol = next((s for s in root_symbols if s.get("name") == "project"), None)
        assert project_symbol is not None, "Should find 'project' as root symbol"
        # Verify it has children
        assert "children" in project_symbol, "'project' should have children"
        child_names = {child.get("name") for child in project_symbol.get("children", [])}
        # Project should have name, version, dependencies at minimum
        assert "name" in child_names, "'project' should have 'name' child"
        assert "version" in child_names, "'project' should have 'version' child"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_tool_section_hierarchy(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that tool sections in pyproject.toml are properly structured."""
        all_symbols, root_symbols = language_server.request_document_symbols("pyproject.toml").get_all_symbols_and_roots()
        # Get all symbol names
        all_names = [s.get("name") for s in all_symbols]
        # Should detect tool.ruff, tool.mypy, or tool.pytest
        # (any one of the three is sufficient; fixtures may vary)
        has_ruff = any("ruff" in name.lower() for name in all_names if name)
        has_mypy = any("mypy" in name.lower() for name in all_names if name)
        has_pytest = any("pytest" in name.lower() for name in all_names if name)
        assert has_ruff or has_mypy or has_pytest, f"Should detect tool sections, got names: {all_names}"

    @pytest.mark.parametrize("language_server", [Language.TOML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.TOML], indirect=True)
    def test_array_of_tables_symbol(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that [[bin]] array of tables is detected."""
        all_symbols, root_symbols = language_server.request_document_symbols("Cargo.toml").get_all_symbols_and_roots()
        # Get all symbol names
        all_names = [s.get("name") for s in all_symbols]
        # Should detect bin array of tables
        has_bin = "bin" in all_names
        assert has_bin, f"Should detect [[bin]] array of tables, got names: {all_names}"
        # Find the bin symbol and verify its structure
        bin_symbol = next((s for s in all_symbols if s.get("name") == "bin"), None)
        assert bin_symbol is not None, "Should find bin symbol"
        # Array of tables should be kind 18 (array)
        assert bin_symbol.get("kind") == 18, "[[bin]] should have kind 18 (array)"
        # Children of array of tables are indexed by position ('0', '1', etc.)
        if "children" in bin_symbol:
            bin_children = bin_symbol.get("children", [])
            assert len(bin_children) > 0, "[[bin]] should have at least one child element"
            # First child is index '0'
            first_child = bin_children[0]
            assert first_child.get("name") == "0", f"First array element should be named '0', got: {first_child.get('name')}"
            # The '0' element should contain name and path as grandchildren
            if "children" in first_child:
                grandchild_names = {gc.get("name") for gc in first_child.get("children", [])}
                assert "name" in grandchild_names, f"[[bin]] element should have 'name' field, got: {grandchild_names}"
                assert "path" in grandchild_names, f"[[bin]] element should have 'path' field, got: {grandchild_names}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/toml/test_toml_symbol_retrieval.py",
"license": "MIT License",
"lines": 147,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/vue/test_vue_basic.py | import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_utils import SymbolUtils
@pytest.mark.vue
class TestVueLanguageServer:
    """Smoke tests for the Vue language server integration."""

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_vue_files_in_symbol_tree(self, language_server: SolidLanguageServer) -> None:
        """Every SFC in the fixture project must appear in the full symbol tree."""
        tree = language_server.request_full_symbol_tree()
        for component in ("App", "CalculatorButton", "CalculatorInput", "CalculatorDisplay"):
            assert SymbolUtils.symbol_tree_contains_name(tree, component), f"{component} not found in symbol tree"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_find_referencing_symbols(self, language_server: SolidLanguageServer) -> None:
        """References to the store factory must include the consuming .vue files."""
        store_file = os.path.join("src", "stores", "calculator.ts")
        all_syms, _roots = language_server.request_document_symbols(store_file).get_all_symbols_and_roots()
        # Locate the store factory function in the TypeScript file
        store_symbol = next((s for s in all_syms if s.get("name") == "useCalculatorStore"), None)
        assert store_symbol is not None, "useCalculatorStore function not found"
        sel_start = store_symbol["selectionRange"]["start"]
        refs = language_server.request_references(store_file, sel_start["line"], sel_start["character"])
        # Definition plus usages in App.vue, CalculatorInput.vue, CalculatorDisplay.vue
        assert len(refs) >= 4, f"useCalculatorStore should have at least 4 references (definition + 3 usages), got {len(refs)}"
        vue_refs = [ref for ref in refs if ".vue" in ref.get("relativePath", "")]
        assert len(vue_refs) >= 3, f"Should have at least 3 Vue component references, got {len(vue_refs)}"
@pytest.mark.vue
class TestVueDualLspArchitecture:
    """Tests for the dual-LSP setup where a Vue server and a TypeScript server cooperate."""

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_typescript_server_coordination(self, language_server: SolidLanguageServer) -> None:
        """Both backends must produce symbols: TS server for .ts files, Vue server for .vue files."""
        ts_file = os.path.join("src", "stores", "calculator.ts")
        ts_symbols = language_server.request_document_symbols(ts_file).get_all_symbols_and_roots()
        # Index 0 of the returned tuple is the flat list of all symbols
        ts_symbol_names = [s.get("name") for s in ts_symbols[0]]
        assert len(ts_symbols[0]) >= 5, f"TypeScript server should return multiple symbols for calculator.ts, got {len(ts_symbols[0])}"
        assert "useCalculatorStore" in ts_symbol_names, "TypeScript server should extract store function"
        # Verify Vue server can parse .vue files
        vue_file = os.path.join("src", "App.vue")
        vue_symbols = language_server.request_document_symbols(vue_file).get_all_symbols_and_roots()
        vue_symbol_names = [s.get("name") for s in vue_symbols[0]]
        assert len(vue_symbols[0]) >= 15, f"Vue server should return at least 15 symbols for App.vue, got {len(vue_symbols[0])}"
        assert "appTitle" in vue_symbol_names, "Vue server should extract ref declarations from script setup"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_cross_file_references_vue_to_typescript(self, language_server: SolidLanguageServer) -> None:
        """References to a TypeScript symbol must include usages inside .vue components."""
        store_file = os.path.join("src", "stores", "calculator.ts")
        store_symbols = language_server.request_document_symbols(store_file).get_all_symbols_and_roots()
        store_symbol = None
        for sym in store_symbols[0]:
            if sym.get("name") == "useCalculatorStore":
                store_symbol = sym
                break
        # Guard: skip (not fail) if the fixture no longer defines the symbol
        if not store_symbol or "selectionRange" not in store_symbol:
            pytest.skip("useCalculatorStore symbol not found - test fixture may need updating")
        # Request references for this symbol
        sel_start = store_symbol["selectionRange"]["start"]
        refs = language_server.request_references(store_file, sel_start["line"], sel_start["character"])
        # Verify we found references: definition + usage in App.vue, CalculatorInput.vue, CalculatorDisplay.vue
        assert len(refs) >= 4, f"useCalculatorStore should have at least 4 references (definition + 3 usages), found {len(refs)} references"
        # Verify references include .vue files (components that import the store)
        vue_refs = [ref for ref in refs if ".vue" in ref.get("uri", "")]
        assert (
            len(vue_refs) >= 3
        ), f"Should find at least 3 references in Vue components, found {len(vue_refs)}: {[ref.get('uri', '') for ref in vue_refs]}"
        # Verify specific components that use the store
        expected_vue_files = ["App.vue", "CalculatorInput.vue", "CalculatorDisplay.vue"]
        found_components = []
        for expected_file in expected_vue_files:
            matching_refs = [ref for ref in vue_refs if expected_file in ref.get("uri", "")]
            if matching_refs:
                found_components.append(expected_file)
        assert len(found_components) > 0, (
            f"Should find references in at least one component that uses the store. "
            f"Expected any of {expected_vue_files}, found references in: {[ref.get('uri', '') for ref in vue_refs]}"
        )

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_cross_file_references_typescript_to_vue(self, language_server: SolidLanguageServer) -> None:
        """References to a shared TypeScript type must be found in its TS consumers."""
        types_file = os.path.join("src", "types", "index.ts")
        types_symbols = language_server.request_document_symbols(types_file).get_all_symbols_and_roots()
        types_symbol_names = [s.get("name") for s in types_symbols[0]]
        # Operation type is used in calculator.ts and CalculatorInput.vue
        assert "Operation" in types_symbol_names, "Operation type should exist in types file"
        operation_symbol = None
        for sym in types_symbols[0]:
            if sym.get("name") == "Operation":
                operation_symbol = sym
                break
        # Guard: skip (not fail) if the fixture no longer defines the symbol
        if not operation_symbol or "selectionRange" not in operation_symbol:
            pytest.skip("Operation type symbol not found - test fixture may need updating")
        # Request references for the Operation type
        sel_start = operation_symbol["selectionRange"]["start"]
        refs = language_server.request_references(types_file, sel_start["line"], sel_start["character"])
        # Verify we found references: definition + usage in calculator.ts and Vue files
        assert len(refs) >= 2, f"Operation type should have at least 2 references (definition + usages), found {len(refs)} references"
        # The Operation type should be referenced in both .ts files (calculator.ts) and potentially .vue files
        all_ref_uris = [ref.get("uri", "") for ref in refs]
        # "types" is excluded so the defining file's own directory doesn't count as a consumer
        has_ts_refs = any(".ts" in uri and "types" not in uri for uri in all_ref_uris)
        assert (
            has_ts_refs
        ), f"Operation type should be referenced in TypeScript files like calculator.ts. Found references in: {all_ref_uris}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_reference_deduplication(self, language_server: SolidLanguageServer) -> None:
        """The dual-LSP setup must not report the same reference location twice."""
        store_file = os.path.join("src", "stores", "calculator.ts")
        store_symbols = language_server.request_document_symbols(store_file).get_all_symbols_and_roots()
        # Find a commonly-used symbol (useCalculatorStore)
        store_symbol = None
        for sym in store_symbols[0]:
            if sym.get("name") == "useCalculatorStore":
                store_symbol = sym
                break
        if not store_symbol or "selectionRange" not in store_symbol:
            pytest.skip("useCalculatorStore symbol not found - test fixture may need updating")
        # Request references
        sel_start = store_symbol["selectionRange"]["start"]
        refs = language_server.request_references(store_file, sel_start["line"], sel_start["character"])
        # Check for duplicate references (same file, line, and character)
        seen_locations = set()
        duplicates = []
        for ref in refs:
            # Create a unique key for this reference location
            uri = ref.get("uri", "")
            if "range" in ref:
                line = ref["range"]["start"]["line"]
                character = ref["range"]["start"]["character"]
                location_key = (uri, line, character)
                if location_key in seen_locations:
                    duplicates.append(location_key)
                else:
                    seen_locations.add(location_key)
        assert len(duplicates) == 0, (
            f"Found {len(duplicates)} duplicate reference locations. "
            f"The dual-LSP architecture should deduplicate references from both servers. "
            f"Duplicates: {duplicates}"
        )
@pytest.mark.vue
class TestVueEdgeCases:
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_symbol_tree_structure(self, language_server: SolidLanguageServer) -> None:
        """The full symbol tree must cover exactly the fixture's components, store, and composables."""
        full_tree = language_server.request_full_symbol_tree()

        # Helper to extract all file paths from symbol tree
        def extract_paths_from_tree(symbols, paths=None):
            """Recursively extract file paths from symbol tree."""
            if paths is None:
                paths = []
            if isinstance(symbols, list):
                for symbol in symbols:
                    extract_paths_from_tree(symbol, paths)
            elif isinstance(symbols, dict):
                # Check if this symbol has a location
                if "location" in symbols and "uri" in symbols["location"]:
                    uri = symbols["location"]["uri"]
                    # Extract the path after file://
                    if uri.startswith("file://"):
                        file_path = uri[7:]  # Remove "file://"
                        paths.append(file_path)
                # Recurse into children
                if "children" in symbols:
                    extract_paths_from_tree(symbols["children"], paths)
            return paths

        all_paths = extract_paths_from_tree(full_tree)
        # Verify we have files from expected directories
        # Note: Symbol tree may include duplicate paths (one per symbol in file),
        # hence the set-based deduplication below.
        components_files = list({p for p in all_paths if "components" in p and ".vue" in p})
        stores_files = list({p for p in all_paths if "stores" in p and ".ts" in p})
        composables_files = list({p for p in all_paths if "composables" in p and ".ts" in p})
        assert len(components_files) == 3, (
            f"Symbol tree should include exactly 3 unique Vue components (CalculatorButton, CalculatorInput, CalculatorDisplay). "
            f"Found {len(components_files)} unique component files: {[p.split('/')[-1] for p in sorted(components_files)]}"
        )
        assert len(stores_files) == 1, (
            f"Symbol tree should include exactly 1 unique store file (calculator.ts). "
            f"Found {len(stores_files)} unique store files: {[p.split('/')[-1] for p in sorted(stores_files)]}"
        )
        assert len(composables_files) == 2, (
            f"Symbol tree should include exactly 2 unique composable files (useFormatter.ts, useTheme.ts). "
            f"Found {len(composables_files)} unique composable files: {[p.split('/')[-1] for p in sorted(composables_files)]}"
        )
        # Verify specific expected files exist in the tree
        expected_files = [
            "CalculatorButton.vue",
            "CalculatorInput.vue",
            "CalculatorDisplay.vue",
            "App.vue",
            "calculator.ts",
            "useFormatter.ts",
            "useTheme.ts",
        ]
        for expected_file in expected_files:
            matching_files = [p for p in all_paths if expected_file in p]
            assert len(matching_files) > 0, f"Expected file '{expected_file}' should be in symbol tree. All paths: {all_paths}"
@pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
def test_document_overview(self, language_server: SolidLanguageServer) -> None:
app_file = os.path.join("src", "App.vue")
overview = language_server.request_document_overview(app_file)
# Overview should return a list of top-level symbols
assert isinstance(overview, list), f"Overview should be a list, got: {type(overview)}"
assert len(overview) >= 1, f"App.vue should have at least 1 top-level symbol in overview, got {len(overview)}"
# Extract symbol names from overview
symbol_names = [s.get("name") for s in overview if isinstance(s, dict)]
# Vue LSP returns SFC structure (template/script/style sections) for .vue files
# This is expected behavior - overview shows the file's high-level structure
assert (
len(symbol_names) >= 1
), f"Should have at least 1 symbol name in overview (e.g., 'App' or SFC section), got {len(symbol_names)}: {symbol_names}"
# Test overview for a TypeScript file
store_file = os.path.join("src", "stores", "calculator.ts")
store_overview = language_server.request_document_overview(store_file)
assert isinstance(store_overview, list), f"Store overview should be a list, got: {type(store_overview)}"
assert len(store_overview) >= 1, f"calculator.ts should have at least 1 top-level symbol in overview, got {len(store_overview)}"
store_symbol_names = [s.get("name") for s in store_overview if isinstance(s, dict)]
assert (
"useCalculatorStore" in store_symbol_names
), f"useCalculatorStore should be in store file overview. Found {len(store_symbol_names)} symbols: {store_symbol_names}"
# Test overview for another Vue component
button_file = os.path.join("src", "components", "CalculatorButton.vue")
button_overview = language_server.request_document_overview(button_file)
assert isinstance(button_overview, list), f"Button overview should be a list, got: {type(button_overview)}"
assert (
len(button_overview) >= 1
), f"CalculatorButton.vue should have at least 1 top-level symbol in overview, got {len(button_overview)}"
# For Vue files, overview provides SFC structure which is useful for navigation
# The detailed symbols are available via request_document_symbols
button_symbol_names = [s.get("name") for s in button_overview if isinstance(s, dict)]
assert len(button_symbol_names) >= 1, (
f"CalculatorButton.vue should have at least 1 symbol in overview (e.g., 'CalculatorButton' or SFC section). "
f"Found {len(button_symbol_names)} symbols: {button_symbol_names}"
)
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_directory_overview(self, language_server: SolidLanguageServer) -> None:
        """Directory overviews map each contained file to its top-level symbols.

        Checks three fixture directories: src/components (exactly the three
        calculator components), src/stores (calculator.ts exposing
        useCalculatorStore) and src/composables (useFormatter.ts, useTheme.ts).
        """
        components_dir = os.path.join("src", "components")
        dir_overview = language_server.request_dir_overview(components_dir)
        # Directory overview should be a dict mapping file paths to symbol lists
        assert isinstance(dir_overview, dict), f"Directory overview should be a dict, got: {type(dir_overview)}"
        assert len(dir_overview) == 3, f"src/components directory should have exactly 3 files in overview, got {len(dir_overview)}"
        # Verify all component files are included
        expected_components = ["CalculatorButton.vue", "CalculatorInput.vue", "CalculatorDisplay.vue"]
        for expected_component in expected_components:
            # Find files that match this component name (keys are full paths,
            # so substring matching is used rather than exact equality)
            matching_files = [path for path in dir_overview.keys() if expected_component in path]
            assert len(matching_files) == 1, (
                f"Component '{expected_component}' should appear exactly once in directory overview. "
                f"Found {len(matching_files)} matches. All files: {list(dir_overview.keys())}"
            )
            # Verify the matched file has symbols
            file_path = matching_files[0]
            symbols = dir_overview[file_path]
            assert isinstance(symbols, list), f"Symbols for {file_path} should be a list, got {type(symbols)}"
            assert len(symbols) >= 1, f"Component {expected_component} should have at least 1 symbol in overview, got {len(symbols)}"
        # Test overview for stores directory
        stores_dir = os.path.join("src", "stores")
        stores_overview = language_server.request_dir_overview(stores_dir)
        assert isinstance(stores_overview, dict), f"Stores overview should be a dict, got: {type(stores_overview)}"
        assert (
            len(stores_overview) == 1
        ), f"src/stores directory should have exactly 1 file (calculator.ts) in overview, got {len(stores_overview)}"
        # Verify calculator.ts is included
        calculator_files = [path for path in stores_overview.keys() if "calculator.ts" in path]
        assert len(calculator_files) == 1, (
            f"calculator.ts should appear exactly once in stores directory overview. "
            f"Found {len(calculator_files)} matches. All files: {list(stores_overview.keys())}"
        )
        # Verify the store file has symbols
        store_path = calculator_files[0]
        store_symbols = stores_overview[store_path]
        store_symbol_names = [s.get("name") for s in store_symbols if isinstance(s, dict)]
        assert (
            "useCalculatorStore" in store_symbol_names
        ), f"calculator.ts should have useCalculatorStore in overview. Found {len(store_symbol_names)} symbols: {store_symbol_names}"
        # Test overview for composables directory
        composables_dir = os.path.join("src", "composables")
        composables_overview = language_server.request_dir_overview(composables_dir)
        assert isinstance(composables_overview, dict), f"Composables overview should be a dict, got: {type(composables_overview)}"
        assert (
            len(composables_overview) == 2
        ), f"src/composables directory should have exactly 2 files in overview, got {len(composables_overview)}"
        # Verify composable files are included
        expected_composables = ["useFormatter.ts", "useTheme.ts"]
        for expected_composable in expected_composables:
            matching_files = [path for path in composables_overview.keys() if expected_composable in path]
            assert len(matching_files) == 1, (
                f"Composable '{expected_composable}' should appear exactly once in directory overview. "
                f"Found {len(matching_files)} matches. All files: {list(composables_overview.keys())}"
            )
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/vue/test_vue_basic.py",
"license": "MIT License",
"lines": 288,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/vue/test_vue_error_cases.py | import os
import sys
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
# Apply the "vue" marker to every test in this module.
pytestmark = pytest.mark.vue
# Platform flag: the TypeScript language server reacts differently to
# invalid-position requests on Windows than on macOS/Linux (see
# TypeScriptServerBehavior).
IS_WINDOWS = sys.platform == "win32"
class TypeScriptServerBehavior:
    """Encodes how the TypeScript language server reacts to invalid positions.

    Windows: the server answers such requests with empty results.
    macOS/Linux: the server raises errors whose messages mention
    "Bad line number" or "Debug Failure".
    """

    @staticmethod
    def returns_empty_on_invalid_position() -> bool:
        # Windows silently yields empty results instead of raising.
        return IS_WINDOWS

    @staticmethod
    def raises_on_invalid_position() -> bool:
        # Exactly the platforms that do not return empty results raise.
        return not TypeScriptServerBehavior.returns_empty_on_invalid_position()
class TestVueInvalidPositions:
    """Requests at invalid positions must be handled gracefully, not crash."""

    @staticmethod
    def _assert_platform_appropriate_failure(invoke, check_windows_result) -> None:
        """Run *invoke* (a zero-arg request at an invalid position) and verify
        the platform-specific outcome.

        On Windows the TypeScript server answers with an empty result, which
        *check_windows_result* validates; on macOS/Linux the server raises a
        SolidLSPException whose message mentions "Bad line number" or
        "Debug Failure".
        """
        from solidlsp.ls_exceptions import SolidLSPException

        if TypeScriptServerBehavior.returns_empty_on_invalid_position():
            check_windows_result(invoke())
        else:
            with pytest.raises(SolidLSPException) as exc_info:
                invoke()
            assert "Bad line number" in str(exc_info.value) or "Debug Failure" in str(exc_info.value)

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_negative_line_number(self, language_server: SolidLanguageServer) -> None:
        """A negative line number should yield None or an empty dict, not crash."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        result = language_server.request_containing_symbol(file_path, -1, 0)
        assert result is None or result == {}, f"Negative line number should return None or empty dict, got: {result}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_negative_character_number(self, language_server: SolidLanguageServer) -> None:
        """Test requesting containing symbol with negative character number.

        Expected behavior: Should return None or empty dict, not crash.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        result = language_server.request_containing_symbol(file_path, 10, -1)
        assert result is None or result == {}, f"Negative character number should return None or empty dict, got: {result}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_line_number_beyond_file_length(self, language_server: SolidLanguageServer) -> None:
        """Test requesting containing symbol beyond file length.

        Expected behavior: Raises IndexError when trying to access a line
        beyond file bounds.  This happens in the wrapper code before the
        request ever reaches the language server.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # The wrapper raises IndexError while checking whether the line is empty
        with pytest.raises(IndexError) as exc_info:
            language_server.request_containing_symbol(file_path, 99999, 0)
        assert "list index out of range" in str(exc_info.value), f"Expected 'list index out of range' error, got: {exc_info.value}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_character_number_beyond_line_length(self, language_server: SolidLanguageServer) -> None:
        """Test requesting containing symbol beyond line length.

        Expected behavior: Should return None or empty dict, not crash.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        result = language_server.request_containing_symbol(file_path, 10, 99999)
        assert result is None or result == {}, f"Character beyond line length should return None or empty dict, got: {result}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_references_at_negative_line(self, language_server: SolidLanguageServer) -> None:
        """Test requesting references with negative line number."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")

        def check(result):
            assert result == [], f"Expected empty list on Windows, got: {result}"

        self._assert_platform_appropriate_failure(lambda: language_server.request_references(file_path, -1, 0), check)

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_definition_at_invalid_position(self, language_server: SolidLanguageServer) -> None:
        """Test requesting definition at invalid position."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")

        def check(result):
            assert result == [], f"Expected empty list on Windows, got: {result}"

        self._assert_platform_appropriate_failure(lambda: language_server.request_definition(file_path, -1, 0), check)
class TestVueNonExistentFiles:
    """Tests for handling non-existent files.

    Each request against a missing file may either raise (any exception type
    is acceptable) or return an empty/None result.  Result validation is kept
    OUTSIDE the try block so a failing assertion propagates; with the asserts
    inside a ``try ... except Exception`` the AssertionError itself would be
    swallowed and the test could never fail.
    """

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_document_symbols_nonexistent_file(self, language_server: SolidLanguageServer) -> None:
        """Test requesting document symbols from non-existent file.

        Expected behavior: Should raise an exception or return an empty result.
        """
        nonexistent_file = os.path.join("src", "components", "NonExistent.vue")
        try:
            result = language_server.request_document_symbols(nonexistent_file)
        except Exception:
            # Raising (e.g. FileNotFoundError) is an acceptable outcome for a
            # missing file.
            return
        # No exception: the server must then report an empty symbol set.
        symbols = result.get_all_symbols_and_roots()
        assert len(symbols[0]) == 0, f"Non-existent file should return empty symbols, got {len(symbols[0])} symbols"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_containing_symbol_nonexistent_file(self, language_server: SolidLanguageServer) -> None:
        """Test requesting containing symbol from non-existent file.

        Expected behavior: Should raise an exception or return None/empty dict.
        """
        nonexistent_file = os.path.join("src", "components", "NonExistent.vue")
        try:
            result = language_server.request_containing_symbol(nonexistent_file, 10, 10)
        except Exception:
            # Raising is an acceptable outcome for a missing file.
            return
        assert result is None or result == {}, f"Non-existent file should return None or empty dict, got: {result}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_references_nonexistent_file(self, language_server: SolidLanguageServer) -> None:
        """Test requesting references from non-existent file.

        Expected behavior: Should raise an exception or return None/empty list.
        """
        nonexistent_file = os.path.join("src", "components", "NonExistent.vue")
        try:
            result = language_server.request_references(nonexistent_file, 10, 10)
        except Exception:
            # Raising is an acceptable outcome for a missing file.
            return
        assert result is None or isinstance(result, list), f"Non-existent file should return None or list, got: {result}"
        if isinstance(result, list):
            assert len(result) == 0, f"Non-existent file should return empty list, got {len(result)} references"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_definition_nonexistent_file(self, language_server: SolidLanguageServer) -> None:
        """Test requesting definition from non-existent file.

        Expected behavior: Should raise an exception or return an empty list.
        """
        nonexistent_file = os.path.join("src", "components", "NonExistent.vue")
        try:
            result = language_server.request_definition(nonexistent_file, 10, 10)
        except Exception:
            # Raising is an acceptable outcome for a missing file.
            return
        assert isinstance(result, list), f"request_definition should return a list, got: {type(result)}"
        assert len(result) == 0, f"Non-existent file should return empty list, got {len(result)} definitions"
class TestVueUndefinedSymbols:
    """Tests for handling undefined or unreferenced symbols."""
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_references_for_unreferenced_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test requesting references for a symbol that has no references.
        Expected behavior: Should return empty list (only the definition itself if include_self=True).
        """
        # Find a symbol that likely has no external references
        file_path = os.path.join("src", "components", "CalculatorButton.vue")
        # Get document symbols
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Find pressCount - this is exposed but may not be referenced elsewhere
        press_count_symbol = next((s for s in symbols[0] if s.get("name") == "pressCount"), None)
        if not press_count_symbol or "selectionRange" not in press_count_symbol:
            pytest.skip("pressCount symbol not found - test fixture may need updating")
        # Request references without include_self
        sel_start = press_count_symbol["selectionRange"]["start"]
        refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
        # Should return a list (may be empty or contain only definition)
        assert isinstance(refs, list), f"request_references should return a list, got {type(refs)}"
        # For an unreferenced symbol, should have 0-1 references (0 without include_self, 1 with)
        # The exact count depends on the language server implementation
        # NOTE(review): the <= 5 bound is deliberately loose; it documents
        # behavior rather than pinning an exact reference count
        assert len(refs) <= 5, (
            f"pressCount should have few or no external references. "
            f"Got {len(refs)} references. This is not necessarily an error, just documenting behavior."
        )
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_containing_symbol_at_whitespace_only_line(self, language_server: SolidLanguageServer) -> None:
        """Test requesting containing symbol at a whitespace-only line.
        Expected behavior: Should return None, empty dict, or the parent symbol.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Try position at line 1 (typically a blank line or template start in Vue SFCs)
        result = language_server.request_containing_symbol(file_path, 1, 0)
        # Should handle gracefully - return None, empty dict, or a valid parent symbol
        assert (
            result is None or result == {} or isinstance(result, dict)
        ), f"Whitespace line should return None, empty dict, or valid symbol. Got: {result}"
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_definition_at_keyword_position(self, language_server: SolidLanguageServer) -> None:
        """Test requesting definition at language keyword position.
        Expected behavior: Should return empty list or handle gracefully.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Try to get definition at a keyword like "const", "import", etc.
        # Line 2 typically has "import" statement - try position on "import" keyword
        result = language_server.request_definition(file_path, 2, 0)
        # Should handle gracefully - return empty list or valid definitions
        assert isinstance(result, list), f"request_definition should return a list, got {type(result)}"
class TestVueEdgeCasePositions:
    """Tests for edge case positions (0,0 and file boundaries)."""
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_containing_symbol_at_file_start(self, language_server: SolidLanguageServer) -> None:
        """Test requesting containing symbol at position (0,0).
        Expected behavior: Should return None, empty dict, or a valid symbol.
        This position typically corresponds to the start of the file (e.g., <template> tag).
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Request containing symbol at position 0,0 (file start)
        result = language_server.request_containing_symbol(file_path, 0, 0)
        # Should handle gracefully
        assert (
            result is None or result == {} or isinstance(result, dict)
        ), f"Position 0,0 should return None, empty dict, or valid symbol. Got: {result}"
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_references_at_file_start(self, language_server: SolidLanguageServer) -> None:
        """Test requesting references at position (0,0).
        Expected behavior: Should return None or empty list.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Request references at position 0,0 (file start)
        result = language_server.request_references(file_path, 0, 0)
        # Should handle gracefully; only the container type is asserted here
        assert result is None or isinstance(result, list), f"Position 0,0 should return None or list. Got: {type(result)}"
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_definition_at_file_start(self, language_server: SolidLanguageServer) -> None:
        """Test requesting definition at position (0,0).
        Expected behavior: Should return empty list.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Request definition at position 0,0 (file start)
        result = language_server.request_definition(file_path, 0, 0)
        # Should handle gracefully; only the container type is asserted here
        assert isinstance(result, list), f"request_definition should return a list. Got: {type(result)}"
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_containing_symbol_in_template_section(self, language_server: SolidLanguageServer) -> None:
        """Test requesting containing symbol in the template section.
        Expected behavior: Template positions typically have no containing symbol (return None or empty).
        The Vue language server may not track template symbols the same way as script symbols.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Position likely in template section (early in file, before <script setup>)
        # Exact line depends on file structure, but line 5-10 is often template
        # (assumes the fixture keeps its template first — TODO confirm if fixture changes)
        result = language_server.request_containing_symbol(file_path, 5, 10)
        # Should handle gracefully - template doesn't have containing symbols in the same way
        assert (
            result is None or result == {} or isinstance(result, dict)
        ), f"Template position should return None, empty dict, or valid symbol. Got: {result}"
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_zero_character_positions(self, language_server: SolidLanguageServer) -> None:
        """Test requesting symbols at character position 0 (start of lines).
        Expected behavior: Should handle gracefully, may or may not find symbols.
        """
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Test multiple lines at character 0
        for line in [0, 10, 20, 30]:
            result = language_server.request_containing_symbol(file_path, line, 0)
            # Should handle gracefully
            assert (
                result is None or result == {} or isinstance(result, dict)
            ), f"Line {line}, character 0 should return None, empty dict, or valid symbol. Got: {result}"
class TestVueTypescriptFileErrors:
    """Tests for error handling in TypeScript files within Vue projects."""
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_typescript_file_invalid_position(self, language_server: SolidLanguageServer) -> None:
        """Test requesting symbols from TypeScript file at invalid position.
        Expected behavior: Should handle gracefully.
        """
        file_path = os.path.join("src", "stores", "calculator.ts")
        # Request containing symbol at invalid position
        result = language_server.request_containing_symbol(file_path, -1, -1)
        # Should handle gracefully
        assert result is None or result == {}, f"Invalid position in .ts file should return None or empty dict. Got: {result}"
    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_typescript_file_beyond_bounds(self, language_server: SolidLanguageServer) -> None:
        """Test requesting symbols from TypeScript file beyond file bounds.
        Expected behavior: Raises IndexError when trying to access line beyond file bounds.
        """
        file_path = os.path.join("src", "stores", "calculator.ts")
        # Request containing symbol beyond file bounds
        # The wrapper code will raise an IndexError when checking if the line is empty
        # (so the request never reaches the TypeScript server itself)
        with pytest.raises(IndexError) as exc_info:
            language_server.request_containing_symbol(file_path, 99999, 99999)
        # Verify it's an index error for list access
        assert "list index out of range" in str(exc_info.value), f"Expected 'list index out of range' error, got: {exc_info.value}"
class TestVueReferenceEdgeCases:
    """Tests for edge cases in reference finding."""

    @staticmethod
    def _assert_platform_appropriate_failure(invoke, check_windows_result) -> None:
        """Run *invoke* (a zero-arg request at an invalid position) and verify
        the platform-specific outcome.

        On Windows the TypeScript server answers with an empty/None result,
        which *check_windows_result* validates; on macOS/Linux the server
        raises a SolidLSPException whose message mentions "Bad line number"
        or "Debug Failure".
        """
        from solidlsp.ls_exceptions import SolidLSPException

        if TypeScriptServerBehavior.returns_empty_on_invalid_position():
            check_windows_result(invoke())
        else:
            with pytest.raises(SolidLSPException) as exc_info:
                invoke()
            assert "Bad line number" in str(exc_info.value) or "Debug Failure" in str(exc_info.value)

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_referencing_symbols_at_invalid_position(self, language_server: SolidLanguageServer) -> None:
        """Test requesting referencing symbols at invalid position."""
        file_path = os.path.join("src", "stores", "calculator.ts")

        def check(result):
            assert result == [], f"Expected empty list on Windows, got: {result}"

        self._assert_platform_appropriate_failure(
            lambda: list(language_server.request_referencing_symbols(file_path, -1, -1, include_self=False)), check
        )

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_defining_symbol_at_invalid_position(self, language_server: SolidLanguageServer) -> None:
        """Test requesting defining symbol at invalid position."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")

        def check(result):
            # This request returns a single symbol (or None), not a list
            assert result is None, f"Expected None on Windows, got: {result}"

        self._assert_platform_appropriate_failure(lambda: language_server.request_defining_symbol(file_path, -1, -1), check)

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_referencing_symbols_beyond_file_bounds(self, language_server: SolidLanguageServer) -> None:
        """Test requesting referencing symbols beyond file bounds."""
        file_path = os.path.join("src", "stores", "calculator.ts")

        def check(result):
            assert result == [], f"Expected empty list on Windows, got: {result}"

        self._assert_platform_appropriate_failure(
            lambda: list(language_server.request_referencing_symbols(file_path, 99999, 99999, include_self=False)), check
        )
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/vue/test_vue_error_cases.py",
"license": "MIT License",
"lines": 316,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/vue/test_vue_rename.py | import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
# Apply the "vue" marker to every test in this module.
pytestmark = pytest.mark.vue
class TestVueRename:
@pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
def test_rename_function_within_single_file(self, language_server: SolidLanguageServer) -> None:
file_path = os.path.join("src", "components", "CalculatorInput.vue")
symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
handle_digit_symbol = next((s for s in symbols[0] if s.get("name") == "handleDigit"), None)
if not handle_digit_symbol or "selectionRange" not in handle_digit_symbol:
pytest.skip("handleDigit symbol not found - test fixture may need updating")
sel_start = handle_digit_symbol["selectionRange"]["start"]
workspace_edit = language_server.request_rename_symbol_edit(file_path, sel_start["line"], sel_start["character"], "processDigit")
assert workspace_edit is not None, "Should return WorkspaceEdit for rename operation"
has_changes = "changes" in workspace_edit and workspace_edit["changes"]
has_document_changes = "documentChanges" in workspace_edit and workspace_edit["documentChanges"]
assert has_changes or has_document_changes, "WorkspaceEdit should contain either 'changes' or 'documentChanges'"
if has_changes:
changes = workspace_edit["changes"]
assert len(changes) > 0, "Should have at least one file with changes"
calculator_input_files = [uri for uri in changes.keys() if "CalculatorInput.vue" in uri]
assert len(calculator_input_files) > 0, f"Should have edits for CalculatorInput.vue. Found edits for: {list(changes.keys())}"
file_edits = changes[calculator_input_files[0]]
assert len(file_edits) > 0, "Should have at least one TextEdit for the renamed symbol"
for edit in file_edits:
assert "range" in edit, "TextEdit should have a range"
assert "newText" in edit, "TextEdit should have newText"
assert edit["newText"] == "processDigit", f"newText should be 'processDigit', got {edit['newText']}"
assert "start" in edit["range"], "Range should have start position"
assert "end" in edit["range"], "Range should have end position"
assert "line" in edit["range"]["start"], "Start position should have line number"
assert "character" in edit["range"]["start"], "Start position should have character offset"
elif has_document_changes:
document_changes = workspace_edit["documentChanges"]
assert isinstance(document_changes, list), "documentChanges should be a list"
assert len(document_changes) > 0, "Should have at least one document change"
calculator_input_changes = [dc for dc in document_changes if "CalculatorInput.vue" in dc.get("textDocument", {}).get("uri", "")]
assert len(calculator_input_changes) > 0, "Should have edits for CalculatorInput.vue"
for change in calculator_input_changes:
assert "textDocument" in change, "Document change should have textDocument"
assert "edits" in change, "Document change should have edits"
edits = change["edits"]
assert len(edits) > 0, "Should have at least one TextEdit for the renamed symbol"
for edit in edits:
assert "range" in edit, "TextEdit should have a range"
assert "newText" in edit, "TextEdit should have newText"
assert edit["newText"] == "processDigit", f"newText should be 'processDigit', got {edit['newText']}"
assert "start" in edit["range"], "Range should have start position"
assert "end" in edit["range"], "Range should have end position"
assert "line" in edit["range"]["start"], "Start position should have line number"
assert "character" in edit["range"]["start"], "Start position should have character offset"
@pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
def test_rename_composable_function_cross_file(self, language_server: SolidLanguageServer) -> None:
composable_file = os.path.join("src", "composables", "useFormatter.ts")
symbols = language_server.request_document_symbols(composable_file).get_all_symbols_and_roots()
use_formatter_symbol = next((s for s in symbols[0] if s.get("name") == "useFormatter"), None)
if not use_formatter_symbol or "selectionRange" not in use_formatter_symbol:
pytest.skip("useFormatter symbol not found - test fixture may need updating")
sel_start = use_formatter_symbol["selectionRange"]["start"]
workspace_edit = language_server.request_rename_symbol_edit(
composable_file, sel_start["line"], sel_start["character"], "useNumberFormatter"
)
assert workspace_edit is not None, "Should return WorkspaceEdit for cross-file rename"
has_changes = "changes" in workspace_edit and workspace_edit["changes"]
has_document_changes = "documentChanges" in workspace_edit and workspace_edit["documentChanges"]
assert has_changes or has_document_changes, "WorkspaceEdit should contain either 'changes' or 'documentChanges'"
if has_changes:
changes = workspace_edit["changes"]
assert len(changes) > 0, "Should have at least one file with changes"
composable_files = [uri for uri in changes.keys() if "useFormatter.ts" in uri]
assert len(composable_files) > 0, f"Should have edits for useFormatter.ts (definition). Found edits for: {list(changes.keys())}"
for uri, edits in changes.items():
assert len(edits) > 0, f"File {uri} should have at least one edit"
for edit in edits:
assert "range" in edit, f"TextEdit in {uri} should have a range"
assert "newText" in edit, f"TextEdit in {uri} should have newText"
assert edit["newText"] == "useNumberFormatter", f"newText should be 'useNumberFormatter', got {edit['newText']}"
assert "start" in edit["range"], f"Range in {uri} should have start position"
assert "end" in edit["range"], f"Range in {uri} should have end position"
elif has_document_changes:
document_changes = workspace_edit["documentChanges"]
assert isinstance(document_changes, list), "documentChanges should be a list"
assert len(document_changes) > 0, "Should have at least one document change"
composable_changes = [dc for dc in document_changes if "useFormatter.ts" in dc.get("textDocument", {}).get("uri", "")]
assert (
len(composable_changes) > 0
), f"Should have edits for useFormatter.ts (definition). Found changes for: {[dc.get('textDocument', {}).get('uri', '') for dc in document_changes]}"
for change in document_changes:
assert "textDocument" in change, "Document change should have textDocument"
assert "edits" in change, "Document change should have edits"
uri = change["textDocument"]["uri"]
edits = change["edits"]
assert len(edits) > 0, f"File {uri} should have at least one edit"
for edit in edits:
assert "range" in edit, f"TextEdit in {uri} should have a range"
assert "newText" in edit, f"TextEdit in {uri} should have newText"
assert edit["newText"] == "useNumberFormatter", f"newText should be 'useNumberFormatter', got {edit['newText']}"
assert "start" in edit["range"], f"Range in {uri} should have start position"
assert "end" in edit["range"], f"Range in {uri} should have end position"
@pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
def test_rename_verifies_correct_file_paths_and_ranges(self, language_server: SolidLanguageServer) -> None:
    """Rename `appTitle` -> `applicationTitle` in App.vue and validate the full WorkspaceEdit structure.

    Unlike the other rename tests, this one checks every structural detail of the
    returned edit: URI scheme, range/position field presence and types, and that
    every inserted text equals the new name.
    """
    file_path = os.path.join("src", "App.vue")
    # Locate the appTitle symbol so we can rename at its precise selection range.
    symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
    app_title_symbol = next((s for s in symbols[0] if s.get("name") == "appTitle"), None)
    if not app_title_symbol or "selectionRange" not in app_title_symbol:
        pytest.skip("appTitle symbol not found - test fixture may need updating")
    sel_start = app_title_symbol["selectionRange"]["start"]
    workspace_edit = language_server.request_rename_symbol_edit(
        file_path, sel_start["line"], sel_start["character"], "applicationTitle"
    )
    assert workspace_edit is not None, "Should return WorkspaceEdit for rename operation"
    assert isinstance(workspace_edit, dict), "WorkspaceEdit should be a dictionary"
    # LSP servers may report edits either as a flat `changes` map or as `documentChanges`.
    has_changes = "changes" in workspace_edit and workspace_edit["changes"]
    has_document_changes = "documentChanges" in workspace_edit and workspace_edit["documentChanges"]
    assert has_changes or has_document_changes, "WorkspaceEdit must have 'changes' or 'documentChanges'"
    if has_changes:
        # `changes` variant: URI -> list of TextEdits.
        changes = workspace_edit["changes"]
        assert isinstance(changes, dict), "changes should be a dict mapping URIs to TextEdit lists"
        assert len(changes) > 0, "Should have edits for at least one file"
        for uri, edits in changes.items():
            assert isinstance(uri, str), f"URI should be a string, got {type(uri)}"
            assert uri.startswith("file://"), f"URI should start with 'file://', got {uri}"
            assert isinstance(edits, list), f"Edits for {uri} should be a list, got {type(edits)}"
            assert len(edits) > 0, f"Should have at least one edit for {uri}"
            for idx, edit in enumerate(edits):
                assert isinstance(edit, dict), f"Edit {idx} in {uri} should be a dict, got {type(edit)}"
                assert "range" in edit, f"Edit {idx} in {uri} missing 'range'"
                assert "newText" in edit, f"Edit {idx} in {uri} missing 'newText'"
                range_obj = edit["range"]
                assert "start" in range_obj, f"Edit {idx} range in {uri} missing 'start'"
                assert "end" in range_obj, f"Edit {idx} range in {uri} missing 'end'"
                # Both positions must carry non-negative integer line/character fields.
                for pos_name in ["start", "end"]:
                    pos = range_obj[pos_name]
                    assert "line" in pos, f"Edit {idx} range {pos_name} in {uri} missing 'line'"
                    assert "character" in pos, f"Edit {idx} range {pos_name} in {uri} missing 'character'"
                    assert isinstance(pos["line"], int), f"Line should be int, got {type(pos['line'])}"
                    assert isinstance(pos["character"], int), f"Character should be int, got {type(pos['character'])}"
                    assert pos["line"] >= 0, f"Line number should be >= 0, got {pos['line']}"
                    assert pos["character"] >= 0, f"Character offset should be >= 0, got {pos['character']}"
                assert isinstance(edit["newText"], str), f"newText should be string, got {type(edit['newText'])}"
                assert edit["newText"] == "applicationTitle", f"newText should be 'applicationTitle', got {edit['newText']}"
    elif has_document_changes:
        # `documentChanges` variant: list of {textDocument, edits} entries.
        document_changes = workspace_edit["documentChanges"]
        assert isinstance(document_changes, list), "documentChanges should be a list"
        assert len(document_changes) > 0, "Should have at least one document change"
        for change in document_changes:
            assert isinstance(change, dict), "Each document change should be a dict"
            assert "textDocument" in change, "Document change should have textDocument"
            assert "edits" in change, "Document change should have edits"
            text_doc = change["textDocument"]
            assert "uri" in text_doc, "textDocument should have uri"
            assert text_doc["uri"].startswith("file://"), f"URI should start with 'file://', got {text_doc['uri']}"
            edits = change["edits"]
            assert isinstance(edits, list), "edits should be a list"
            assert len(edits) > 0, "Should have at least one edit"
            for idx, edit in enumerate(edits):
                assert isinstance(edit, dict), f"Edit {idx} in {text_doc['uri']} should be a dict, got {type(edit)}"
                assert "range" in edit, f"Edit {idx} in {text_doc['uri']} missing 'range'"
                assert "newText" in edit, f"Edit {idx} in {text_doc['uri']} missing 'newText'"
                range_obj = edit["range"]
                assert "start" in range_obj, f"Edit {idx} range in {text_doc['uri']} missing 'start'"
                assert "end" in range_obj, f"Edit {idx} range in {text_doc['uri']} missing 'end'"
                # Same position shape checks as in the `changes` branch.
                for pos_name in ["start", "end"]:
                    pos = range_obj[pos_name]
                    assert "line" in pos, f"Edit {idx} range {pos_name} in {text_doc['uri']} missing 'line'"
                    assert "character" in pos, f"Edit {idx} range {pos_name} in {text_doc['uri']} missing 'character'"
                    assert isinstance(pos["line"], int), f"Line should be int, got {type(pos['line'])}"
                    assert isinstance(pos["character"], int), f"Character should be int, got {type(pos['character'])}"
                    assert pos["line"] >= 0, f"Line number should be >= 0, got {pos['line']}"
                    assert pos["character"] >= 0, f"Character offset should be >= 0, got {pos['character']}"
                assert isinstance(edit["newText"], str), f"newText should be string, got {type(edit['newText'])}"
                assert edit["newText"] == "applicationTitle", f"newText should be 'applicationTitle', got {edit['newText']}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/vue/test_vue_rename.py",
"license": "MIT License",
"lines": 174,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/vue/test_vue_symbol_retrieval.py | import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_types import SymbolKind
pytestmark = pytest.mark.vue
class TestVueSymbolRetrieval:
    """Symbol-retrieval tests against the Vue test project.

    Exercises containing-symbol lookup, referencing-symbol lookup, cross-file
    definition resolution, and defining-symbol resolution through the Vue
    language server. All tests use the `language_server` fixture (indirect
    parametrization with Language.VUE) and skip when the expected fixture
    symbol is missing.
    """

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_request_containing_symbol_script_setup_function(self, language_server: SolidLanguageServer) -> None:
        """A position inside handleDigit's body resolves to the handleDigit symbol."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # First, get the document symbols to find the handleDigit function
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        handle_digit_symbol = next((s for s in symbols[0] if s.get("name") == "handleDigit"), None)
        if not handle_digit_symbol or "range" not in handle_digit_symbol:
            pytest.skip("handleDigit symbol not found - test fixture may need updating")
        # Get a position inside the handleDigit function body
        # We'll use a line a few lines after the function start
        func_start_line = handle_digit_symbol["range"]["start"]["line"]
        position_inside_func = func_start_line + 1
        position_character = 4
        # Request the containing symbol for this position
        containing_symbol = language_server.request_containing_symbol(
            file_path, position_inside_func, position_character, include_body=True
        )
        # Verify we found the correct containing symbol
        assert containing_symbol is not None, "Should find containing symbol inside handleDigit function"
        assert containing_symbol["name"] == "handleDigit", f"Expected handleDigit, got {containing_symbol.get('name')}"
        # Servers may report script-setup functions under different symbol kinds.
        assert containing_symbol["kind"] in [
            SymbolKind.Function,
            SymbolKind.Method,
            SymbolKind.Variable,
        ], f"Expected function-like kind, got {containing_symbol.get('kind')}"
        # Verify the body is included if available
        if "body" in containing_symbol:
            assert "handleDigit" in containing_symbol["body"].get_text(), "Function body should contain function name"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_request_containing_symbol_computed_property(self, language_server: SolidLanguageServer) -> None:
        """A position inside the formattedDisplay computed() resolves to an acceptable containing symbol."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Find the formattedDisplay computed property
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        formatted_display_symbol = next((s for s in symbols[0] if s.get("name") == "formattedDisplay"), None)
        if not formatted_display_symbol or "range" not in formatted_display_symbol:
            pytest.skip("formattedDisplay computed property not found - test fixture may need updating")
        # Get a position inside the computed property body
        computed_start_line = formatted_display_symbol["range"]["start"]["line"]
        position_inside_computed = computed_start_line + 1
        position_character = 4
        # Request the containing symbol for this position
        containing_symbol = language_server.request_containing_symbol(
            file_path, position_inside_computed, position_character, include_body=True
        )
        # Verify we found the correct containing symbol
        # The language server returns the arrow function inside computed() rather than
        # the variable name. This is technically correct from LSP's perspective.
        assert containing_symbol is not None, "Should find containing symbol inside computed property"
        assert containing_symbol["name"] in [
            "formattedDisplay",
            "computed() callback",
        ], f"Expected formattedDisplay or computed() callback, got {containing_symbol.get('name')}"
        assert containing_symbol["kind"] in [
            SymbolKind.Property,
            SymbolKind.Variable,
            SymbolKind.Function,
        ], f"Expected property/variable/function kind for computed, got {containing_symbol.get('kind')}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_request_containing_symbol_no_containing_symbol(self, language_server: SolidLanguageServer) -> None:
        """A position in the import section has no containing symbol (None or empty dict)."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Position in the import statements at the top of the script setup
        # Line 1-6 contain imports in CalculatorInput.vue
        import_line = 2
        import_character = 10
        # Request containing symbol for a position in the imports
        containing_symbol = language_server.request_containing_symbol(file_path, import_line, import_character)
        # Should return None or empty dictionary for positions without containing symbol
        assert (
            containing_symbol is None or containing_symbol == {}
        ), f"Expected None or empty dict for import position, got {containing_symbol}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_request_referencing_symbols_store_function(self, language_server: SolidLanguageServer) -> None:
        """References to the store's `add` action are well-formed; Vue-component hits are shape-checked."""
        store_file = os.path.join("src", "stores", "calculator.ts")
        # Find the 'add' action in the calculator store
        symbols = language_server.request_document_symbols(store_file).get_all_symbols_and_roots()
        add_symbol = next((s for s in symbols[0] if s.get("name") == "add"), None)
        if not add_symbol or "selectionRange" not in add_symbol:
            pytest.skip("add action not found in calculator store - test fixture may need updating")
        # Request referencing symbols for the add action (include_self=True to get at least the definition)
        sel_start = add_symbol["selectionRange"]["start"]
        ref_symbols = [
            ref.symbol
            for ref in language_server.request_referencing_symbols(store_file, sel_start["line"], sel_start["character"], include_self=True)
        ]
        assert isinstance(ref_symbols, list), f"request_referencing_symbols should return a list, got {type(ref_symbols)}"
        # Every returned symbol must at least carry a name and kind.
        for symbol in ref_symbols:
            assert "name" in symbol, "Referencing symbol should have a name"
            assert "kind" in symbol, "Referencing symbol should have a kind"
        vue_refs = [
            symbol for symbol in ref_symbols if "location" in symbol and "uri" in symbol["location"] and ".vue" in symbol["location"]["uri"]
        ]
        # If any references land in Vue files, spot-check those inside CalculatorInput.vue.
        if len(vue_refs) > 0:
            calculator_input_refs = [
                ref
                for ref in vue_refs
                if "location" in ref and "uri" in ref["location"] and "CalculatorInput.vue" in ref["location"]["uri"]
            ]
            for ref in calculator_input_refs:
                assert "name" in ref, "Reference should have name"
                assert "location" in ref, "Reference should have location"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_request_referencing_symbols_composable(self, language_server: SolidLanguageServer) -> None:
        """The useFormatter composable is referenced from CalculatorInput.vue."""
        composable_file = os.path.join("src", "composables", "useFormatter.ts")
        # Find the useFormatter composable function
        symbols = language_server.request_document_symbols(composable_file).get_all_symbols_and_roots()
        use_formatter_symbol = next((s for s in symbols[0] if s.get("name") == "useFormatter"), None)
        if not use_formatter_symbol or "selectionRange" not in use_formatter_symbol:
            pytest.skip("useFormatter composable not found - test fixture may need updating")
        # Request referencing symbols for the composable
        sel_start = use_formatter_symbol["selectionRange"]["start"]
        ref_symbols = [
            ref.symbol for ref in language_server.request_referencing_symbols(composable_file, sel_start["line"], sel_start["character"])
        ]
        # Verify we found references - useFormatter is imported and used in CalculatorInput.vue
        assert (
            len(ref_symbols) >= 1
        ), f"useFormatter should have at least 1 reference (used in CalculatorInput.vue), found {len(ref_symbols)} references"
        # Check for references in Vue components
        vue_refs = [
            symbol for symbol in ref_symbols if "location" in symbol and "uri" in symbol["location"] and ".vue" in symbol["location"]["uri"]
        ]
        # CalculatorInput.vue imports and uses useFormatter
        assert len(vue_refs) >= 1, f"Should find at least 1 Vue component reference to useFormatter, found {len(vue_refs)}"
        # Verify we found reference in CalculatorInput.vue specifically
        has_calculator_input_ref = any(
            "CalculatorInput.vue" in ref["location"]["uri"] for ref in vue_refs if "location" in ref and "uri" in ref["location"]
        )
        assert has_calculator_input_ref, (
            f"Should find reference to useFormatter in CalculatorInput.vue. "
            f"Found references in: {[ref['location']['uri'] for ref in vue_refs if 'location' in ref and 'uri' in ref['location']]}"
        )

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_vue_component_cross_references(self, language_server: SolidLanguageServer) -> None:
        """Definition, references, and document symbols all agree across two Vue components."""
        input_file = os.path.join("src", "components", "CalculatorInput.vue")
        button_file = os.path.join("src", "components", "CalculatorButton.vue")
        # Go-to-definition on the CalculatorButton import (line 4) must land in CalculatorButton.vue.
        definitions = language_server.request_definition(input_file, 4, 10)
        assert len(definitions) == 1, f"Should find exactly 1 definition for CalculatorButton import, got {len(definitions)}"
        assert (
            "CalculatorButton.vue" in definitions[0]["relativePath"]
        ), f"Definition should point to CalculatorButton.vue, got {definitions[0]['relativePath']}"
        refs = language_server.request_references(input_file, 4, 10)
        assert len(refs) >= 2, (
            f"Should find at least 2 references to CalculatorButton (import + template usages). "
            f"In CalculatorInput.vue, CalculatorButton is imported and used ~7 times in template. Found {len(refs)} references"
        )
        # The button component itself must expose its Props interface and click handler.
        button_symbols = language_server.request_document_symbols(button_file).get_all_symbols_and_roots()
        symbol_names = [s.get("name") for s in button_symbols[0]]
        assert "Props" in symbol_names, "CalculatorButton.vue should have Props interface"
        assert "handleClick" in symbol_names, "CalculatorButton.vue should have handleClick function"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_request_defining_symbol_import_resolution(self, language_server: SolidLanguageServer) -> None:
        """The defining symbol of a useCalculatorStore usage resolves to the store module."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        # Find the import position for useCalculatorStore
        # In CalculatorInput.vue (0-indexed lines):
        # Line 2: import { useCalculatorStore } from '@/stores/calculator'
        # Line 8: const store = useCalculatorStore()
        # We'll request definition at the position of "useCalculatorStore" in the usage line
        defining_symbol = language_server.request_defining_symbol(file_path, 8, 18)
        if defining_symbol is None:
            # Some language servers may not support go-to-definition at usage sites
            # Try at line 2 (import statement) instead
            defining_symbol = language_server.request_defining_symbol(file_path, 2, 18)
        # Verify we found a defining symbol
        assert defining_symbol is not None, "Should find defining symbol for useCalculatorStore"
        assert "name" in defining_symbol, "Defining symbol should have a name"
        assert defining_symbol.get("name") in [
            "useCalculatorStore",
            "calculator",
        ], f"Expected useCalculatorStore or calculator, got {defining_symbol.get('name')}"
        # Verify it points to the store file
        if "location" in defining_symbol and "uri" in defining_symbol["location"]:
            assert (
                "calculator.ts" in defining_symbol["location"]["uri"]
            ), f"Should point to calculator.ts, got {defining_symbol['location']['uri']}"

    @pytest.mark.parametrize("language_server", [Language.VUE], indirect=True)
    def test_request_defining_symbol_component_import(self, language_server: SolidLanguageServer) -> None:
        """Definition of a .vue component import points to the start of that component file."""
        file_path = os.path.join("src", "components", "CalculatorInput.vue")
        definitions = language_server.request_definition(file_path, 4, 10)
        assert len(definitions) > 0, "Should find definition for CalculatorButton import"
        definition = definitions[0]
        assert definition["relativePath"] is not None, "Definition should have a relative path"
        assert (
            "CalculatorButton.vue" in definition["relativePath"]
        ), f"Should point to CalculatorButton.vue, got {definition['relativePath']}"
        # Whole-file definitions anchor at line 0 of the .vue file.
        assert definition["range"]["start"]["line"] == 0, "Definition should point to start of .vue file"
        # request_defining_symbol may legitimately return None for whole-file targets.
        defining_symbol = language_server.request_defining_symbol(file_path, 4, 10)
        assert defining_symbol is None or "name" in defining_symbol, "If defining_symbol is found, it should have a name"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/vue/test_vue_symbol_retrieval.py",
"license": "MIT License",
"lines": 194,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:repo_dir_sync.py | # -*- coding: utf-8 -*-
import glob
import os
import shutil
from subprocess import Popen, PIPE
import re
import sys
from typing import List, Optional, Sequence
import platform
def popen(cmd):
    """Start *cmd* as a subprocess with piped stdin and stdout.

    On Windows the command is executed directly; on other platforms it is
    handed to the shell (so *cmd* may be a single command string).

    :param cmd: the command to run
    :return: the started :class:`Popen` object
    """
    use_shell = platform.system() != "Windows"
    return Popen(cmd, shell=use_shell, stdin=PIPE, stdout=PIPE)
def call(cmd):
    """Run *cmd* and return its entire standard output decoded as UTF-8.

    :param cmd: the command to run
    :return: the command's stdout as a string
    """
    process = popen(cmd)
    raw_output = process.stdout.read()
    return raw_output.decode("utf-8")
def execute(cmd, exceptionOnError=True):
    """
    Run *cmd*, wait for it to terminate and report whether it succeeded.

    :param cmd: the command to execute
    :param exceptionOnError: if True, raise an exception on error (return code not 0); if False, just return
        whether the call was successful
    :return: True if the call was successful (return code 0), False otherwise
    """
    p = popen(cmd)
    p.wait()
    success = p.returncode == 0
    # Fix: the original returned None (implicitly) on success when exceptionOnError=True;
    # now the success flag is returned consistently in all non-raising paths.
    if exceptionOnError and not success:
        raise Exception("Command failed: %s" % cmd)
    return success
def gitLog(path, arg):
    """
    Return the git log (excluding merge commits) of the repository at *path*.

    :param path: directory of the git working tree to query
    :param arg: extra arguments appended to ``git log --no-merges``
    :return: the decoded log output
    """
    oldPath = os.getcwd()
    os.chdir(path)
    # Fix: restore the working directory even if the git call raises,
    # so a failure here cannot leave the process in the wrong cwd.
    try:
        return call("git log --no-merges " + arg)
    finally:
        os.chdir(oldPath)
def gitCommit(msg):
    """Commit the currently staged changes using *msg* as the commit message.

    The message is written (UTF-8 encoded) to the shared commit-message file,
    which gitCommitWithMessageFromFile consumes and removes again.

    :param msg: the commit message text
    """
    encoded_message = msg.encode("utf-8")
    with open(COMMIT_MSG_FILENAME, "wb") as message_file:
        message_file.write(encoded_message)
    gitCommitWithMessageFromFile(COMMIT_MSG_FILENAME)
def gitCommitWithMessageFromFile(commitMsgFilename):
    """Commit staged changes with the message stored in the given file, then delete that file.

    :param commitMsgFilename: path of the file holding the commit message
    :raises FileNotFoundError: if the message file does not exist
    """
    message_file_present = os.path.exists(commitMsgFilename)
    if not message_file_present:
        raise FileNotFoundError(f"{commitMsgFilename} not found in {os.path.abspath(os.getcwd())}")
    commit_command = f"git commit --file={commitMsgFilename}"
    os.system(commit_command)
    os.unlink(commitMsgFilename)
# Temporary file used to pass a commit message to `git commit --file=...`;
# it is written by gitCommit/pull and deleted by gitCommitWithMessageFromFile.
COMMIT_MSG_FILENAME = "commitmsg.txt"
class OtherRepo:
    """A repository that embeds a copy of the library managed by :class:`LibRepo`.

    Supports two synchronisation directions:
      * :meth:`pull` — copy this repo's library directory into the lib repo and commit there;
      * :meth:`push` — copy the lib repo's library directory into this repo and commit here.

    Sync state is tracked via two marker files stored inside the library directory
    of this repo, holding the last-synced commit ids of both repositories.
    """

    # Marker file holding the last-synced commit id of the lib repo.
    SYNC_COMMIT_ID_FILE_LIB_REPO = ".syncCommitId.remote"
    # Marker file holding the last-synced commit id of this repo.
    SYNC_COMMIT_ID_FILE_THIS_REPO = ".syncCommitId.this"
    # NOTE(review): the f-prefix is redundant here; %s is substituted later via %-formatting
    # (see the `SYNC_COMMIT_MESSAGE % self.libRepo.name` use sites in pull/push).
    SYNC_COMMIT_MESSAGE = f"Updated %s sync commit identifiers"
    # Directory used to back up ignored, unversioned content during a sync.
    SYNC_BACKUP_DIR = ".syncBackup"

    def __init__(self, name, branch, pathToLib):
        """
        :param name: the name identifying this repository (used on the command line)
        :param branch: the branch in the lib repo dedicated to syncing with this repo
        :param pathToLib: path to the library directory within this repo
        :raises ValueError: if *pathToLib* does not exist
        """
        self.pathToLibInThisRepo = os.path.abspath(pathToLib)
        if not os.path.exists(self.pathToLibInThisRepo):
            raise ValueError(f"Repository directory '{self.pathToLibInThisRepo}' does not exist")
        self.name = name
        self.branch = branch
        # Set by LibRepo.add when this repo is registered.
        self.libRepo: Optional[LibRepo] = None

    def isSyncEstablished(self):
        """Return True if a sync has happened before (the remote marker file exists)."""
        return os.path.exists(os.path.join(self.pathToLibInThisRepo, self.SYNC_COMMIT_ID_FILE_LIB_REPO))

    def lastSyncIdThisRepo(self):
        """Return the commit id of this repo at the time of the last sync."""
        with open(os.path.join(self.pathToLibInThisRepo, self.SYNC_COMMIT_ID_FILE_THIS_REPO), "r") as f:
            commitId = f.read().strip()
        return commitId

    def lastSyncIdLibRepo(self):
        """Return the commit id of the lib repo at the time of the last sync."""
        with open(os.path.join(self.pathToLibInThisRepo, self.SYNC_COMMIT_ID_FILE_LIB_REPO), "r") as f:
            commitId = f.read().strip()
        return commitId

    def gitLogThisRepoSinceLastSync(self):
        """Return this repo's (indented) library log since the last sync, with sync-marker commits filtered out."""
        lg = gitLog(self.pathToLibInThisRepo, '--name-only HEAD "^%s" .' % self.lastSyncIdThisRepo())
        lg = re.sub(r'commit [0-9a-z]{8,40}\n.*\n.*\n\s*\n.*\n\s*(\n.*\.syncCommitId\.(this|remote))+', r"", lg, flags=re.MULTILINE)  # remove commits with sync commit id update
        indent = " "
        lg = indent + lg.replace("\n", "\n" + indent)
        return lg

    def gitLogLibRepoSinceLastSync(self, libRepo: "LibRepo"):
        """Return the lib repo's (indented) library log since the last sync, or "" if no sync marker exists."""
        syncIdFile = os.path.join(self.pathToLibInThisRepo, self.SYNC_COMMIT_ID_FILE_LIB_REPO)
        if not os.path.exists(syncIdFile):
            return ""
        with open(syncIdFile, "r") as f:
            syncId = f.read().strip()
        lg = gitLog(libRepo.libPath, '--name-only HEAD "^%s" .' % syncId)
        # Normalize "Sync <name>" headers to a plain "Sync" header.
        lg = re.sub(r"Sync (\w+)\n\s*\n", r"Sync\n\n", lg, flags=re.MULTILINE)
        indent = " "
        lg = indent + lg.replace("\n", "\n" + indent)
        return "\n\n" + lg

    def _userInputYesNo(self, question) -> bool:
        """Interactively ask *question* until the user answers 'y' or 'n'; return True for 'y'."""
        result = None
        while result not in ("y", "n"):
            result = input(question + " [y|n]: ").strip()
        return result == "y"

    def pull(self, libRepo: "LibRepo"):
        """
        Pulls in changes from this repository into the lib repo
        """
        # switch to branch in lib repo
        os.chdir(libRepo.rootPath)
        execute("git checkout %s" % self.branch)
        # check if the branch contains the commit that is referenced as the remote commit
        remoteCommitId = self.lastSyncIdLibRepo()
        remoteCommitExists = execute("git rev-list HEAD..%s" % remoteCommitId, exceptionOnError=False)
        if not remoteCommitExists:
            if not self._userInputYesNo(f"\nWARNING: The referenced remote commit {remoteCommitId} does not exist "
                    f"in your {self.libRepo.name} branch '{self.branch}'!\nSomeone else may have "
                    f"pulled/pushed in the meantime.\nIt is recommended that you do not continue. "
                    f"Continue?"):
                return
        # check if this branch is clean
        lgLib = self.gitLogLibRepoSinceLastSync(libRepo).strip()
        if lgLib != "":
            print(f"The following changes have been added to this branch in the library:\n\n{lgLib}\n\n")
            print(f"ERROR: You must push these changes before you can pull or reset this branch to {remoteCommitId}")
            sys.exit(1)
        # get log with relevant commits in this repo that are to be pulled
        lg = self.gitLogThisRepoSinceLastSync()
        os.chdir(libRepo.rootPath)
        # create commit message file
        commitMsg = f"Sync {self.name}\n\n" + lg
        with open(COMMIT_MSG_FILENAME, "w") as f:
            f.write(commitMsg)
        # ask whether to commit these changes
        print("Relevant commits:\n\n" + lg + "\n\n")
        if not self._userInputYesNo(f"The above changes will be pulled from {self.name}.\n"
                f"You may change the commit message by editing {os.path.abspath(COMMIT_MSG_FILENAME)}.\n"
                "Continue?"):
            os.unlink(COMMIT_MSG_FILENAME)
            return
        # prepare restoration of ignored files
        self.prepare_restoration_of_ignored_files(libRepo.rootPath)
        # remove library tree in lib repo
        shutil.rmtree(self.libRepo.libDirectory)
        # copy tree from this repo to lib repo (but drop the sync commit id files)
        shutil.copytree(self.pathToLibInThisRepo, self.libRepo.libDirectory)
        for fn in (self.SYNC_COMMIT_ID_FILE_LIB_REPO, self.SYNC_COMMIT_ID_FILE_THIS_REPO):
            p = os.path.join(self.libRepo.libDirectory, fn)
            if os.path.exists(p):
                os.unlink(p)
        # restore ignored directories/files
        self.restore_ignored_files(libRepo.rootPath)
        # make commit in lib repo
        os.system("git add %s" % self.libRepo.libDirectory)
        gitCommitWithMessageFromFile(COMMIT_MSG_FILENAME)
        newSyncCommitIdLibRepo = call("git rev-parse HEAD").strip()
        # update commit ids in this repo
        os.chdir(self.pathToLibInThisRepo)
        newSyncCommitIdThisRepo = call("git rev-parse HEAD").strip()
        with open(self.SYNC_COMMIT_ID_FILE_LIB_REPO, "w") as f:
            f.write(newSyncCommitIdLibRepo)
        with open(self.SYNC_COMMIT_ID_FILE_THIS_REPO, "w") as f:
            f.write(newSyncCommitIdThisRepo)
        execute('git add %s %s' % (self.SYNC_COMMIT_ID_FILE_LIB_REPO, self.SYNC_COMMIT_ID_FILE_THIS_REPO))
        execute(f'git commit -m "{self.SYNC_COMMIT_MESSAGE % self.libRepo.name} (pull)"')
        print(f"\n\nIf everything was successful, you should now push your changes to branch "
              f"'{self.branch}'\nand get your branch merged into develop (issuing a pull request where appropriate)")

    def push(self, libRepo: "LibRepo"):
        """
        Pushes changes from the lib repo to this repo
        """
        os.chdir(libRepo.rootPath)
        # switch to the source repo branch
        execute(f"git checkout {self.branch}")
        if self.isSyncEstablished():
            # check if there are any commits that have not yet been pulled
            unpulledCommits = self.gitLogThisRepoSinceLastSync().strip()
            if unpulledCommits != "":
                print(f"\n{unpulledCommits}\n\n")
                if not self._userInputYesNo(f"WARNING: The above changes in repository '{self.name}' have not"
                        f" yet been pulled.\nYou might want to pull them.\n"
                        f"If you continue with the push, they will be lost. Continue?"):
                    return
            # get change log in lib repo since last sync
            libLogSinceLastSync = self.gitLogLibRepoSinceLastSync(libRepo)
            print("Relevant commits:\n\n" + libLogSinceLastSync + "\n\n")
            if not self._userInputYesNo("The above changes will be pushed. Continue?"):
                return
            print()
        else:
            libLogSinceLastSync = ""
        # prepare restoration of ignored files in target repo
        base_dir_this_repo = os.path.join(self.pathToLibInThisRepo, "..")
        self.prepare_restoration_of_ignored_files(base_dir_this_repo)
        # remove the target repo tree and update it with the tree from the source repo
        shutil.rmtree(self.pathToLibInThisRepo)
        shutil.copytree(libRepo.libPath, self.pathToLibInThisRepo)
        # get the commit id of the source repo we just copied
        commitId = call("git rev-parse HEAD").strip()
        # restore ignored directories and files
        self.restore_ignored_files(base_dir_this_repo)
        # go to the target repo
        os.chdir(self.pathToLibInThisRepo)
        # commit new version in this repo
        execute("git add .")
        with open(self.SYNC_COMMIT_ID_FILE_LIB_REPO, "w") as f:
            f.write(commitId)
        execute("git add %s" % self.SYNC_COMMIT_ID_FILE_LIB_REPO)
        gitCommit(f"{self.libRepo.name} {commitId}" + libLogSinceLastSync)
        commitId = call("git rev-parse HEAD").strip()
        # update information on the commit id we just added
        with open(self.SYNC_COMMIT_ID_FILE_THIS_REPO, "w") as f:
            f.write(commitId)
        execute("git add %s" % self.SYNC_COMMIT_ID_FILE_THIS_REPO)
        execute(f'git commit -m "{self.SYNC_COMMIT_MESSAGE % self.libRepo.name} (push)"')
        os.chdir(libRepo.rootPath)
        print(f"\n\nIf everything was successful, you should now update the remote branch:\ngit push")

    def prepare_restoration_of_ignored_files(self, base_dir: str):
        """
        Backs up ignored, unversioned directories into the sync backup directory.

        :param base_dir: the directory containing the lib directory, to which ignored paths are relative
        """
        cwd = os.getcwd()
        os.chdir(base_dir)
        # ensure backup dir exists and is empty
        if os.path.exists(self.SYNC_BACKUP_DIR):
            shutil.rmtree(self.SYNC_BACKUP_DIR)
        os.mkdir(self.SYNC_BACKUP_DIR)
        # backup ignored, unversioned directories
        for d in self.libRepo.fullyIgnoredUnversionedDirectories:
            if os.path.exists(d):
                shutil.copytree(d, os.path.join(self.SYNC_BACKUP_DIR, d))
        os.chdir(cwd)

    def restore_ignored_files(self, base_dir: str):
        """
        Restores ignored content after a sync overwrote the library tree, then removes the backup.

        :param base_dir: the directory containing the lib directory, to which ignored paths are relative
        """
        cwd = os.getcwd()
        os.chdir(base_dir)
        # remove fully ignored directories that were overwritten by the sync
        for d in self.libRepo.fullyIgnoredVersionedDirectories + self.libRepo.fullyIgnoredUnversionedDirectories:
            if os.path.exists(d):
                print("Removing overwritten content: %s" % d)
                shutil.rmtree(d)
        # restore directories and files that can be restored via git
        for d in self.libRepo.ignoredDirectories + self.libRepo.fullyIgnoredVersionedDirectories:
            restoration_cmd = "git checkout %s" % d
            print("Restoring: %s" % restoration_cmd)
            os.system(restoration_cmd)
        for pattern in self.libRepo.ignoredFileGlobPatterns:
            for path in glob.glob(pattern, recursive=True):
                print("Restoring via git: %s" % path)
                os.system("git checkout %s" % path)
        # restore directories that were backed up
        for d in self.libRepo.fullyIgnoredUnversionedDirectories:
            if os.path.exists(os.path.join(self.SYNC_BACKUP_DIR, d)):
                print("Restoring from backup: %s" % d)
                shutil.copytree(os.path.join(self.SYNC_BACKUP_DIR, d), d)
        # remove backup dir
        shutil.rmtree(self.SYNC_BACKUP_DIR)
        os.chdir(cwd)
class LibRepo:
    """The repository that owns the canonical copy of the library being synced.

    Holds the sync configuration (ignored paths) and the set of registered
    :class:`OtherRepo` instances; :meth:`runMain` dispatches push/pull from
    the command line.
    """

    def __init__(self, name: str, libDirectory: str,
            ignoredDirectories: Sequence[str] = (),
            fullyIgnoredVersionedDirectories: Sequence[str] = (),
            fullyIgnoredUnversionedDirectories: Sequence[str] = (),
            ignoredFileGlobPatterns: Sequence[str] = ()
            ):
        """
        :param name: name of the library being synced
        :param libDirectory: relative path to the library directory within this repo
        :param ignoredDirectories: ignored directories; existing files in ignored directories will be restored
            via 'git checkout' on pull/push, but new files will be added.
            This is useful for configuration-like files, where users may have local changes that should not
            be overwritten, but new files should still be added.
        :param fullyIgnoredVersionedDirectories:
            fully ignored versioned directories will be restored to original state after push/pull via git checkout
        :param fullyIgnoredUnversionedDirectories:
            fully ignored unversioned directories will be backed up and restored to original state after push/pull
        :param ignoredFileGlobPatterns: files matching ignored glob patterns will be restored via 'git checkout'
            on pull/push
        """
        self.name = name
        # Root of this repo is the directory containing this script.
        self.rootPath = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
        self.libDirectory = libDirectory
        self.libPath = os.path.join(self.rootPath, self.libDirectory)
        self.ignoredDirectories: List[str] = list(ignoredDirectories)
        self.fullyIgnoredVersionedDirectories: List[str] = list(fullyIgnoredVersionedDirectories)
        self.fullyIgnoredUnversionedDirectories: List[str] = list(fullyIgnoredUnversionedDirectories)
        self.ignoredFileGlobPatterns: List[str] = list(ignoredFileGlobPatterns)
        self.otherRepos: List[OtherRepo] = []

    def add(self, repo: OtherRepo):
        """Register *repo* for syncing and link it back to this lib repo."""
        repo.libRepo = self
        self.otherRepos.append(repo)

    def runMain(self):
        """Command-line entry point: ``sync.py <repo-name> <push|pull>``.

        :raises ValueError: if the repo name or the command is unknown
        """
        repos = self.otherRepos
        args = sys.argv[1:]
        if len(args) != 2:
            print(f"usage: sync.py <{'|'.join([repo.name for repo in repos])}> <push|pull>")
        else:
            repo = [r for r in repos if r.name == args[0]]
            if len(repo) != 1:
                raise ValueError(f"Unknown repo '{args[0]}'")
            repo = repo[0]
            if args[1] == "push":
                repo.push(self)
            elif args[1] == "pull":
                repo.pull(self)
            else:
                raise ValueError(f"Unknown command '{args[1]}'")
| {
"repo_id": "oraios/serena",
"file_path": "repo_dir_sync.py",
"license": "MIT License",
"lines": 296,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:scripts/profile_tool_call.py | import cProfile
from pathlib import Path
from typing import Literal
from sensai.util import logging
from sensai.util.logging import LogTime
from sensai.util.profiling import profiled
from serena.agent import SerenaAgent
from serena.config.serena_config import SerenaConfig
from serena.tools import FindSymbolTool
log = logging.getLogger(__name__)
if __name__ == "__main__":
    # Profiling harness: runs a single FindSymbolTool call through a SerenaAgent
    # and records where the time goes.
    logging.configure()
    # The profiler to use:
    # Use pyinstrument for hierarchical profiling output
    # Use cProfile to determine which functions take the most time overall (and use snakeviz to visualize)
    profiler: Literal["pyinstrument", "cprofile"] = "cprofile"
    project_path = Path(__file__).parent.parent  # Serena root
    # Disable UI surfaces so only the tool call itself is measured.
    serena_config = SerenaConfig.from_config_file()
    serena_config.log_level = logging.INFO
    serena_config.gui_log_window = False
    serena_config.web_dashboard = False
    agent = SerenaAgent(str(project_path), serena_config=serena_config)
    # wait for language server to be ready
    agent.execute_task(lambda: log.info("Language server is ready."))

    def tool_call():
        """This is the function we want to profile."""
        # NOTE: We use apply (not apply_ex) to run the tool call directly on the main thread
        with LogTime("Tool call"):
            result = agent.get_tool(FindSymbolTool).apply(name_path="DQN")
            log.info("Tool result:\n%s", result)

    if profiler == "pyinstrument":
        # Hierarchical profile written to a file via the @profiled decorator.
        @profiled(log_to_file=True)
        def profiled_tool_call():
            tool_call()
        profiled_tool_call()
    elif profiler == "cprofile":
        # Flat cProfile stats written to tool_call.pstat (viewable with snakeviz).
        cProfile.run("tool_call()", "tool_call.pstat")
| {
"repo_id": "oraios/serena",
"file_path": "scripts/profile_tool_call.py",
"license": "MIT License",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/serena/ls_manager.py | import logging
import os.path
import threading
from collections.abc import Iterator
from sensai.util.logging import LogTime
from serena.config.serena_config import SerenaPaths
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language, LanguageServerConfig
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class LanguageServerManagerInitialisationError(Exception):
    """Raised when one or more language servers fail to start during manager initialisation."""

    def __init__(self, message: str):
        """
        :param message: a description of the initialisation failure(s)
        """
        super().__init__(message)
class LanguageServerFactory:
    """
    Factory that creates pre-configured (but not yet started) SolidLanguageServer
    instances for a fixed project root.
    """

    def __init__(
        self,
        project_root: str,
        project_data_path: str,
        encoding: str,
        ignored_patterns: list[str],
        ls_timeout: float | None = None,
        ls_specific_settings: dict | None = None,
        trace_lsp_communication: bool = False,
    ):
        """
        :param project_root: root directory of the project to be served
        :param project_data_path: directory in which project-specific language server data is stored
        :param encoding: text encoding of the project's source files
        :param ignored_patterns: path patterns the language server shall ignore
        :param ls_timeout: request timeout in seconds, or None for no timeout
        :param ls_specific_settings: optional settings specific to individual language server implementations
        :param trace_lsp_communication: whether to trace the LSP message exchange (debugging aid)
        """
        self.project_root = project_root
        self.project_data_path = project_data_path
        self.encoding = encoding
        self.ignored_patterns = ignored_patterns
        self.ls_timeout = ls_timeout
        self.ls_specific_settings = ls_specific_settings
        self.trace_lsp_communication = trace_lsp_communication

    def create_language_server(self, language: Language) -> SolidLanguageServer:
        """
        Instantiates (without starting) a language server for the given language,
        configured with this factory's project settings.

        :param language: the language the server shall handle
        :return: the newly created language server instance
        """
        server_config = LanguageServerConfig(
            code_language=language,
            ignored_paths=self.ignored_patterns,
            trace_lsp_communication=self.trace_lsp_communication,
            encoding=self.encoding,
        )
        log.info(f"Creating language server instance for {self.project_root}, language={language}.")
        lsp_settings = SolidLSPSettings(
            solidlsp_dir=SerenaPaths().serena_user_home_dir,
            project_data_path=self.project_data_path,
            ls_specific_settings=self.ls_specific_settings or {},
        )
        return SolidLanguageServer.create(server_config, self.project_root, timeout=self.ls_timeout, solidlsp_settings=lsp_settings)
class LanguageServerManager:
    """
    Manages one or more language servers for a project.

    Files are routed to a suitable server based on each server's ignore rules;
    servers that are found to be no longer running are transparently restarted
    (provided a factory is available).
    """

    def __init__(
        self,
        language_servers: dict[Language, SolidLanguageServer],
        language_server_factory: LanguageServerFactory | None = None,
    ) -> None:
        """
        :param language_servers: a mapping from language to language server; the servers are assumed to be already started.
            The first server in the iteration order is used as the default server.
            All servers are assumed to serve the same project root.
        :param language_server_factory: factory for language server creation; if None, dynamic (re)creation of language servers
            is not supported
        """
        self._language_servers = language_servers
        self._language_server_factory = language_server_factory
        # the default server handles files that no other server claims (see get_language_server)
        self._default_language_server = next(iter(language_servers.values()))
        self._root_path = self._default_language_server.repository_root_path

    @staticmethod
    def from_languages(languages: list[Language], factory: LanguageServerFactory) -> "LanguageServerManager":
        """
        Creates a manager with language servers for the given languages using the given factory.
        The language servers are started in parallel threads.

        :param languages: the languages for which to spawn language servers
        :param factory: the factory for language server creation
        :return: the instance
        """

        class StartLSThread(threading.Thread):
            # one thread per language; results are collected via the attributes below after join()
            def __init__(self, language: Language):
                super().__init__(target=self._start_language_server, name="StartLS:" + language.value)
                self.language = language
                self.language_server: SolidLanguageServer | None = None
                self.exception: Exception | None = None

            def _start_language_server(self) -> None:
                try:
                    with LogTime(f"Language server startup (language={self.language.value})"):
                        self.language_server = factory.create_language_server(self.language)
                        self.language_server.start()
                        if not self.language_server.is_running():
                            raise RuntimeError(f"Failed to start the language server for language {self.language.value}")
                except Exception as e:
                    log.error(f"Error starting language server for language {self.language.value}: {e}", exc_info=e)
                    self.exception = e

        # start language servers in parallel threads
        threads = []
        for language in languages:
            thread = StartLSThread(language)
            thread.start()
            threads.append(thread)

        # collect language servers and exceptions
        language_servers: dict[Language, SolidLanguageServer] = {}
        exceptions: dict[Language, Exception] = {}
        for thread in threads:
            thread.join()
            if thread.exception is not None:
                exceptions[thread.language] = thread.exception
            elif thread.language_server is not None:
                language_servers[thread.language] = thread.language_server

        # If any server failed to start up, raise an exception and stop all started language servers.
        # We intentionally fail fast here. The user's intention is to work with all the specified languages,
        # so if any of them is not available, it is better to make symbolic tool calls fail, bringing the issue to the
        # user's attention instead of silently continuing with a subset of the language servers and potentially
        # causing suboptimal agent behaviour.
        if exceptions:
            for ls in language_servers.values():
                ls.stop()
            failure_messages = "\n".join([f"{lang.value}: {e}" for lang, e in exceptions.items()])
            raise LanguageServerManagerInitialisationError(f"Failed to start {len(exceptions)} language server(s):\n{failure_messages}")

        return LanguageServerManager(language_servers, factory)

    def get_root_path(self) -> str:
        """
        :return: the repository root path served by the managed language servers
        """
        return self._root_path

    def _ensure_functional_ls(self, ls: SolidLanguageServer) -> SolidLanguageServer:
        """
        Returns the given language server, restarting it first (via the factory) if it is no longer running.
        """
        if not ls.is_running():
            log.warning(f"Language server for language {ls.language} is not running; restarting ...")
            ls = self.restart_language_server(ls.language)
        return ls

    def _get_suitable_language_server(self, relative_path: str) -> SolidLanguageServer | None:
        """
        :param relative_path: relative path to a file
        :return: the first language server that does not ignore the given file, or None if all ignore it
        """
        for candidate in self._language_servers.values():
            if not candidate.is_ignored_path(relative_path, ignore_unsupported_files=True):
                return candidate
        return None

    def get_language_server(self, relative_path: str) -> SolidLanguageServer:
        """
        Returns a (running) language server responsible for the given file,
        falling back to the default server if no server claims the file.

        :param relative_path: relative path to a file
        """
        ls: SolidLanguageServer | None = None
        if len(self._language_servers) > 1:
            # NOTE(review): isdir is evaluated against the process CWD, not the project root —
            # confirm callers always pass paths that resolve correctly from the CWD
            if os.path.isdir(relative_path):
                raise ValueError(f"Expected a file path, but got a directory: {relative_path}")
            ls = self._get_suitable_language_server(relative_path)
        if ls is None:
            ls = self._default_language_server
        return self._ensure_functional_ls(ls)

    def _create_and_start_language_server(self, language: Language) -> SolidLanguageServer:
        """
        Creates a language server for the given language via the factory, starts it, registers it
        and returns it. Raises ValueError if no factory is available.
        """
        if self._language_server_factory is None:
            raise ValueError(f"No language server factory available to create language server for {language}")
        language_server = self._language_server_factory.create_language_server(language)
        language_server.start()
        self._language_servers[language] = language_server
        return language_server

    def restart_language_server(self, language: Language) -> SolidLanguageServer:
        """
        Forces recreation and restart of the language server for the given language.
        It is assumed that the language server for the given language is no longer running.

        :param language: the language
        :return: the newly created language server
        """
        if language not in self._language_servers:
            raise ValueError(f"No language server for language {language.value} present; cannot restart")
        return self._create_and_start_language_server(language)

    def add_language_server(self, language: Language) -> SolidLanguageServer:
        """
        Dynamically adds a new language server for the given language, using the manager's factory.

        :param language: the language
        :return: the newly created language server
        """
        if language in self._language_servers:
            raise ValueError(f"Language server for language {language.value} already present")
        return self._create_and_start_language_server(language)

    def remove_language_server(self, language: Language, save_cache: bool = False) -> None:
        """
        Removes the language server for the given language, stopping it if it is running.

        :param language: the language
        :param save_cache: whether to save the server's cache before stopping it
        """
        if language not in self._language_servers:
            raise ValueError(f"No language server for language {language.value} present; cannot remove")
        ls = self._language_servers.pop(language)
        self._stop_language_server(ls, save_cache=save_cache)

    def get_active_languages(self) -> list[Language]:
        """
        Returns the list of languages for which language servers are currently managed.

        :return: list of languages
        """
        return list(self._language_servers.keys())

    @staticmethod
    def _stop_language_server(ls: SolidLanguageServer, save_cache: bool = False, timeout: float = 2.0) -> None:
        """
        Stops the given language server if it is running, optionally saving its cache first.
        """
        if ls.is_running():
            if save_cache:
                ls.save_cache()
            log.info(f"Stopping language server for language {ls.language} ...")
            ls.stop(shutdown_timeout=timeout)

    def iter_language_servers(self) -> Iterator[SolidLanguageServer]:
        """
        Iterates over the managed language servers, restarting any that are no longer running.
        """
        for ls in self._language_servers.values():
            yield self._ensure_functional_ls(ls)

    def stop_all(self, save_cache: bool = False, timeout: float = 2.0) -> None:
        """
        Stops all managed language servers.

        :param save_cache: whether to save the cache before stopping
        :param timeout: timeout for shutdown of each language server
        """
        # NOTE(review): iter_language_servers restarts dead servers before they are stopped again;
        # confirm whether iterating self._language_servers.values() directly would be preferable here
        for ls in self.iter_language_servers():
            self._stop_language_server(ls, save_cache=save_cache, timeout=timeout)

    def save_all_caches(self) -> None:
        """
        Saves the caches of all managed language servers.
        """
        for ls in self.iter_language_servers():
            if ls.is_running():
                ls.save_cache()

    def has_suitable_ls_for_file(self, relative_file_path: str) -> bool:
        """
        :param relative_file_path: relative path to a file
        :return: True if at least one managed language server handles (does not ignore) the file
        """
        return self._get_suitable_language_server(relative_file_path) is not None
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/ls_manager.py",
"license": "MIT License",
"lines": 212,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/serena/task_executor.py | import concurrent.futures
import threading
import time
from collections.abc import Callable
from concurrent.futures import Future
from dataclasses import dataclass
from threading import Thread
from typing import Generic, TypeVar
from sensai.util import logging
from sensai.util.logging import LogTime
from sensai.util.string import ToStringMixin
log = logging.getLogger(__name__)
# result type produced by a task's function
T = TypeVar("T")


class TaskExecutor:
    """
    Executes tasks strictly sequentially, in the order they are issued, on a dedicated
    daemon worker thread. Results are exposed via futures; each task may carry its own
    completion timeout.
    """

    def __init__(self, name: str):
        """
        :param name: the name to assign to the executor's worker thread (for logging/debugging)
        """
        self._task_executor_lock = threading.Lock()
        self._task_executor_queue: list[TaskExecutor.Task] = []
        # daemon thread so the executor never prevents interpreter shutdown
        self._task_executor_thread = Thread(target=self._process_task_queue, name=name, daemon=True)
        self._task_executor_thread.start()
        self._task_executor_task_index = 1
        self._task_executor_current_task: TaskExecutor.Task | None = None
        self._task_executor_last_executed_task_info: TaskExecutor.TaskInfo | None = None

    class Task(ToStringMixin, Generic[T]):
        """A single unit of work with an associated future for result/exception delivery."""

        def __init__(self, function: Callable[[], T], name: str, logged: bool = True, timeout: float | None = None):
            """
            :param function: the function representing the task to execute
            :param name: the name of the task
            :param logged: whether to log management of the task; if False, only errors will be logged
            :param timeout: the maximum time to wait for task completion in seconds, or None to wait indefinitely
            """
            self.name = name
            self.future: concurrent.futures.Future = concurrent.futures.Future()
            self.logged = logged
            self.timeout = timeout
            self._function = function

        def _tostring_includes(self) -> list[str]:
            # only include the task name in the string representation
            return ["name"]

        def start(self) -> None:
            """
            Executes the task in a separate thread, setting the result or exception on the future.
            """

            def run_task() -> None:
                try:
                    # skip execution if the task was cancelled (or somehow completed) before it started
                    if self.future.done():
                        if self.logged:
                            log.info(f"Task {self.name} was already completed/cancelled; skipping execution")
                        return
                    with LogTime(self.name, logger=log, enabled=self.logged):
                        result = self._function()
                    # a cancelled future cannot accept a result; only set it if still pending
                    if not self.future.done():
                        self.future.set_result(result)
                except Exception as e:
                    if not self.future.done():
                        log.error(f"Error during execution of {self.name}: {e}", exc_info=e)
                        self.future.set_exception(e)

            thread = Thread(target=run_task, name=self.name)
            thread.start()

        def is_done(self) -> bool:
            """
            :return: whether the task has completed (either successfully, with failure, or via cancellation)
            """
            return self.future.done()

        def result(self, timeout: float | None = None) -> T:
            """
            Blocks until the task is done or the timeout is reached, and returns the result.
            If an exception occurred during task execution, it is raised here.
            If the timeout is reached, a TimeoutError is raised (but the task is not cancelled).
            If the task is cancelled, a CancelledError is raised.

            :param timeout: the maximum time to wait in seconds; if None, wait indefinitely
            :return: the result of the task's function
            """
            return self.future.result(timeout=timeout)

        def cancel(self) -> None:
            """
            Cancels the task. If it has not yet started, it will not be executed.
            If it has already started, its future will be marked as cancelled and will raise a CancelledError
            when its result is requested.
            """
            self.future.cancel()

        def wait_until_done(self, timeout: float | None = None) -> None:
            """
            Waits until the task is done or the timeout is reached.
            The task is done if it either completed successfully, failed with an exception, or was cancelled.

            :param timeout: the maximum time to wait in seconds; if None, wait indefinitely
            """
            try:
                self.future.result(timeout=timeout)
            except:
                # deliberately swallow any outcome (task exception, cancellation, timeout):
                # this method only waits for completion, it does not report results
                pass

    def _process_task_queue(self) -> None:
        # worker loop (runs forever on the daemon thread created in __init__):
        # pops one task at a time and waits for it before moving on, ensuring sequential execution
        while True:
            # obtain task from the queue
            task: TaskExecutor.Task | None = None
            with self._task_executor_lock:
                if len(self._task_executor_queue) > 0:
                    task = self._task_executor_queue.pop(0)
            if task is None:
                # queue is empty; poll again shortly
                time.sleep(0.1)
                continue

            # start task execution asynchronously
            with self._task_executor_lock:
                self._task_executor_current_task = task
            if task.logged:
                log.info("Starting execution of %s", task.name)
            task.start()

            # wait for task completion (bounded by the task's own timeout, if any)
            task.wait_until_done(timeout=task.timeout)
            with self._task_executor_lock:
                self._task_executor_current_task = None
                if task.logged:
                    self._task_executor_last_executed_task_info = self.TaskInfo.from_task(task, is_running=False)

    @dataclass
    class TaskInfo:
        """Snapshot of a task's state, safe to hand out to external callers."""

        # the task's display name
        name: str
        # whether the task was running when this snapshot was taken
        is_running: bool
        future: Future
        """
        future for accessing the task's result
        """
        task_id: int
        """
        unique identifier of the task
        """
        # whether the task's lifecycle is logged
        logged: bool

        def finished_successfully(self) -> bool:
            """
            :return: True if the task completed without being cancelled and without raising an exception
            """
            return self.future.done() and not self.future.cancelled() and self.future.exception() is None

        @staticmethod
        def from_task(task: "TaskExecutor.Task", is_running: bool) -> "TaskExecutor.TaskInfo":
            # id(task) serves as the unique identifier of the underlying task object
            return TaskExecutor.TaskInfo(name=task.name, is_running=is_running, future=task.future, task_id=id(task), logged=task.logged)

        def cancel(self) -> None:
            # cancel the underlying task via its future
            self.future.cancel()

    def get_current_tasks(self) -> list[TaskInfo]:
        """
        Gets the list of tasks currently running or queued for execution.
        The function returns a list of thread-safe TaskInfo objects (specifically created for the caller).

        :return: the list of tasks in the execution order (running task first)
        """
        tasks = []
        with self._task_executor_lock:
            if self._task_executor_current_task is not None:
                tasks.append(self.TaskInfo.from_task(self._task_executor_current_task, True))
            for task in self._task_executor_queue:
                if not task.is_done():
                    tasks.append(self.TaskInfo.from_task(task, False))
        return tasks

    def issue_task(self, task: Callable[[], T], name: str | None = None, logged: bool = True, timeout: float | None = None) -> Task[T]:
        """
        Issue a task to the executor for asynchronous execution.
        It is ensured that tasks are executed in the order they are issued, one after another.

        :param task: the task to execute
        :param name: the name of the task for logging purposes; if None, use the task function's name
        :param logged: whether to log management of the task; if False, only errors will be logged
        :param timeout: the maximum time to wait for task completion in seconds, or None to wait indefinitely
        :return: the task object, through which the task's future result can be accessed
        """
        with self._task_executor_lock:
            # logged tasks receive a sequential index; background (unlogged) tasks share a generic prefix
            if logged:
                task_prefix_name = f"Task-{self._task_executor_task_index}"
                self._task_executor_task_index += 1
            else:
                task_prefix_name = "BackgroundTask"
            task_name = f"{task_prefix_name}:{name or task.__name__}"
            if logged:
                log.info(f"Scheduling {task_name}")
            task_obj = self.Task(function=task, name=task_name, logged=logged, timeout=timeout)
            self._task_executor_queue.append(task_obj)
            return task_obj

    def execute_task(self, task: Callable[[], T], name: str | None = None, logged: bool = True, timeout: float | None = None) -> T:
        """
        Executes the given task synchronously via the agent's task executor.
        This is useful for tasks that need to be executed immediately and whose results are needed right away.

        :param task: the task to execute
        :param name: the name of the task for logging purposes; if None, use the task function's name
        :param logged: whether to log management of the task; if False, only errors will be logged
        :param timeout: the maximum time to wait for task completion in seconds, or None to wait indefinitely
        :return: the result of the task execution
        """
        task_obj = self.issue_task(task, name=name, logged=logged, timeout=timeout)
        return task_obj.result()

    def get_last_executed_task(self) -> TaskInfo | None:
        """
        Gets information about the last executed task.

        :return: TaskInfo of the last executed task, or None if no task has been executed yet.
        """
        with self._task_executor_lock:
            return self._task_executor_last_executed_task_info
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/task_executor.py",
"license": "MIT License",
"lines": 188,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
def ask_yes_no(question: str, default: bool | None = None) -> bool:
    """
    Interactively asks the user a yes/no question on the console, re-prompting until a valid answer is given.

    :param question: the question to display
    :param default: the answer to return when the user just presses Enter;
        if None, there is no default and an explicit answer is required
    :return: True if the user answered yes, False if no
    """
    # The capitalized letter indicates the default chosen on plain Enter.
    # Fix: when there is no default, show a neutral "y/n" instead of the
    # misleading "y/N" (which suggested that Enter would answer "no").
    if default is None:
        default_prompt = "y/n"
    else:
        default_prompt = "Y/n" if default else "y/N"
    while True:
        answer = input(f"{question} [{default_prompt}] ").strip().lower()
        if answer == "" and default is not None:
            return default
        if answer in ("y", "yes"):
            return True
        if answer in ("n", "no"):
            return False
        print("Please answer yes/y or no/n.")
| {
"repo_id": "oraios/serena",
"file_path": "src/serena/util/cli_util.py",
"license": "MIT License",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/solidlsp/language_servers/fortran_language_server.py | """
Fortran Language Server implementation using fortls.
"""
import logging
import os
import pathlib
import re
import shutil
from overrides import override
from solidlsp import ls_types
from solidlsp.ls import DocumentSymbols, LSPFileBuffer, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class FortranLanguageServer(SolidLanguageServer):
    """Fortran Language Server implementation using fortls."""

    @override
    def _get_wait_time_for_cross_file_referencing(self) -> float:
        # extra settle time before cross-file requests (references/definitions)
        return 3.0  # fortls needs time for workspace indexing

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For Fortran projects, ignore common build directories
        return super().is_ignored_dirname(dirname) or dirname in [
            "build",
            "Build",
            "BUILD",
            "bin",
            "lib",
            "mod",  # Module files directory
            "obj",  # Object files directory
            ".cmake",
            "CMakeFiles",
        ]

    def _fix_fortls_selection_range(
        self, symbol: ls_types.UnifiedSymbolInformation, file_content: str
    ) -> ls_types.UnifiedSymbolInformation:
        """
        Fix fortls's incorrect selectionRange that points to line start instead of identifier name.

        fortls bug: selectionRange.start.character is 0 (line start) but should point to the
        function/subroutine/module/program name position. This breaks MCP server features that
        rely on the exact identifier position for finding references.

        Args:
            symbol: The symbol with potentially incorrect selectionRange
            file_content: Full file content to parse the line

        Returns:
            Symbol with corrected selectionRange pointing to the identifier name
        """
        if "selectionRange" not in symbol:
            return symbol
        sel_range = symbol["selectionRange"]
        start_line = sel_range["start"]["line"]
        start_char = sel_range["start"]["character"]
        # Split file content into lines
        lines = file_content.split("\n")
        if start_line >= len(lines):
            # selectionRange points past the end of the file; leave the symbol untouched
            return symbol
        line = lines[start_line]
        # Fortran keywords that define named constructs
        # Match patterns:
        # Standard keywords: <keyword> <whitespace> <identifier_name>
        #   "  function add_numbers(a, b) result(sum)" -> keyword="function", name="add_numbers"
        #   "subroutine print_result(value)" -> keyword="subroutine", name="print_result"
        #   "module math_utils" -> keyword="module", name="math_utils"
        #   "program test_program" -> keyword="program", name="test_program"
        #   "interface distance" -> keyword="interface", name="distance"
        #
        # Type definitions (can have :: syntax):
        #   "type point" -> keyword="type", name="point"
        #   "type :: point" -> keyword="type", name="point"
        #   "type, extends(base) :: derived" -> keyword="type", name="derived"
        #
        # Submodules (have parent module in parentheses):
        #   "submodule (parent_mod) child_mod" -> keyword="submodule", name="child_mod"

        # Try type pattern first (has complex syntax with optional comma and ::)
        type_pattern = r"^\s*type\s*(?:,.*?)?\s*(?:::)?\s*([a-zA-Z_]\w*)"
        match = re.match(type_pattern, line, re.IGNORECASE)
        if match:
            # For type pattern, identifier is in group 1
            identifier_name = match.group(1)
            identifier_start = match.start(1)
        else:
            # Try standard keywords pattern
            standard_pattern = r"^\s*(function|subroutine|module|program|interface)\s+([a-zA-Z_]\w*)"
            match = re.match(standard_pattern, line, re.IGNORECASE)
            if not match:
                # Try submodule pattern
                submodule_pattern = r"^\s*submodule\s*\([^)]+\)\s+([a-zA-Z_]\w*)"
                match = re.match(submodule_pattern, line, re.IGNORECASE)
                if match:
                    identifier_name = match.group(1)
                    identifier_start = match.start(1)
            else:
                identifier_name = match.group(2)
                identifier_start = match.start(2)
        if match:
            # Create corrected selectionRange
            new_sel_range = {
                "start": {"line": start_line, "character": identifier_start},
                "end": {"line": start_line, "character": identifier_start + len(identifier_name)},
            }
            # Create modified symbol with corrected selectionRange
            corrected_symbol = symbol.copy()
            corrected_symbol["selectionRange"] = new_sel_range  # type: ignore[typeddict-item]
            log.debug(f"Fixed fortls selectionRange for {identifier_name}: char {start_char} -> {identifier_start}")
            return corrected_symbol
        # If no match, return symbol unchanged (e.g., for variables, which don't have this pattern)
        return symbol

    @override
    def request_document_symbols(self, relative_file_path: str, file_buffer: LSPFileBuffer | None = None) -> DocumentSymbols:
        # Override to fix fortls's incorrect selectionRange bug.
        #
        # fortls returns selectionRange pointing to line start (character 0) instead of the
        # identifier name position. This breaks MCP server features that rely on exact positions.
        #
        # This override:
        # 1. Gets symbols from fortls via parent implementation
        # 2. Parses each symbol's line to find the correct identifier position
        # 3. Fixes selectionRange for all symbols recursively
        # 4. Returns corrected symbols

        # Get symbols from fortls (with incorrect selectionRange)
        document_symbols = super().request_document_symbols(relative_file_path, file_buffer=file_buffer)
        # Get file content for parsing
        with self.open_file(relative_file_path) as file_data:
            file_content = file_data.contents

        # Fix selectionRange recursively for all symbols
        def fix_symbol_and_children(symbol: ls_types.UnifiedSymbolInformation) -> ls_types.UnifiedSymbolInformation:
            # Fix this symbol's selectionRange
            fixed = self._fix_fortls_selection_range(symbol, file_content)
            # Fix children recursively
            if fixed.get("children"):
                fixed["children"] = [fix_symbol_and_children(child) for child in fixed["children"]]
            return fixed

        # Apply fix to all symbols
        fixed_root_symbols = [fix_symbol_and_children(sym) for sym in document_symbols.root_symbols]
        return DocumentSymbols(fixed_root_symbols)

    @staticmethod
    def _check_fortls_installation() -> str:
        """Check if fortls is available.

        :return: the absolute path to the fortls executable
        :raises RuntimeError: if fortls is not found on the PATH
        """
        fortls_path = shutil.which("fortls")
        if fortls_path is None:
            raise RuntimeError("fortls is not installed or not in PATH.\nInstall it with: pip install fortls")
        return fortls_path

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        :param config: the language server configuration
        :param repository_root_path: root directory of the repository to be served
        :param solidlsp_settings: global solidlsp settings
        """
        # Check fortls installation
        fortls_path = self._check_fortls_installation()
        # Command to start fortls language server
        # fortls uses stdio for LSP communication by default
        fortls_cmd = f"{fortls_path}"
        super().__init__(
            config, repository_root_path, ProcessLaunchInfo(cmd=fortls_cmd, cwd=repository_root_path), "fortran", solidlsp_settings
        )

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """Initialize params for Fortran Language Server.

        :param repository_absolute_path: absolute path of the repository root (becomes the workspace root)
        :return: the LSP initialize request parameters
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                            "preselectSupport": True,
                        },
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "formatting": {"dynamicRegistration": True},
                    "rangeFormatting": {"dynamicRegistration": True},
                    "codeAction": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore[return-value]

    def _start_server(self) -> None:
        """Start Fortran Language Server process."""

        def window_log_message(msg: dict) -> None:
            # forward server-side log messages to our logger
            log.info(f"Fortran LSP: window/logMessage: {msg}")

        def do_nothing(params: dict) -> None:
            # ignored notification
            return

        def register_capability_handler(params: dict) -> None:
            # dynamic capability registration is acknowledged but otherwise ignored
            return

        # Register LSP message handlers
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Fortran Language Server (fortls) process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request to Fortran Language Server")
        init_response = self.server.send.initialize(initialize_params)
        # Verify server capabilities
        capabilities = init_response.get("capabilities", {})
        assert "textDocumentSync" in capabilities
        if "completionProvider" in capabilities:
            log.info("Fortran LSP completion provider available")
        if "definitionProvider" in capabilities:
            log.info("Fortran LSP definition provider available")
        if "referencesProvider" in capabilities:
            log.info("Fortran LSP references provider available")
        if "documentSymbolProvider" in capabilities:
            log.info("Fortran LSP document symbol provider available")
        self.server.notify.initialized({})
        # Fortran Language Server is ready after initialization
        log.info("Fortran Language Server initialization complete")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/fortran_language_server.py",
"license": "MIT License",
"lines": 236,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/haskell_language_server.py | """
Provides Haskell specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Haskell.
"""
import logging
import os
import pathlib
import shutil
import time
from typing import Any
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class HaskellLanguageServer(SolidLanguageServer):
"""
Provides Haskell specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Haskell.
Uses Haskell Language Server (HLS) for LSP functionality.
"""
@staticmethod
def _ensure_hls_installed() -> str:
"""Ensure haskell-language-server-wrapper is available."""
# Try common locations
common_paths = [
shutil.which("haskell-language-server-wrapper"),
"/opt/homebrew/bin/haskell-language-server-wrapper",
"/usr/local/bin/haskell-language-server-wrapper",
os.path.expanduser("~/.ghcup/bin/haskell-language-server-wrapper"),
os.path.expanduser("~/.cabal/bin/haskell-language-server-wrapper"),
os.path.expanduser("~/.local/bin/haskell-language-server-wrapper"),
]
# Check Stack programs directory
stack_programs = os.path.expanduser("~/.local/share/stack/programs")
if os.path.exists(stack_programs):
try:
for arch_dir in os.listdir(stack_programs):
arch_path = os.path.join(stack_programs, arch_dir)
if os.path.isdir(arch_path):
try:
for ghc_dir in os.listdir(arch_path):
hls_path = os.path.join(arch_path, ghc_dir, "bin", "haskell-language-server-wrapper")
if os.path.isfile(hls_path) and os.access(hls_path, os.X_OK):
common_paths.append(hls_path)
except (PermissionError, OSError):
# Skip directories we can't read
continue
except (PermissionError, OSError):
# Stack programs directory not accessible
pass
for path in common_paths:
if path and os.path.isfile(path) and os.access(path, os.X_OK):
return path
raise RuntimeError(
"haskell-language-server-wrapper is not installed or not in PATH.\n"
"Searched locations:\n" + "\n".join(f" - {p}" for p in common_paths if p) + "\n"
"Please install HLS via:\n"
" - GHCup: https://www.haskell.org/ghcup/\n"
" - Stack: stack install haskell-language-server\n"
" - Cabal: cabal install haskell-language-server\n"
" - Homebrew (macOS): brew install haskell-language-server"
)
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
"""
Creates a HaskellLanguageServer instance. This class is not meant to be instantiated directly. Use LanguageServer.create() instead.
"""
hls_executable_path = self._ensure_hls_installed()
log.info(f"Using haskell-language-server at: {hls_executable_path}")
# Check if there's a haskell subdirectory with Stack/Cabal project
haskell_subdir = os.path.join(repository_root_path, "haskell")
if os.path.exists(haskell_subdir) and (
os.path.exists(os.path.join(haskell_subdir, "stack.yaml")) or os.path.exists(os.path.join(haskell_subdir, "cabal.project"))
):
working_dir = haskell_subdir
log.info(f"Using Haskell project directory: {working_dir}")
else:
working_dir = repository_root_path
# Set up environment with GHCup bin in PATH
env = dict(os.environ)
ghcup_bin = os.path.expanduser("~/.ghcup/bin")
if ghcup_bin not in env.get("PATH", ""):
env["PATH"] = f"{ghcup_bin}{os.pathsep}{env.get('PATH', '')}"
super().__init__(
config,
repository_root_path,
ProcessLaunchInfo(cmd=[hls_executable_path, "--lsp", "--cwd", working_dir], cwd=working_dir, env=env),
"haskell",
solidlsp_settings,
)
@override
def is_ignored_dirname(self, dirname: str) -> bool:
return super().is_ignored_dirname(dirname) or dirname in ["dist", "dist-newstyle", ".stack-work", ".cabal-sandbox"]
    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Haskell Language Server.

        The dict advertises the client's static LSP capabilities; HLS tailors its
        responses to what is declared here.

        :param repository_absolute_path: absolute workspace root, sent to HLS as
            ``rootUri``/``rootPath`` and as the single workspace folder.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "clientInfo": {"name": "Serena", "version": "0.1.0"},
            "locale": "en",
            "capabilities": {
                # Workspace-level capabilities: edits, configuration, file watching, symbols.
                "workspace": {
                    "applyEdit": True,
                    "workspaceEdit": {
                        "documentChanges": True,
                        "resourceOperations": ["create", "rename", "delete"],
                        "failureHandling": "textOnlyTransactional",
                        "normalizesLineEndings": True,
                        "changeAnnotationSupport": {"groupsOnLabel": True},
                    },
                    "configuration": True,
                    "didChangeWatchedFiles": {"dynamicRegistration": True, "relativePatternSupport": True},
                    "symbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                        "tagSupport": {"valueSet": [1]},
                        "resolveSupport": {"properties": ["location.range"]},
                    },
                    "executeCommand": {"dynamicRegistration": True},
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "workspaceFolders": True,
                    "semanticTokens": {"refreshSupport": True},
                },
                # Per-document capabilities: completion, hover, symbols, code actions, etc.
                "textDocument": {
                    "publishDiagnostics": {
                        "relatedInformation": True,
                        "versionSupport": False,
                        "tagSupport": {"valueSet": [1, 2]},
                        "codeDescriptionSupport": True,
                        "dataSupport": True,
                    },
                    "synchronization": {"dynamicRegistration": True, "willSave": True, "willSaveWaitUntil": True, "didSave": True},
                    "completion": {
                        "dynamicRegistration": True,
                        "contextSupport": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                            "preselectSupport": True,
                            "tagSupport": {"valueSet": [1]},
                            "insertReplaceSupport": True,
                            "resolveSupport": {"properties": ["documentation", "detail", "additionalTextEdits"]},
                            "insertTextModeSupport": {"valueSet": [1, 2]},
                            "labelDetailsSupport": True,
                        },
                        "insertTextMode": 2,
                        "completionItemKind": {
                            "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
                        },
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "signatureHelp": {
                        "dynamicRegistration": True,
                        "signatureInformation": {
                            "documentationFormat": ["markdown", "plaintext"],
                            "parameterInformation": {"labelOffsetSupport": True},
                            "activeParameterSupport": True,
                        },
                        "contextSupport": True,
                    },
                    "definition": {"dynamicRegistration": True, "linkSupport": True},
                    "references": {"dynamicRegistration": True},
                    "documentHighlight": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                        "hierarchicalDocumentSymbolSupport": True,
                        "tagSupport": {"valueSet": [1]},
                        "labelSupport": True,
                    },
                    "codeAction": {
                        "dynamicRegistration": True,
                        "isPreferredSupport": True,
                        "disabledSupport": True,
                        "dataSupport": True,
                        "resolveSupport": {"properties": ["edit"]},
                        "codeActionLiteralSupport": {
                            "codeActionKind": {
                                "valueSet": [
                                    "",
                                    "quickfix",
                                    "refactor",
                                    "refactor.extract",
                                    "refactor.inline",
                                    "refactor.rewrite",
                                    "source",
                                    "source.organizeImports",
                                ]
                            }
                        },
                        "honorsChangeAnnotations": False,
                    },
                    "formatting": {"dynamicRegistration": True},
                    "rangeFormatting": {"dynamicRegistration": True},
                    "onTypeFormatting": {"dynamicRegistration": True},
                    "rename": {
                        "dynamicRegistration": True,
                        "prepareSupport": True,
                        "prepareSupportDefaultBehavior": 1,
                        "honorsChangeAnnotations": True,
                    },
                    "documentLink": {"dynamicRegistration": True, "tooltipSupport": True},
                    "typeDefinition": {"dynamicRegistration": True, "linkSupport": True},
                    "implementation": {"dynamicRegistration": True, "linkSupport": True},
                    "colorProvider": {"dynamicRegistration": True},
                    "foldingRange": {
                        "dynamicRegistration": True,
                        "rangeLimit": 5000,
                        "lineFoldingOnly": True,
                        "foldingRangeKind": {"valueSet": ["comment", "imports", "region"]},
                    },
                    "declaration": {"dynamicRegistration": True, "linkSupport": True},
                    "selectionRange": {"dynamicRegistration": True},
                    "callHierarchy": {"dynamicRegistration": True},
                    "semanticTokens": {
                        "dynamicRegistration": True,
                        "tokenTypes": [
                            "namespace",
                            "type",
                            "class",
                            "enum",
                            "interface",
                            "struct",
                            "typeParameter",
                            "parameter",
                            "variable",
                            "property",
                            "enumMember",
                            "event",
                            "function",
                            "method",
                            "macro",
                            "keyword",
                            "modifier",
                            "comment",
                            "string",
                            "number",
                            "regexp",
                            "operator",
                        ],
                        "tokenModifiers": [
                            "declaration",
                            "definition",
                            "readonly",
                            "static",
                            "deprecated",
                            "abstract",
                            "async",
                            "modification",
                            "documentation",
                            "defaultLibrary",
                        ],
                        "formats": ["relative"],
                        "requests": {"range": True, "full": {"delta": True}},
                        "multilineTokenSupport": False,
                        "overlappingTokenSupport": False,
                    },
                    "linkedEditingRange": {"dynamicRegistration": True},
                },
                "window": {
                    "showMessage": {"messageActionItem": {"additionalPropertiesSupport": True}},
                    "showDocument": {"support": True},
                    "workDoneProgress": True,
                },
                "general": {
                    "staleRequestSupport": {"cancel": True, "retryOnContentModified": []},
                    "regularExpressions": {"engine": "ECMAScript", "version": "ES2020"},
                    "markdown": {
                        "parser": "marked",
                        "version": "1.1.0",
                    },
                    "positionEncodings": ["utf-16"],
                },
            },
            # HLS-specific settings: format with ormolu and typecheck the whole project.
            "initializationOptions": {
                "haskell": {
                    "formattingProvider": "ormolu",
                    "checkProject": True,
                }
            },
            "trace": "verbose",
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore
    def _start_server(self) -> None:
        """
        Starts the Haskell Language Server: registers notification/request handlers,
        launches the server process, performs the LSP initialize handshake, and then
        waits briefly so HLS can index the project.
        """

        def do_nothing(params: Any) -> None:
            # Sink for notifications we deliberately ignore (progress, diagnostics).
            return

        def window_log_message(msg: dict) -> None:
            # Forward server-side log messages to our logger.
            log.info(f"LSP: window/logMessage: {msg}")

        def register_capability_handler(params: dict) -> None:
            """Handle dynamic capability registration from HLS"""
            if "registrations" in params:
                for registration in params.get("registrations", []):
                    method = registration.get("method", "")
                    log.info(f"HLS registered capability: {method}")
            return

        def workspace_configuration_handler(params: dict) -> Any:
            """Handle workspace/configuration requests from HLS"""
            log.info(f"HLS requesting configuration: {params}")
            # Configuration matching VS Code settings and initialization options
            haskell_config = {
                "formattingProvider": "ormolu",
                "checkProject": True,
                "plugin": {"importLens": {"codeActionsOn": False, "codeLensOn": False}, "hlint": {"codeActionsOn": False}},
            }
            # HLS expects array of config items matching requested sections
            if isinstance(params, dict) and "items" in params:
                result = []
                for item in params["items"]:
                    section = item.get("section", "")
                    if section == "haskell":
                        result.append(haskell_config)
                    else:
                        # Sections we don't know get an empty config object.
                        result.append({})
                log.info(f"Returning configuration: {result}")
                return result
            # Fallback: return single config
            return [haskell_config]

        # Handlers must be registered before the process starts so no message is missed.
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_request("workspace/configuration", workspace_configuration_handler)
        log.info("Starting Haskell Language Server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        # Log capabilities returned by HLS
        capabilities = init_response.get("capabilities", {})
        log.info(f"HLS capabilities: {list(capabilities.keys())}")
        self.server.notify.initialized({})
        # Give HLS time to index the project
        # HLS can be slow to index, especially on first run
        log.info("Waiting for HLS to index project...")
        time.sleep(5)
        log.info("Haskell Language Server initialized successfully")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/haskell_language_server.py",
"license": "MIT License",
"lines": 352,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/julia_server.py | import logging
import os
import pathlib
import platform
import shutil
import subprocess
from typing import Any
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class JuliaLanguageServer(SolidLanguageServer):
    """
    Language server implementation for Julia using LanguageServer.jl.

    The Julia runtime is located (PATH first, then common install locations) and the
    LanguageServer.jl package is installed on demand before the server is launched.
    """

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates a JuliaLanguageServer instance. This class is not meant to be instantiated
        directly; use LanguageServer.create() instead.
        """
        julia_executable = self._setup_runtime_dependency()
        julia_code = "using LanguageServer; runserver()"
        julia_ls_cmd: str | list[str]
        if platform.system() == "Windows":
            # On Windows, pass as list (Serena handles shell=True differently)
            julia_ls_cmd = [julia_executable, "--startup-file=no", "--history-file=no", "-e", julia_code, repository_root_path]
        else:
            # On Linux/macOS, build shell-escaped string
            import shlex

            julia_ls_cmd = (
                f"{shlex.quote(julia_executable)} "
                f"--startup-file=no "
                f"--history-file=no "
                f"-e {shlex.quote(julia_code)} "
                f"{shlex.quote(repository_root_path)}"
            )
        log.info(f"[JULIA DEBUG] Command: {julia_ls_cmd}")
        super().__init__(
            config, repository_root_path, ProcessLaunchInfo(cmd=julia_ls_cmd, cwd=repository_root_path), "julia", solidlsp_settings
        )

    @staticmethod
    def _setup_runtime_dependency() -> str:
        """
        Check if the Julia runtime is available and return its full path.

        Also probes for LanguageServer.jl and installs it when missing.

        :raises RuntimeError: with a helpful message if Julia cannot be found
            or LanguageServer.jl cannot be installed.
        """
        # First check if julia is in PATH
        julia_path = shutil.which("julia")
        # If not found in PATH, check common installation locations
        if julia_path is None:
            common_locations = [
                os.path.expanduser("~/.juliaup/bin/julia"),
                os.path.expanduser("~/.julia/bin/julia"),
                "/usr/local/bin/julia",
                "/usr/bin/julia",
            ]
            for location in common_locations:
                if os.path.isfile(location) and os.access(location, os.X_OK):
                    julia_path = location
                    break
            if julia_path is None:
                raise RuntimeError(
                    "Julia is not installed or not in your PATH. "
                    "Please install Julia from https://julialang.org/downloads/ and ensure it is accessible. "
                    f"Checked locations: {common_locations}"
                )
        # Check if LanguageServer.jl is installed
        check_cmd = [julia_path, "-e", "using LanguageServer"]
        try:
            result = subprocess.run(check_cmd, check=False, capture_output=True, text=True, timeout=10)
            if result.returncode != 0:
                # LanguageServer.jl not found, install it
                JuliaLanguageServer._install_language_server(julia_path)
        except subprocess.TimeoutExpired:
            # A hanging probe most likely means the package needs (re)installation/precompilation.
            JuliaLanguageServer._install_language_server(julia_path)
        return julia_path

    @staticmethod
    def _install_language_server(julia_path: str) -> None:
        """Install the LanguageServer.jl package via Pkg.

        :raises RuntimeError: if the installation fails or times out.
        """
        log.info("LanguageServer.jl not found. Installing... (this may take a minute)")
        install_cmd = [julia_path, "-e", 'using Pkg; Pkg.add("LanguageServer")']
        try:
            result = subprocess.run(install_cmd, check=False, capture_output=True, text=True, timeout=300)  # 5 minutes for installation
            if result.returncode == 0:
                log.info("LanguageServer.jl installed successfully!")
            else:
                raise RuntimeError(f"Failed to install LanguageServer.jl: {result.stderr}")
        except subprocess.TimeoutExpired as e:
            # Chain the original timeout so the cause is preserved in tracebacks.
            raise RuntimeError(
                "LanguageServer.jl installation timed out. Please install manually: julia -e 'using Pkg; Pkg.add(\"LanguageServer\")'"
            ) from e

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        """Define language-specific directories to ignore for Julia projects."""
        return super().is_ignored_dirname(dirname) or dirname in [".julia", "build", "dist"]

    def _get_initialize_params(self, repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Julia Language Server.

        Only the capabilities Serena relies on (definition, references, document
        symbols) are advertised.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params: InitializeParams = {  # type: ignore
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "capabilities": {
                "workspace": {"workspaceFolders": True},
                "textDocument": {
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {"dynamicRegistration": True},
                },
            },
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore

    def _start_server(self) -> None:
        """Start the LanguageServer.jl server process and perform the LSP handshake."""

        def do_nothing(params: Any) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)

        log.info("Starting LanguageServer.jl server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)

        log.info("Sending initialize request to Julia Language Server")
        init_response = self.server.send.initialize(initialize_params)
        # The server must support the features Serena relies on.
        assert "definitionProvider" in init_response["capabilities"]
        assert "referencesProvider" in init_response["capabilities"]
        assert "documentSymbolProvider" in init_response["capabilities"]
        self.server.notify.initialized({})
        log.info("Julia Language Server is initialized and ready.")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/julia_server.py",
"license": "MIT License",
"lines": 139,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/scala_language_server.py | """
Provides Scala specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Scala.
"""
import logging
import os
import pathlib
import shutil
import subprocess
from enum import Enum
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_utils import PlatformUtils
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
# NOTE(review): this guard has an empty body and thus no effect — it looks like a
# vestigial POSIX-only block (perhaps a conditional import once lived here);
# confirm and remove.
if not PlatformUtils.get_platform_id().value.startswith("win"):
    pass

log = logging.getLogger(__name__)

# Default configuration constants
DEFAULT_METALS_VERSION = "1.6.4"  # Metals version bootstrapped when none is configured
DEFAULT_CLIENT_NAME = "Serena"  # client identifier reported to Metals
DEFAULT_ON_STALE_LOCK = "auto-clean"  # must match a StaleLockMode value
DEFAULT_LOG_MULTI_INSTANCE_NOTICE = True  # log when another Metals instance is active
class StaleLockMode(Enum):
    """Mode for handling stale Metals H2 database locks.

    The enum values are the strings accepted for the ``on_stale_lock`` key of the
    Scala-specific ``ls_specific_settings`` configuration.
    """

    AUTO_CLEAN = "auto-clean"
    """Automatically remove stale lock files (default, recommended)."""

    WARN = "warn"
    """Log a warning but proceed; may result in degraded experience."""

    FAIL = "fail"
    """Raise an error and refuse to start."""
def _get_scala_settings(solidlsp_settings: SolidLSPSettings) -> dict[str, object]:
    """
    Extract Scala-specific settings with defaults applied.

    Returns a dictionary with keys:
      - metals_version: str
      - client_name: str
      - on_stale_lock: StaleLockMode
      - log_multi_instance_notice: bool
    """
    from solidlsp.ls_config import Language

    defaults: dict[str, object] = {
        "metals_version": DEFAULT_METALS_VERSION,
        "client_name": DEFAULT_CLIENT_NAME,
        "on_stale_lock": StaleLockMode.AUTO_CLEAN,
        "log_multi_instance_notice": DEFAULT_LOG_MULTI_INSTANCE_NOTICE,
    }
    # No user-provided LS settings at all -> everything comes from the defaults.
    if not solidlsp_settings.ls_specific_settings:
        return defaults

    scala_settings = solidlsp_settings.get_ls_specific_settings(Language.SCALA)

    # Parse the stale-lock mode, falling back to auto-clean on invalid input.
    raw_mode = scala_settings.get("on_stale_lock", DEFAULT_ON_STALE_LOCK)
    try:
        stale_mode = StaleLockMode(raw_mode)
    except ValueError:
        log.warning(f"Invalid on_stale_lock value '{raw_mode}', using '{DEFAULT_ON_STALE_LOCK}'")
        stale_mode = StaleLockMode.AUTO_CLEAN

    result = dict(defaults)
    result["metals_version"] = scala_settings.get("metals_version", DEFAULT_METALS_VERSION)
    result["client_name"] = scala_settings.get("client_name", DEFAULT_CLIENT_NAME)
    result["on_stale_lock"] = stale_mode
    result["log_multi_instance_notice"] = scala_settings.get("log_multi_instance_notice", DEFAULT_LOG_MULTI_INSTANCE_NOTICE)
    return result
class ScalaLanguageServer(SolidLanguageServer):
    """
    Provides Scala specific instantiation of the LanguageServer class.
    Contains various configurations and settings specific to Scala.

    Configurable options in ls_specific_settings (in serena_config.yml):

        ls_specific_settings:
          scala:
            # Stale lock handling: auto-clean | warn | fail
            on_stale_lock: 'auto-clean'
            # Log notice when another Metals instance is detected
            log_multi_instance_notice: true
            # Metals version to bootstrap (default: DEFAULT_METALS_VERSION)
            metals_version: '1.6.4'
            # Client identifier sent to Metals (default: DEFAULT_CLIENT_NAME)
            client_name: 'Serena'

    Multi-instance support:
        Metals uses H2 AUTO_SERVER mode (enabled by default) to support multiple
        concurrent instances sharing the same database. Running Serena's Metals
        alongside VS Code's Metals is designed to work. The only issue is stale
        locks from crashed processes, which this class can detect and clean up.
    """

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates a ScalaLanguageServer instance. This class is not meant to be instantiated directly.
        Use LanguageServer.create() instead.
        """
        # Check for stale locks before setting up dependencies (fail-fast)
        self._check_metals_db_status(repository_root_path, solidlsp_settings)
        scala_lsp_executable_path = self._setup_runtime_dependencies(config, solidlsp_settings)
        super().__init__(
            config,
            repository_root_path,
            ProcessLaunchInfo(cmd=scala_lsp_executable_path, cwd=repository_root_path),
            config.code_language.value,
            solidlsp_settings,
        )

    def _check_metals_db_status(self, repository_root_path: str, solidlsp_settings: SolidLSPSettings) -> None:
        """
        Check the Metals H2 database status and handle stale locks.

        This method is called before setting up runtime dependencies to fail-fast
        if there's a stale lock that the user has configured to fail on.

        :raises MetalsStaleLockError: when a stale lock exists and on_stale_lock='fail'.
        """
        from pathlib import Path

        from solidlsp.ls_exceptions import MetalsStaleLockError
        from solidlsp.util.metals_db_utils import (
            MetalsDbStatus,
            check_metals_db_status,
            cleanup_stale_lock,
        )

        project_path = Path(repository_root_path)
        status, lock_info = check_metals_db_status(project_path)

        # Get settings using the shared helper function
        settings = _get_scala_settings(solidlsp_settings)
        on_stale_lock: StaleLockMode = settings["on_stale_lock"]  # type: ignore[assignment]
        log_multi_instance_notice: bool = settings["log_multi_instance_notice"]  # type: ignore[assignment]

        if status == MetalsDbStatus.ACTIVE_INSTANCE:
            # Another live Metals process holds the DB — supported, just inform the user.
            if log_multi_instance_notice and lock_info:
                log.info(
                    f"Another Metals instance detected (PID: {lock_info.pid}). "
                    "This is fine - Metals supports multiple instances via H2 AUTO_SERVER. "
                    "Both instances will share the database and Bloop build server."
                )
        elif status == MetalsDbStatus.STALE_LOCK:
            lock_path = lock_info.lock_path if lock_info else project_path / ".metals" / "metals.mv.db.lock.db"
            lock_path_str = str(lock_path)
            if on_stale_lock == StaleLockMode.AUTO_CLEAN:
                log.info(f"Stale Metals lock detected, cleaning up: {lock_path_str}")
                cleanup_success = cleanup_stale_lock(lock_path)
                if not cleanup_success:
                    log.warning(
                        f"Failed to clean up stale lock at {lock_path_str}. "
                        "Metals may fall back to in-memory database (degraded experience)."
                    )
            elif on_stale_lock == StaleLockMode.WARN:
                log.warning(
                    f"Stale Metals lock detected at {lock_path_str}. "
                    "A previous Metals process may have crashed. "
                    "Metals will fall back to in-memory database (degraded experience). "
                    "Consider removing the lock file manually or setting on_stale_lock='auto-clean'."
                )
            elif on_stale_lock == StaleLockMode.FAIL:
                raise MetalsStaleLockError(lock_path_str)

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # Ignore Scala build-tool output/metadata directories in addition to generic ignores.
        return super().is_ignored_dirname(dirname) or dirname in [
            ".bloop",
            ".metals",
            "target",
        ]

    @classmethod
    def _setup_runtime_dependencies(cls, config: LanguageServerConfig, solidlsp_settings: SolidLSPSettings) -> list[str]:
        """
        Setup runtime dependencies for Scala Language Server and return the command to start the server.

        Resolution order: a `metals` binary already on PATH wins; otherwise a versioned
        Metals launcher is bootstrapped via coursier into the LS resources directory.
        """
        assert shutil.which("java") is not None, "JDK is not installed or not in PATH."
        # Check if metals is available globally in PATH
        global_metals = shutil.which("metals")
        if global_metals:
            log.info(f"Found metals in PATH: {global_metals}")
            return [global_metals]

        # Get settings using the shared helper function
        settings = _get_scala_settings(solidlsp_settings)
        metals_version: str = settings["metals_version"]  # type: ignore[assignment]
        client_name: str = settings["client_name"]  # type: ignore[assignment]

        metals_home = os.path.join(cls.ls_resources_dir(solidlsp_settings), "metals-lsp")
        os.makedirs(metals_home, exist_ok=True)
        metals_executable = os.path.join(metals_home, metals_version, "metals")
        if not os.path.exists(metals_executable):
            coursier_command_path = shutil.which("coursier")
            cs_command_path = shutil.which("cs")
            assert cs_command_path is not None or coursier_command_path is not None, "coursier is not installed or not in PATH."
            if not cs_command_path:
                assert coursier_command_path is not None
                log.info("'cs' command not found. Trying to install it using 'coursier'.")
                try:
                    log.info("Running 'coursier setup --yes' to install 'cs'...")
                    subprocess.run([coursier_command_path, "setup", "--yes"], check=True, capture_output=True, text=True)
                except subprocess.CalledProcessError as e:
                    # Chain the subprocess failure so its context is preserved in tracebacks.
                    raise RuntimeError(f"Failed to set up 'cs' command with 'coursier setup'. Stderr: {e.stderr}") from e
                cs_command_path = shutil.which("cs")
                if not cs_command_path:
                    raise RuntimeError(
                        "'cs' command not found after running 'coursier setup'. Please check your PATH or install it manually."
                    )
                log.info("'cs' command installed successfully.")
            log.info(f"metals executable not found at {metals_executable}, bootstrapping...")
            # Create the versioned target directory portably. The previous
            # subprocess.run(["mkdir", "-p", ...]) relied on a Unix binary and
            # would fail on Windows; os.makedirs is equivalent and cross-platform.
            os.makedirs(os.path.join(metals_home, metals_version), exist_ok=True)
            artifact = f"org.scalameta:metals_2.13:{metals_version}"
            cmd = [
                cs_command_path,
                "bootstrap",
                "--java-opt",
                "-XX:+UseG1GC",
                "--java-opt",
                "-XX:+UseStringDeduplication",
                "--java-opt",
                "-Xss4m",
                "--java-opt",
                "-Xms100m",
                "--java-opt",
                f"-Dmetals.client={client_name}",
                artifact,
                "-o",
                metals_executable,
                "-f",
            ]
            log.info("Bootstrapping metals...")
            subprocess.run(cmd, cwd=metals_home, check=True)
            log.info("Bootstrapping metals finished.")
        return [metals_executable]

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Scala Language Server.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "initializationOptions": {
                "compilerOptions": {
                    "completionCommand": None,
                    "isCompletionItemDetailEnabled": True,
                    "isCompletionItemDocumentationEnabled": True,
                    "isCompletionItemResolve": True,
                    "isHoverDocumentationEnabled": True,
                    "isSignatureHelpDocumentationEnabled": True,
                    # NOTE(review): Metals documents "ascii"/"unicode" for overrideDefFormat;
                    # "ascli" looks like a typo for "ascii" — confirm against Metals docs before changing.
                    "overrideDefFormat": "ascli",
                    "snippetAutoIndent": False,
                },
                "debuggingProvider": True,
                "decorationProvider": False,
                "didFocusProvider": False,
                "doctorProvider": False,
                "executeClientCommandProvider": False,
                "globSyntax": "uri",
                "icons": "unicode",
                "inputBoxProvider": False,
                "isVirtualDocumentSupported": False,
                "isExitOnShutdown": True,
                "isHttpEnabled": True,
                "openFilesOnRenameProvider": False,
                "quickPickProvider": False,
                "renameFileThreshold": 200,
                "statusBarProvider": "false",
                "treeViewProvider": False,
                "testExplorerProvider": False,
                "openNewWindowProvider": False,
                "copyWorksheetOutputProvider": False,
                "doctorVisibilityProvider": False,
            },
            "capabilities": {"textDocument": {"documentSymbol": {"hierarchicalDocumentSymbolSupport": True}}},
        }
        return initialize_params  # type: ignore

    def _start_server(self) -> None:
        """
        Starts the Scala Language Server
        """
        log.info("Starting Scala server process")
        self.server.start()
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        initialize_params = self._get_initialize_params(self.repository_root_path)
        self.server.send.initialize(initialize_params)
        self.server.notify.initialized({})

    @override
    def _get_wait_time_for_cross_file_referencing(self) -> float:
        # Metals indexing can lag; allow extra time before cross-file reference queries.
        return 5
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/scala_language_server.py",
"license": "MIT License",
"lines": 273,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/yaml_language_server.py | """
Provides YAML specific instantiation of the LanguageServer class using yaml-language-server.
Contains various configurations and settings specific to YAML files.
"""
import logging
import os
import pathlib
import shutil
from typing import Any
from solidlsp.language_servers.common import RuntimeDependency, RuntimeDependencyCollection
from solidlsp.ls import LanguageServerDependencyProvider, LanguageServerDependencyProviderSinglePath, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class YamlLanguageServer(SolidLanguageServer):
    """
    Provides YAML specific instantiation of the LanguageServer class using yaml-language-server.
    Contains various configurations and settings specific to YAML files.
    """

    @staticmethod
    def _determine_log_level(line: str) -> int:
        """Classify yaml-language-server stderr output to avoid false-positive errors."""
        line_lower = line.lower()
        # Known informational messages from yaml-language-server that aren't critical errors:
        # - "cannot find module" + "package.json": schema resolution - not critical
        # - "no parser": parser messages - informational
        # (Plain boolean expression instead of any([...]): avoids building a throwaway list.)
        if ("cannot find module" in line_lower and "package.json" in line_lower) or "no parser" in line_lower:
            return logging.DEBUG
        return SolidLanguageServer._determine_log_level(line)

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates a YamlLanguageServer instance. This class is not meant to be instantiated directly.
        Use LanguageServer.create() instead.
        """
        super().__init__(
            config,
            repository_root_path,
            None,
            "yaml",
            solidlsp_settings,
        )

    def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
        # The provider locates/installs the yaml-language-server npm package on demand.
        return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)

    class DependencyProvider(LanguageServerDependencyProviderSinglePath):
        def _get_or_install_core_dependency(self) -> str:
            """
            Setup runtime dependencies for YAML Language Server and return the command to start the server.

            :raises FileNotFoundError: if the executable is still missing after installation.
            """
            # Verify both node and npm are installed
            is_node_installed = shutil.which("node") is not None
            assert is_node_installed, "node is not installed or isn't in PATH. Please install NodeJS and try again."
            is_npm_installed = shutil.which("npm") is not None
            assert is_npm_installed, "npm is not installed or isn't in PATH. Please install npm and try again."

            deps = RuntimeDependencyCollection(
                [
                    RuntimeDependency(
                        id="yaml-language-server",
                        description="yaml-language-server package (Red Hat)",
                        command="npm install --prefix ./ yaml-language-server@1.19.2",
                        platform_id="any",
                    ),
                ]
            )

            # Install yaml-language-server if not already installed
            yaml_ls_dir = os.path.join(self._ls_resources_dir, "yaml-lsp")
            yaml_executable_path = os.path.join(yaml_ls_dir, "node_modules", ".bin", "yaml-language-server")
            # Handle Windows executable extension
            if os.name == "nt":
                yaml_executable_path += ".cmd"

            if not os.path.exists(yaml_executable_path):
                log.info(f"YAML Language Server executable not found at {yaml_executable_path}. Installing...")
                deps.install(yaml_ls_dir)
                log.info("YAML language server dependencies installed successfully")

            if not os.path.exists(yaml_executable_path):
                raise FileNotFoundError(
                    f"yaml-language-server executable not found at {yaml_executable_path}, something went wrong with the installation."
                )
            return yaml_executable_path

        def _create_launch_command(self, core_path: str) -> list[str]:
            # Communicate with the server over stdio.
            return [core_path, "--stdio"]

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the YAML Language Server.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "codeAction": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {"dynamicRegistration": True},
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
            # Enable schema-store-driven validation, completion, hover and formatting.
            "initializationOptions": {
                "yaml": {
                    "schemaStore": {"enable": True, "url": "https://www.schemastore.org/api/json/catalog.json"},
                    "format": {"enable": True},
                    "validate": True,
                    "hover": True,
                    "completion": True,
                }
            },
        }
        return initialize_params  # type: ignore

    def _start_server(self) -> None:
        """
        Starts the YAML Language Server, waits for the server to be ready and yields the LanguageServer instance.
        """

        def register_capability_handler(params: Any) -> None:
            # Dynamic capability registrations are accepted but not tracked.
            return

        def do_nothing(params: Any) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)

        log.info("Starting YAML server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)

        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        log.debug(f"Received initialize response from YAML server: {init_response}")

        # Verify document symbol support is available
        if "documentSymbolProvider" in init_response["capabilities"]:
            log.info("YAML server supports document symbols")
        else:
            log.warning("Warning: YAML server does not report document symbol support")

        self.server.notify.initialized({})
        # YAML language server is ready immediately after initialization
        log.info("YAML server initialization complete")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/yaml_language_server.py",
"license": "MIT License",
"lines": 160,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/util/cache.py | import logging
from typing import Any, Optional
from sensai.util.pickle import dump_pickle, load_pickle
log = logging.getLogger(__name__)
def load_cache(path: str, version: Any) -> Optional[Any]:
    """Load the pickled cache entry stored at *path* and return its payload.

    Returns None (and logs) when the file does not contain a versioned cache
    dict or when the stored version differs from *version*.
    """
    payload = load_pickle(path)
    has_version_marker = isinstance(payload, dict) and "__cache_version" in payload
    if not has_version_marker:
        log.info("Cache is outdated (expected version %s). Ignoring cache at %s", version, path)
        return None
    stored_version = payload["__cache_version"]
    if stored_version != version:
        log.info("Cache is outdated (expected version %s, got %s). Ignoring cache at %s", version, stored_version, path)
        return None
    return payload["obj"]
def save_cache(path: str, version: Any, obj: Any) -> None:
    """Persist `obj` to `path` as a pickle, tagged with the given cache version.

    The tag is what :func:`load_cache` later checks to decide cache validity.
    """
    dump_pickle({"__cache_version": version, "obj": obj}, path)
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/util/cache.py",
"license": "MIT License",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:sync.py | import os
from repo_dir_sync import LibRepo, OtherRepo

# Sync-tool entry point: propagates this repository's library sources ("src")
# into dependent repositories via repo_dir_sync.
r = LibRepo(name="serena", libDirectory="src")
# Target: a serena-multiplexer checkout assumed to sit next to this repository,
# tracked on its "mux" branch — presumably a sibling dir; TODO confirm layout.
r.add(OtherRepo(name="mux", branch="mux", pathToLib=os.path.join("..", "serena-multiplexer", "src-serena")))
# Runs the interactive/main synchronisation routine provided by repo_dir_sync.
r.runMain()
| {
"repo_id": "oraios/serena",
"file_path": "sync.py",
"license": "MIT License",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:test/serena/test_cli_project_commands.py | """Tests for CLI project commands (create, index)."""
import os
import shutil
import tempfile
import time
from pathlib import Path
import pytest
from click.testing import CliRunner
from serena.cli import ProjectCommands, TopLevelCommands, find_project_root
from serena.config.serena_config import ProjectConfig
pytestmark = pytest.mark.filterwarnings("ignore::UserWarning")
@pytest.fixture
def temp_project_dir():
    """Yield a throwaway directory that is always removed after the test."""
    workdir = tempfile.mkdtemp()
    try:
        yield workdir
    finally:
        # Windows may briefly keep file handles open (e.g. language servers);
        # give it a moment, then remove best-effort to avoid PermissionError.
        if os.name == "nt":
            time.sleep(0.2)
        shutil.rmtree(workdir, ignore_errors=True)
@pytest.fixture
def temp_project_dir_with_python_file():
    """Yield a throwaway directory seeded with a single trivial Python file."""
    workdir = tempfile.mkdtemp()
    try:
        # One .py file is enough for project language auto-detection to work.
        source_path = os.path.join(workdir, "test.py")
        with open(source_path, "w") as handle:
            handle.write("def hello():\n    pass\n")
        yield workdir
    finally:
        # Windows may briefly keep file handles open; wait, then best-effort cleanup.
        if os.name == "nt":
            time.sleep(0.2)
        shutil.rmtree(workdir, ignore_errors=True)
@pytest.fixture
def cli_runner():
    """Provide a fresh Click CliRunner for invoking CLI commands in-process."""
    return CliRunner()
class TestProjectCreate:
    """Tests for the 'project create' CLI command (ProjectCommands.create)."""

    def test_create_basic_with_language(self, cli_runner, temp_project_dir):
        """Test basic project creation with explicit language."""
        result = cli_runner.invoke(ProjectCommands.create, [temp_project_dir, "--language", "python"])
        assert result.exit_code == 0, f"Command failed: {result.output}"
        assert "Generated project" in result.output
        assert "python" in result.output.lower()
        # Verify project.yml was created
        yml_path = os.path.join(temp_project_dir, ".serena", "project.yml")
        assert os.path.exists(yml_path), f"project.yml not found at {yml_path}"

    def test_create_auto_detect_language(self, cli_runner, temp_project_dir_with_python_file):
        """Test project creation with auto-detected language."""
        # No --language given: detection relies on the seeded .py file in the fixture.
        result = cli_runner.invoke(ProjectCommands.create, [temp_project_dir_with_python_file])
        assert result.exit_code == 0, f"Command failed: {result.output}"
        assert "Generated project" in result.output
        assert "python" in result.output.lower()
        # Verify project.yml was created
        yml_path = os.path.join(temp_project_dir_with_python_file, ".serena", "project.yml")
        assert os.path.exists(yml_path)

    def test_create_with_name(self, cli_runner, temp_project_dir):
        """Test project creation with custom name and explicit language."""
        result = cli_runner.invoke(ProjectCommands.create, [temp_project_dir, "--name", "my-custom-project", "--language", "python"])
        assert result.exit_code == 0, f"Command failed: {result.output}"
        assert "Generated project" in result.output
        # Verify project.yml was created
        yml_path = os.path.join(temp_project_dir, ".serena", "project.yml")
        assert os.path.exists(yml_path)

    def test_create_with_language(self, cli_runner, temp_project_dir):
        """Test project creation with specified language."""
        result = cli_runner.invoke(ProjectCommands.create, [temp_project_dir, "--language", "python"])
        assert result.exit_code == 0, f"Command failed: {result.output}"
        assert "Generated project" in result.output
        assert "python" in result.output.lower()

    def test_create_with_multiple_languages(self, cli_runner, temp_project_dir):
        """Test project creation with multiple languages."""
        result = cli_runner.invoke(
            ProjectCommands.create,
            [temp_project_dir, "--language", "python", "--language", "typescript"],
        )
        assert result.exit_code == 0, f"Command failed: {result.output}"
        assert "Generated project" in result.output

    def test_create_with_invalid_language(self, cli_runner, temp_project_dir):
        """Test project creation with invalid language raises error."""
        result = cli_runner.invoke(
            ProjectCommands.create,
            [temp_project_dir, "--language", "invalid-lang"],
        )
        assert result.exit_code != 0, "Should fail with invalid language"
        # Either Click's option validation or the command itself may word the error.
        assert "Unknown language" in result.output or "invalid-lang" in result.output

    def test_create_already_exists(self, cli_runner, temp_project_dir):
        """Test that creating a project twice fails gracefully."""
        # Create once with explicit language
        result1 = cli_runner.invoke(ProjectCommands.create, [temp_project_dir, "--language", "python"])
        assert result1.exit_code == 0
        # Try to create again - should fail gracefully
        result2 = cli_runner.invoke(ProjectCommands.create, [temp_project_dir, "--language", "python"])
        assert result2.exit_code != 0, "Should fail when project.yml already exists"
        assert "already exists" in result2.output.lower()
        assert "Error:" in result2.output  # Should be user-friendly error

    def test_create_with_index_flag(self, cli_runner, temp_project_dir_with_python_file):
        """Test project creation with --index flag performs indexing."""
        result = cli_runner.invoke(
            ProjectCommands.create,
            [temp_project_dir_with_python_file, "--language", "python", "--index", "--log-level", "ERROR", "--timeout", "5"],
        )
        assert result.exit_code == 0, f"Command failed: {result.output}"
        assert "Generated project" in result.output
        assert "Indexing project" in result.output
        # Verify project.yml was created
        yml_path = os.path.join(temp_project_dir_with_python_file, ".serena", "project.yml")
        assert os.path.exists(yml_path)
        # Verify cache directory was created (proof of indexing)
        cache_dir = os.path.join(temp_project_dir_with_python_file, ".serena", "cache")
        assert os.path.exists(cache_dir), "Cache directory should exist after indexing"

    def test_create_without_index_flag(self, cli_runner, temp_project_dir):
        """Test that project creation without --index does NOT perform indexing."""
        result = cli_runner.invoke(ProjectCommands.create, [temp_project_dir, "--language", "python"])
        assert result.exit_code == 0
        assert "Generated project" in result.output
        assert "Indexing" not in result.output
        # Verify cache directory was NOT created
        cache_dir = os.path.join(temp_project_dir, ".serena", "cache")
        assert not os.path.exists(cache_dir), "Cache directory should not exist without --index"
class TestProjectIndex:
    """Tests for the 'project index' CLI command (ProjectCommands.index)."""

    def test_index_auto_creates_project_with_files(self, cli_runner, temp_project_dir_with_python_file):
        """Test that index command auto-creates project.yml if it doesn't exist (with source files)."""
        result = cli_runner.invoke(ProjectCommands.index, [temp_project_dir_with_python_file, "--log-level", "ERROR", "--timeout", "5"])
        # Should succeed and perform indexing
        assert result.exit_code == 0, f"Command failed: {result.output}"
        assert "Auto-creating" in result.output or "Indexing" in result.output
        # Verify project.yml was auto-created
        yml_path = os.path.join(temp_project_dir_with_python_file, ".serena", "project.yml")
        assert os.path.exists(yml_path), "project.yml should be auto-created"

    def test_index_with_explicit_language(self, cli_runner, temp_project_dir):
        """Test index with explicit --language for empty directory."""
        result = cli_runner.invoke(
            ProjectCommands.index,
            [temp_project_dir, "--language", "python", "--log-level", "ERROR", "--timeout", "5"],
        )
        # Should succeed even without source files if language is explicit
        assert result.exit_code == 0, f"Command failed: {result.output}"
        yml_path = os.path.join(temp_project_dir, ".serena", "project.yml")
        assert os.path.exists(yml_path)

    def test_index_with_language_auto_creates(self, cli_runner, temp_project_dir):
        """Test index with --language option for auto-creation."""
        result = cli_runner.invoke(
            ProjectCommands.index,
            [temp_project_dir, "--language", "python", "--log-level", "ERROR"],
        )
        assert result.exit_code == 0 or "Indexing" in result.output
        yml_path = os.path.join(temp_project_dir, ".serena", "project.yml")
        assert os.path.exists(yml_path)

    def test_index_is_equivalent_to_create_with_index(self, cli_runner, temp_project_dir_with_python_file):
        """Test that 'index' behaves like 'create --index' for new projects."""
        # Use manual temp directory creation with Windows-safe cleanup
        # to avoid PermissionError on Windows CI when language servers hold file locks
        dir1 = tempfile.mkdtemp()
        dir2 = tempfile.mkdtemp()
        try:
            # Setup both directories with same file
            for d in [dir1, dir2]:
                with open(os.path.join(d, "test.py"), "w") as f:
                    f.write("def hello():\n    pass\n")
            # Run 'create --index' on dir1
            result1 = cli_runner.invoke(
                ProjectCommands.create, [dir1, "--language", "python", "--index", "--log-level", "ERROR", "--timeout", "5"]
            )
            # Run 'index' on dir2
            result2 = cli_runner.invoke(ProjectCommands.index, [dir2, "--language", "python", "--log-level", "ERROR", "--timeout", "5"])
            # Both should succeed
            assert result1.exit_code == 0, f"create --index failed: {result1.output}"
            assert result2.exit_code == 0, f"index failed: {result2.output}"
            # Both should create project.yml
            assert os.path.exists(os.path.join(dir1, ".serena", "project.yml"))
            assert os.path.exists(os.path.join(dir2, ".serena", "project.yml"))
            # Both should create cache (proof of indexing)
            assert os.path.exists(os.path.join(dir1, ".serena", "cache"))
            assert os.path.exists(os.path.join(dir2, ".serena", "cache"))
        finally:
            # Windows-safe cleanup: wait for file handles to be released
            if os.name == "nt":
                time.sleep(0.2)
            # Use ignore_errors to handle lingering file locks on Windows
            shutil.rmtree(dir1, ignore_errors=True)
            shutil.rmtree(dir2, ignore_errors=True)
class TestProjectCreateHelper:
    """Tests for the _create_project helper method (bypassing the CLI layer)."""

    def test_create_project_helper_returns_config(self, temp_project_dir):
        """Test that _create_project returns a ProjectConfig with explicit language."""
        config = ProjectCommands._create_project(temp_project_dir, "test-project", ("python",)).project_config
        assert isinstance(config, ProjectConfig)
        assert config.project_name == "test-project"

    def test_create_project_helper_with_auto_detect(self, temp_project_dir_with_python_file):
        """Test _create_project with auto-detected language."""
        # Empty language tuple triggers auto-detection from the seeded .py file.
        config = ProjectCommands._create_project(temp_project_dir_with_python_file, "my-project", ()).project_config
        assert isinstance(config, ProjectConfig)
        assert config.project_name == "my-project"
        assert len(config.languages) >= 1

    def test_create_project_helper_with_languages(self, temp_project_dir):
        """Test _create_project with language specification."""
        config = ProjectCommands._create_project(temp_project_dir, None, ("python", "typescript")).project_config
        assert isinstance(config, ProjectConfig)
        assert len(config.languages) >= 1

    def test_create_project_helper_file_exists_error(self, temp_project_dir):
        """Test _create_project raises error if project.yml exists."""
        # Create project first with explicit language
        ProjectCommands._create_project(temp_project_dir, None, ("python",))
        # Try to create again - should raise FileExistsError
        with pytest.raises(FileExistsError):
            ProjectCommands._create_project(temp_project_dir, None, ("python",))
class TestFindProjectRoot:
    """Tests for find_project_root helper with virtual chroot boundary.

    Each test chdirs into a subdirectory and restores the original cwd in a
    finally block, so a failing assertion cannot leak a changed working dir.
    """

    def test_finds_serena_from_subdirectory(self, temp_project_dir):
        """Test that .serena/project.yml is found when searching from a subdirectory."""
        serena_dir = os.path.join(temp_project_dir, ".serena")
        os.makedirs(serena_dir)
        Path(os.path.join(serena_dir, "project.yml")).touch()
        subdir = os.path.join(temp_project_dir, "src", "nested")
        os.makedirs(subdir)
        original_cwd = os.getcwd()
        try:
            os.chdir(subdir)
            result = find_project_root(root=temp_project_dir)
            assert result is not None
            # samefile avoids false negatives from symlinked temp dirs (e.g. macOS /tmp).
            assert os.path.samefile(result, temp_project_dir)
        finally:
            os.chdir(original_cwd)

    def test_serena_preferred_over_git(self, temp_project_dir):
        """Test that .serena/project.yml takes priority over .git at the same level."""
        serena_dir = os.path.join(temp_project_dir, ".serena")
        os.makedirs(serena_dir)
        Path(os.path.join(serena_dir, "project.yml")).touch()
        os.makedirs(os.path.join(temp_project_dir, ".git"))
        original_cwd = os.getcwd()
        try:
            os.chdir(temp_project_dir)
            result = find_project_root(root=temp_project_dir)
            assert result is not None
            assert os.path.isdir(os.path.join(result, ".serena"))
            assert os.path.samefile(result, temp_project_dir)
        finally:
            os.chdir(original_cwd)

    def test_git_used_as_fallback(self, temp_project_dir):
        """Test that .git is found when no .serena exists."""
        os.makedirs(os.path.join(temp_project_dir, ".git"))
        subdir = os.path.join(temp_project_dir, "src")
        os.makedirs(subdir)
        original_cwd = os.getcwd()
        try:
            os.chdir(subdir)
            result = find_project_root(root=temp_project_dir)
            assert result is not None
            assert os.path.samefile(result, temp_project_dir)
        finally:
            os.chdir(original_cwd)

    def test_falls_back_to_none_when_no_markers(self, temp_project_dir):
        """Test falls back to None when no markers exist within boundary."""
        subdir = os.path.join(temp_project_dir, "src")
        os.makedirs(subdir)
        original_cwd = os.getcwd()
        try:
            os.chdir(subdir)
            result = find_project_root(root=temp_project_dir)
            assert result is None
        finally:
            os.chdir(original_cwd)
class TestProjectFromCwdMutualExclusivity:
    """Tests for --project-from-cwd mutual exclusivity with --project."""

    def test_project_from_cwd_with_project_flag_fails(self, cli_runner):
        """Test that --project-from-cwd with --project raises error."""
        result = cli_runner.invoke(
            TopLevelCommands.start_mcp_server,
            ["--project-from-cwd", "--project", "/some/path"],
        )
        assert result.exit_code != 0
        # Only the key phrase is asserted so the exact wording may evolve.
        assert "cannot be used with" in result.output
# Allows running this file directly with `python` instead of invoking pytest.
if __name__ == "__main__":
    # For manual testing, you can run this file directly:
    # uv run pytest test/serena/test_cli_project_commands.py -v
    pytest.main([__file__, "-v"])
| {
"repo_id": "oraios/serena",
"file_path": "test/serena/test_cli_project_commands.py",
"license": "MIT License",
"lines": 283,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/serena/test_task_executor.py | import time
import pytest
from serena.task_executor import TaskExecutor
@pytest.fixture
def executor():
    """
    Fixture providing a fresh TaskExecutor (named "TestExecutor") for each test
    """
    return TaskExecutor("TestExecutor")
class Task:
    """Test helper: a runnable unit with a configurable sleep and optional failure."""

    def __init__(self, delay: float, exception: bool = False):
        # Flag inspected by tests to check whether run() was ever entered.
        self.did_run = False
        self.delay = delay
        self.exception = exception

    def run(self):
        """Sleep for `delay` seconds, then raise ValueError if configured, else return True."""
        self.did_run = True
        time.sleep(self.delay)
        if not self.exception:
            return True
        raise ValueError("Task failed")
def test_task_executor_sequence(executor):
    """
    Tests that a sequence of tasks is executed correctly
    """
    future1 = executor.issue_task(Task(1).run, name="task1")
    future2 = executor.issue_task(Task(1).run, name="task2")
    # Both issued tasks must complete and yield Task.run's return value.
    assert future1.result() is True
    assert future2.result() is True
def test_task_executor_exception(executor):
    """
    Tests that tasks that raise exceptions are handled correctly, i.e. that
      * the exception is propagated,
      * subsequent tasks are still executed.
    """
    future1 = executor.issue_task(Task(1, exception=True).run, name="task1")
    future2 = executor.issue_task(Task(1).run, name="task2")
    have_exception = False
    try:
        # result() must re-raise the ValueError raised inside Task.run.
        assert future1.result()
    except Exception as e:
        assert isinstance(e, ValueError)
        have_exception = True
    assert have_exception
    # The failing task must not block the queue: task2 still runs to completion.
    assert future2.result() is True
def test_task_executor_cancel_current(executor):
    """
    Tests that tasks that are cancelled are handled correctly, i.e. that
      * subsequent tasks are executed as soon as cancellation ensues.
      * the cancelled task raises CancelledError when result() is called.
    """
    start_time = time.time()
    future1 = executor.issue_task(Task(10).run, name="task1")
    future2 = executor.issue_task(Task(1).run, name="task2")
    time.sleep(1)
    future1.cancel()
    assert future2.result() is True
    end_time = time.time()
    # task1 would have slept 10s; finishing well under that proves cancellation worked.
    assert (end_time - start_time) < 9, "Cancelled task did not stop in time"
    have_cancelled_error = False
    try:
        future1.result()
    except Exception as e:
        # Compare by class name to stay agnostic of where CancelledError is imported from.
        assert e.__class__.__name__ == "CancelledError"
        have_cancelled_error = True
    assert have_cancelled_error
def test_task_executor_cancel_future(executor):
    """
    Tests that when a future task is cancelled, it is never run at all
    """
    from concurrent.futures import CancelledError

    task1 = Task(10)
    task2 = Task(1)
    future1 = executor.issue_task(task1.run, name="task1")
    future2 = executor.issue_task(task2.run, name="task2")
    time.sleep(1)
    future2.cancel()
    future1.cancel()
    # result() on a cancelled future raises CancelledError; catch it explicitly
    # rather than with a bare `except:` so unexpected failures are not swallowed.
    # (CancelledError derives from BaseException since Python 3.8, so
    # `except Exception:` would NOT catch it — the explicit type is required.)
    try:
        future2.result()
    except CancelledError:
        pass
    # task1 was already running when cancelled, so it did start; task2 was
    # cancelled while still queued and must never have run at all.
    assert task1.did_run
    assert not task2.did_run
def test_task_executor_cancellation_via_task_info(executor):
    """
    Tests task introspection via get_current_tasks: expected task names,
    stable task identifiers, and cancellation through TaskInfo objects.
    """
    from concurrent.futures import CancelledError

    start_time = time.time()
    executor.issue_task(Task(10).run, "task1")
    executor.issue_task(Task(10).run, "task2")
    task_infos = executor.get_current_tasks()
    task_infos2 = executor.get_current_tasks()
    # test expected tasks
    assert len(task_infos) == 2
    assert "task1" in task_infos[0].name
    assert "task2" in task_infos[1].name
    # test task identifiers being stable
    assert task_infos2[0].task_id == task_infos[0].task_id
    # test cancellation
    task_infos[0].cancel()
    time.sleep(0.5)
    task_infos3 = executor.get_current_tasks()
    assert len(task_infos3) == 1  # Cancelled task is gone from the queue
    task_infos3[0].cancel()
    # Narrow the former bare `except:` to the expected CancelledError so that
    # genuine task failures surface instead of being silently swallowed.
    # (CancelledError derives from BaseException, so `except Exception:` would
    # not be sufficient here.)
    try:
        task_infos3[0].future.result()
    except CancelledError:
        pass
    end_time = time.time()
    # Both tasks would have slept 10s each; finishing under 9s proves cancellation.
    assert (end_time - start_time) < 9, "Cancelled task did not stop in time"
| {
"repo_id": "oraios/serena",
"file_path": "test/serena/test_task_executor.py",
"license": "MIT License",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/fortran/test_fortran_basic.py | """
Basic tests for Fortran language server integration.
These tests validate some low-level LSP functionality and high-level Serena APIs.
Note: These tests require fortls to be installed: pip install fortls
"""
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_types import SymbolKind
from solidlsp.ls_utils import SymbolUtils
# Mark all tests in this module as fortran tests
pytestmark = pytest.mark.fortran
class TestFortranLanguageServer:
    """Test Fortran language server functionality.

    All tests run against the fortls-backed language server fixture,
    parametrized with Language.FORTRAN, and operate on the Fortran test
    repository (main.f90 plus modules/math_utils.f90 and modules/geometry.f90).
    """

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test finding symbols using request_full_symbol_tree."""
        symbols = language_server.request_full_symbol_tree()
        # Verify program symbol
        assert SymbolUtils.symbol_tree_contains_name(symbols, "test_program"), "test_program not found in symbol tree"
        # Verify module symbol
        assert SymbolUtils.symbol_tree_contains_name(symbols, "math_utils"), "math_utils module not found in symbol tree"
        # Verify function symbols
        assert SymbolUtils.symbol_tree_contains_name(symbols, "add_numbers"), "add_numbers function not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "multiply_numbers"), "multiply_numbers function not found in symbol tree"
        # Verify subroutine symbol
        assert SymbolUtils.symbol_tree_contains_name(symbols, "print_result"), "print_result subroutine not found in symbol tree"

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_request_document_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test that document symbols can be retrieved from Fortran files."""
        # Test main.f90 - should have a program symbol
        main_symbols, _ = language_server.request_document_symbols("main.f90").get_all_symbols_and_roots()
        program_names = [s.get("name") for s in main_symbols]
        assert "test_program" in program_names, f"Program 'test_program' not found in main.f90. Found: {program_names}"
        # Test modules/math_utils.f90 - should have module and function symbols
        module_symbols, _ = language_server.request_document_symbols("modules/math_utils.f90").get_all_symbols_and_roots()
        all_names = [s.get("name") for s in module_symbols]
        assert "math_utils" in all_names, f"Module 'math_utils' not found. Found: {all_names}"
        assert "add_numbers" in all_names, f"Function 'add_numbers' not found. Found: {all_names}"
        assert "multiply_numbers" in all_names, f"Function 'multiply_numbers' not found. Found: {all_names}"
        assert "print_result" in all_names, f"Subroutine 'print_result' not found. Found: {all_names}"

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_find_references_cross_file(self, language_server: SolidLanguageServer) -> None:
        """Test finding references across files using low-level request_references.

        This tests the LSP textDocument/references capability.
        """
        file_path = "modules/math_utils.f90"
        # NOTE: no tuple-unpack here — `symbols` is the (all_symbols, roots) pair,
        # and the root list is indexed as symbols[0] below.
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Find the add_numbers function
        add_numbers_symbol = None
        for sym in symbols[0]:
            if sym.get("name") == "add_numbers":
                add_numbers_symbol = sym
                break
        assert add_numbers_symbol is not None, "Could not find 'add_numbers' function symbol in math_utils.f90"
        # Use selectionRange to query for references
        # Note: FortranLanguageServer automatically fixes fortls's incorrect selectionRange
        sel_start = add_numbers_symbol["selectionRange"]["start"]
        # Query from the function name position using corrected selectionRange
        refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
        # Should find references (usage in main.f90 + definition in math_utils.f90)
        assert len(refs) > 0, "Should find references to add_numbers function"
        # Verify that main.f90 references the function
        main_refs = [ref for ref in refs if "main.f90" in ref.get("relativePath", "")]
        assert (
            len(main_refs) > 0
        ), f"Expected to find reference in main.f90, but found references in: {[ref.get('relativePath') for ref in refs]}"

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_find_definition_cross_file(self, language_server: SolidLanguageServer) -> None:
        """Test finding definition across files using request_definition."""
        # In main.f90, line 7 (0-indexed: line 6) contains: result = add_numbers(5.0, 3.0)
        # We want to find the definition of add_numbers in modules/math_utils.f90
        main_file = "main.f90"
        # Position on 'add_numbers' usage (approximately column 13)
        definition_location_list = language_server.request_definition(main_file, 6, 13)
        if not definition_location_list:
            pytest.skip("fortls does not support cross-file go-to-definition for this case")
        assert len(definition_location_list) >= 1, "Should find at least one definition"
        definition_location = definition_location_list[0]
        # The definition should be in modules/math_utils.f90
        assert "math_utils.f90" in definition_location.get(
            "uri", ""
        ), f"Expected definition to be in math_utils.f90, but found in: {definition_location.get('uri')}"
        # Verify the definition is around the correct line (line 4, 0-indexed)
        assert (
            definition_location["range"]["start"]["line"] == 4
        ), f"Expected definition at line 4, but found at line {definition_location['range']['start']['line']}"

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_request_referencing_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test finding symbols that reference a function - Serena's high-level API.

        This tests request_referencing_symbols which returns not just locations but also
        the containing symbols that have the references. This is different from
        test_find_references_cross_file which only returns locations.

        Note: FortranLanguageServer automatically fixes fortls's incorrect selectionRange.
        """
        # Get the add_numbers function symbol from math_utils.f90
        file_path = "modules/math_utils.f90"
        symbols, _ = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Find the add_numbers function
        add_numbers_symbol = None
        for sym in symbols:
            if sym.get("name") == "add_numbers":
                add_numbers_symbol = sym
                break
        assert add_numbers_symbol is not None, "Could not find 'add_numbers' function symbol"
        # Use selectionRange to query for referencing symbols
        # FortranLanguageServer automatically corrects fortls's incorrect selectionRange
        sel_start = add_numbers_symbol["selectionRange"]["start"]
        referencing_symbols = language_server.request_referencing_symbols(file_path, sel_start["line"], sel_start["character"])
        # Should find referencing symbols (not just locations, but symbols containing the references)
        assert len(referencing_symbols) > 0, "Should find referencing symbols when querying from function name position"
        # Extract the symbols from ReferenceInSymbol objects
        # This is what makes this test different from test_find_references_cross_file:
        # we're testing that we get back SYMBOLS (with name, kind, location) not just locations
        ref_symbols = [ref.symbol for ref in referencing_symbols]
        # Verify we got valid symbol structures with all required fields
        for symbol in ref_symbols:
            assert "name" in symbol, f"Symbol should have a name: {symbol}"
            assert "kind" in symbol, f"Symbol should have a kind: {symbol}"
            # Each symbol should have location information
            assert "location" in symbol, f"Symbol should have location: {symbol}"
        # Note: fortls may not return all cross-file references through request_referencing_symbols
        # because it depends on finding containing symbols for each reference. We verify that
        # the API works and returns valid symbols with proper structure.

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_request_defining_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test finding the defining symbol - Serena's high-level API.

        This is similar to test_find_definition_cross_file but uses the high-level
        request_defining_symbol which returns a full symbol with metadata, not just a location.
        """
        # In main.f90, line 7 (0-indexed: line 6) contains: result = add_numbers(5.0, 3.0)
        # We want to find the definition of add_numbers
        main_file = "main.f90"
        # Get the position of add_numbers usage in main.f90
        # Position on 'add_numbers' (approximately column 13)
        defining_symbol = language_server.request_defining_symbol(main_file, 6, 13)
        if defining_symbol is None:
            pytest.skip("fortls does not support cross-file go-to-definition for this case")
        # Should find the add_numbers function with full symbol information
        assert defining_symbol.get("name") == "add_numbers", f"Expected to find 'add_numbers' but got '{defining_symbol.get('name')}'"
        # Check if we have location information
        if "location" not in defining_symbol or "relativePath" not in defining_symbol["location"]:
            pytest.skip("fortls found the symbol but doesn't provide complete location information")
        # The definition should be in modules/math_utils.f90
        defining_path = defining_symbol["location"]["relativePath"]
        assert "math_utils.f90" in defining_path, f"Expected definition to be in math_utils.f90, but found in: {defining_path}"

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_request_containing_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test finding the containing symbol for a position in the code."""
        # Test finding the containing symbol for a position inside the add_numbers function
        file_path = "modules/math_utils.f90"
        # Line 8 (0-indexed: line 7) is inside the add_numbers function: "sum = a + b"
        containing_symbol = language_server.request_containing_symbol(file_path, 7, 10, include_body=False)
        if containing_symbol is None:
            pytest.skip("fortls does not support request_containing_symbol or couldn't find the containing symbol")
        # Should find the add_numbers function as the containing symbol
        assert (
            containing_symbol.get("name") == "add_numbers"
        ), f"Expected containing symbol 'add_numbers', got '{containing_symbol.get('name')}'"
        # Verify the symbol kind is Function
        assert (
            containing_symbol.get("kind") == SymbolKind.Function.value
        ), f"Expected Function kind ({SymbolKind.Function.value}), got {containing_symbol.get('kind')}"
        # Verify location information exists
        assert "location" in containing_symbol, "Containing symbol should have location information"
        location = containing_symbol["location"]
        assert "range" in location, "Location should contain range information"
        assert "start" in location["range"] and "end" in location["range"], "Range should have start and end positions"

    @pytest.mark.parametrize("language_server", [Language.FORTRAN], indirect=True)
    def test_type_and_interface_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test that type definitions and interfaces are properly recognized with corrected selectionRange.

        This verifies that the regex pattern correctly handles:
        - Simple type definitions (type Name)
        - Type with double colon (type :: Name)
        - Type with extends (type, extends(Base) :: Derived)
        - Named interfaces

        fortls returns these as SymbolKind.Class (11) for types and SymbolKind.Interface (5) for interfaces.
        """
        file_path = "modules/geometry.f90"
        symbols, _ = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Find type and interface symbols
        type_names = []
        interface_names = []
        for sym in symbols:
            if sym.get("kind") == SymbolKind.Class.value:  # Type definitions
                type_names.append(sym.get("name"))
            elif sym.get("kind") == SymbolKind.Interface.value:  # Interfaces
                interface_names.append(sym.get("name"))
        # Verify type definitions are found
        assert "Point2D" in type_names, f"Simple type 'Point2D' not found. Found types: {type_names}"
        assert "Circle" in type_names, f"Type with :: syntax 'Circle' not found. Found types: {type_names}"
        assert "Point3D" in type_names, f"Type with extends 'Point3D' not found. Found types: {type_names}"
        # Verify interface is found
        assert "distance" in interface_names, f"Interface 'distance' not found. Found interfaces: {interface_names}"
        # Verify selectionRange is corrected for a type symbol
        point3d_symbol = None
        for sym in symbols:
            if sym.get("name") == "Point3D":
                point3d_symbol = sym
                break
        assert point3d_symbol is not None, "Could not find 'Point3D' type symbol"
        # Use corrected selectionRange to find references
        # This tests that the fix works for types (not just functions)
        sel_start = point3d_symbol["selectionRange"]["start"]
        # Verify selectionRange points to identifier name, not line start
        # Line for "type, extends(Point2D) :: Point3D" has Point3D at position > 0
        assert (
            sel_start["character"] > 0
        ), f"selectionRange should point to identifier, not line start. Got character: {sel_start['character']}"
        # Test that we can find references using the corrected position
        _refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
        # refs might be empty if Point3D isn't used elsewhere, but the call should not fail
        # The important thing is that it doesn't error due to wrong character position
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/fortran/test_fortran_basic.py",
"license": "MIT License",
"lines": 213,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/haskell/test_haskell_basic.py | """
Rigorous tests for Haskell Language Server integration with Serena.
Tests prove that Serena's symbol tools can:
1. Discover all expected symbols with precise matching
2. Track cross-file references accurately
3. Identify data type structures and record fields
4. Navigate between definitions and usages
Test Repository Structure:
- src/Calculator.hs: Calculator data type, arithmetic functions (add, subtract, multiply, divide, calculate)
- src/Helper.hs: Helper functions (validateNumber, isPositive, isNegative, absolute)
- app/Main.hs: Main entry point using Calculator and Helper modules
"""
import sys
import pytest
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import Language
@pytest.mark.haskell
@pytest.mark.skipif(sys.platform == "win32", reason="HLS not installed on Windows CI")
class TestHaskellLanguageServer:
    """Integration tests for the Haskell Language Server (HLS) via Serena's symbol tools.

    Covers symbol discovery, cross-file and within-file reference tracking over the
    Haskell test repository described in the module docstring.
    """

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_calculator_module_symbols(self, language_server: SolidLanguageServer):
        """
        Test precise symbol discovery in Calculator.hs.

        Verifies that Serena can identify:
        - Data type definition (Calculator with record fields)
        - All exported functions with correct names
        - Module structure
        """
        all_symbols, _ = language_server.request_document_symbols("src/Calculator.hs").get_all_symbols_and_roots()
        symbol_names = {s["name"] for s in all_symbols}
        # Verify exact set of expected top-level symbols
        expected_symbols = {
            "Calculator",  # Data type
            "add",  # Function: Int -> Int -> Int
            "subtract",  # Function: Int -> Int -> Int
            "multiply",  # Function: Int -> Int -> Int
            "divide",  # Function: Int -> Int -> Maybe Int
            "calculate",  # Function: Calculator -> String -> Int -> Int -> Maybe Int
        }
        # Verify all expected symbols are present
        missing = expected_symbols - symbol_names
        assert not missing, f"Missing expected symbols in Calculator.hs: {missing}"
        # Verify Calculator data type exists
        calculator_symbol = next((s for s in all_symbols if s["name"] == "Calculator"), None)
        assert calculator_symbol is not None, "Calculator data type not found"
        # The Calculator should be identified as a data type
        # HLS may use different SymbolKind values (1=File, 5=Class, 23=Struct)
        assert calculator_symbol["kind"] in [
            1,
            5,
            23,
        ], f"Calculator should be a data type (kind 1, 5, or 23), got kind {calculator_symbol['kind']}"

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_helper_module_symbols(self, language_server: SolidLanguageServer):
        """
        Test precise symbol discovery in Helper.hs.

        Verifies Serena identifies all helper functions that are imported
        and used by Calculator module.
        """
        all_symbols, _ = language_server.request_document_symbols("src/Helper.hs").get_all_symbols_and_roots()
        symbol_names = {s["name"] for s in all_symbols}
        # Verify expected helper functions (module name may also appear)
        expected_symbols = {
            "validateNumber",  # Function used by Calculator.add and Calculator.subtract
            "isPositive",  # Predicate function
            "isNegative",  # Predicate function used by absolute
            "absolute",  # Function that uses isNegative
        }
        # All expected symbols should be present (module name is optional)
        missing = expected_symbols - symbol_names
        assert not missing, f"Missing expected symbols in Helper.hs: {missing}"
        # Verify no unexpected symbols beyond the module name
        extra = symbol_names - expected_symbols - {"Helper"}
        assert not extra, f"Unexpected symbols in Helper.hs: {extra}"

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_main_module_imports(self, language_server: SolidLanguageServer):
        """
        Test that Main.hs properly references both Calculator and Helper modules.

        Verifies Serena can identify cross-module dependencies.
        """
        all_symbols, _ = language_server.request_document_symbols("app/Main.hs").get_all_symbols_and_roots()
        symbol_names = {s["name"] for s in all_symbols}
        # Main.hs should have the main function
        assert "main" in symbol_names, "Main.hs should contain 'main' function"

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_cross_file_references_validateNumber(self, language_server: SolidLanguageServer):
        """
        Test cross-file reference tracking for validateNumber function.

        validateNumber is defined in Helper.hs:9 and used in:
        - Calculator.hs:21 (in add function)
        - Calculator.hs:25 (in subtract function)

        This proves Serena can track function usage across module boundaries.
        """
        # Get references to validateNumber (defined at line 9, 0-indexed = line 8)
        references = language_server.request_references("src/Helper.hs", line=8, column=0)
        # Should find at least: definition in Helper.hs + 2 usages in Calculator.hs
        assert len(references) >= 2, f"Expected at least 2 references to validateNumber (used in add and subtract), got {len(references)}"
        # Verify we have references in Calculator.hs
        reference_paths = [ref["relativePath"] for ref in references]
        calculator_refs = [path for path in reference_paths if "Calculator.hs" in path]
        assert len(calculator_refs) >= 2, (
            f"Expected at least 2 references in Calculator.hs (add and subtract functions), "
            f"got {len(calculator_refs)} references in Calculator.hs"
        )

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_within_file_references_isNegative(self, language_server: SolidLanguageServer):
        """
        Test within-file reference tracking for isNegative function.

        isNegative is defined in Helper.hs:17 and used in Helper.hs:22 (absolute function).
        This proves Serena can track intra-module function calls.
        """
        # isNegative defined at line 17 (0-indexed = line 16)
        references = language_server.request_references("src/Helper.hs", line=16, column=0)
        # Should find: definition + usage in absolute function
        assert len(references) >= 1, f"Expected at least 1 reference to isNegative (used in absolute), got {len(references)}"
        # All references should be in Helper.hs
        reference_paths = [ref["relativePath"] for ref in references]
        assert all(
            "Helper.hs" in path for path in reference_paths
        ), f"All isNegative references should be in Helper.hs, got: {reference_paths}"

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_function_references_from_main(self, language_server: SolidLanguageServer):
        """
        Test that functions used in Main.hs can be traced back to their definitions.

        Main.hs:12 calls 'add' from Calculator module.
        Main.hs:25 calls 'isPositive' from Helper module.
        Main.hs:26 calls 'absolute' from Helper module.

        This proves Serena can track cross-module function calls from executable code.
        """
        # Test 'add' function references (defined in Calculator.hs:20, 0-indexed = line 19)
        add_refs = language_server.request_references("src/Calculator.hs", line=19, column=0)
        # Should find references in Main.hs and possibly Calculator.hs (calculate function uses it)
        assert len(add_refs) >= 1, f"Expected at least 1 reference to 'add', got {len(add_refs)}"
        add_ref_paths = [ref["relativePath"] for ref in add_refs]
        # Should have at least one reference in Main.hs or Calculator.hs
        assert any(
            "Main.hs" in path or "Calculator.hs" in path for path in add_ref_paths
        ), f"Expected 'add' to be referenced in Main.hs or Calculator.hs, got: {add_ref_paths}"

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_multiply_function_usage_in_calculate(self, language_server: SolidLanguageServer):
        """
        Test that multiply function usage is tracked within Calculator module.

        multiply is defined in Calculator.hs:28 and used in:
        - Calculator.hs:41 (in calculate function via pattern matching)
        - Main.hs:20 (via calculate call with "multiply" operator)

        This proves Serena can track function references even when called indirectly.
        """
        # multiply defined at line 28 (0-indexed = line 27)
        multiply_refs = language_server.request_references("src/Calculator.hs", line=27, column=0)
        # Should find at least the usage in calculate function
        assert len(multiply_refs) >= 1, f"Expected at least 1 reference to 'multiply', got {len(multiply_refs)}"
        # Should have reference in Calculator.hs (calculate function)
        multiply_ref_paths = [ref["relativePath"] for ref in multiply_refs]
        assert any(
            "Calculator.hs" in path for path in multiply_ref_paths
        ), f"Expected 'multiply' to be referenced in Calculator.hs, got: {multiply_ref_paths}"

    @pytest.mark.parametrize("language_server", [Language.HASKELL], indirect=True)
    def test_data_type_constructor_references(self, language_server: SolidLanguageServer):
        """
        Test that Calculator data type constructor usage is tracked.

        Calculator is defined in Calculator.hs:14 and used in:
        - Main.hs:8 (constructor call: Calculator "TestCalc" 1)
        - Calculator.hs:37 (type signature for calculate function)

        This proves Serena can track data type constructor references.
        """
        # Calculator data type defined at line 14 (0-indexed = line 13)
        calculator_refs = language_server.request_references("src/Calculator.hs", line=13, column=5)
        # Should find usage in Main.hs
        assert len(calculator_refs) >= 1, f"Expected at least 1 reference to Calculator constructor, got {len(calculator_refs)}"
        # Should have at least one reference in Main.hs or Calculator.hs
        calc_ref_paths = [ref["relativePath"] for ref in calculator_refs]
        assert any(
            "Main.hs" in path or "Calculator.hs" in path for path in calc_ref_paths
        ), f"Expected Calculator to be referenced in Main.hs or Calculator.hs, got: {calc_ref_paths}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/haskell/test_haskell_basic.py",
"license": "MIT License",
"lines": 174,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/julia/test_julia_basic.py | import pytest
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import Language
@pytest.mark.julia
class TestJuliaLanguageServer:
    """Basic symbol- and reference-finding tests against the Julia language server."""

    @pytest.mark.parametrize("language_server", [Language.JULIA], indirect=True)
    def test_julia_symbols(self, language_server: SolidLanguageServer):
        """The top-level symbols of main.jl are discoverable."""
        symbols, _ = language_server.request_document_symbols("main.jl").get_all_symbols_and_roots()
        names = {sym["name"] for sym in symbols}
        assert "calculate_sum" in names
        assert "main" in names

    @pytest.mark.parametrize("language_server", [Language.JULIA], indirect=True)
    def test_julia_within_file_references(self, language_server: SolidLanguageServer):
        """References to a function are found within the defining file."""
        # 'calculate_sum' starts at line 2, column 9 (LSP uses 0-based indexing).
        refs = language_server.request_references("main.jl", line=2, column=9)
        # At minimum the definition or the call site must show up.
        assert len(refs) >= 1, f"Expected at least 1 reference, got {len(refs)}"
        # At least one reference must live in main.jl itself.
        paths = [ref["relativePath"] for ref in refs]
        assert "main.jl" in paths

    @pytest.mark.parametrize("language_server", [Language.JULIA], indirect=True)
    def test_julia_cross_file_references(self, language_server: SolidLanguageServer):
        """References to a function defined in another file are found."""
        # 'say_hello' starts at line 1, column 13 in lib/helper.jl (0-based indexing).
        refs = language_server.request_references("lib/helper.jl", line=1, column=13)
        # At least the call site in main.jl should be reported.
        assert len(refs) >= 1, f"Expected at least 1 reference, got {len(refs)}"
        # The reference might be in either file (definition or usage).
        paths = [ref["relativePath"] for ref in refs]
        assert "main.jl" in paths or "lib/helper.jl" in paths
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/julia/test_julia_basic.py",
"license": "MIT License",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/scala/test_scala_language_server.py | # type: ignore
import os
import pytest
from solidlsp.language_servers.scala_language_server import ScalaLanguageServer
from solidlsp.ls_config import Language, LanguageServerConfig
from solidlsp.settings import SolidLSPSettings
# Skip the whole module: these tests need the Scala test project to be compiled first.
pytest.skip("Scala must be compiled for these tests to run through, which is a huge hassle", allow_module_level=True)

# Path of the main source file exercised by every test below, relative to the repo root.
MAIN_FILE_PATH = os.path.join("src", "main", "scala", "com", "example", "Main.scala")

# Mark every test in this module as a Scala test.
pytestmark = pytest.mark.scala
@pytest.fixture(scope="module")
def scala_ls():
    """Module-scoped fixture yielding a running Scala language server over the test repo."""
    settings = SolidLSPSettings()
    ls_config = LanguageServerConfig(code_language=Language.SCALA)
    root = os.path.abspath("test/resources/repos/scala")
    server = ScalaLanguageServer(ls_config, root, settings)
    with server.start_server():
        yield server
def test_scala_document_symbols(scala_ls):
    """Test document symbols for Main.scala.

    Verifies the first ten symbols appear in source order with the expected names.
    """
    symbols, _ = scala_ls.request_document_symbols(MAIN_FILE_PATH).get_all_symbols_and_roots()
    symbol_names = [s["name"] for s in symbols]
    # Compare the whole prefix in one assert: a failure then shows the complete diff
    # instead of stopping at the first mismatching index, and a short result list
    # produces a clean assertion failure rather than an IndexError.
    expected_prefix = [
        "com.example",
        "Main",
        "main",
        "result",
        "sum",
        "add",
        "someMethod",
        "str",
        "Config",
        "field1",  # confirm https://github.com/oraios/serena/issues/688
    ]
    assert symbol_names[: len(expected_prefix)] == expected_prefix
def test_scala_references_within_same_file(scala_ls):
    """Resolve the symbol at (12, 23) in Main.scala and check its definition location.

    NOTE(review): despite the test name, this exercises go-to-definition rather than
    find-references — confirm whether a rename is intended.
    """
    defs = scala_ls.request_definition(MAIN_FILE_PATH, 12, 23)
    target = defs[0]
    assert target["uri"].endswith("Main.scala")
    rng = target["range"]
    assert rng["start"]["line"] == 16
    assert rng["start"]["character"] == 6
    assert rng["end"]["line"] == 16
    assert rng["end"]["character"] == 9
def test_scala_find_definition_and_references_across_files(scala_ls):
    """Jump from a usage in Main.scala to its single definition in Utils.scala."""
    defs = scala_ls.request_definition(MAIN_FILE_PATH, 8, 25)
    assert len(defs) == 1
    definition = defs[0]
    assert definition["uri"].endswith("Utils.scala")
    start = definition["range"]["start"]
    end = definition["range"]["end"]
    assert start["line"] == 7
    assert start["character"] == 6
    assert end["line"] == 7
    assert end["character"] == 14
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/scala/test_scala_language_server.py",
"license": "MIT License",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/yaml_ls/test_yaml_basic.py | """
Basic integration tests for the YAML language server functionality.
These tests validate the functionality of the language server APIs
like request_document_symbols using the YAML test repository.
"""
from pathlib import Path
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
@pytest.mark.yaml
class TestYAMLLanguageServerBasics:
    """Test basic functionality of the YAML language server."""

    @pytest.mark.parametrize("language_server", [Language.YAML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.YAML], indirect=True)
    def test_yaml_language_server_initialization(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that YAML language server can be initialized successfully."""
        assert language_server is not None
        assert language_server.language == Language.YAML
        assert language_server.is_running()
        # The server must be rooted at the test repository path.
        assert Path(language_server.language_server.repository_root_path).resolve() == repo_path.resolve()

    @pytest.mark.parametrize("language_server", [Language.YAML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.YAML], indirect=True)
    def test_yaml_config_file_symbols(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test document symbols detection in config.yaml with specific symbol verification."""
        all_symbols, root_symbols = language_server.request_document_symbols("config.yaml").get_all_symbols_and_roots()
        assert all_symbols is not None, "Should return symbols for config.yaml"
        assert len(all_symbols) > 0, f"Should find symbols in config.yaml, found {len(all_symbols)}"
        # Verify specific top-level keys are detected
        symbol_names = [sym.get("name") for sym in all_symbols]
        assert "app" in symbol_names, "Should detect 'app' key in config.yaml"
        assert "database" in symbol_names, "Should detect 'database' key in config.yaml"
        assert "logging" in symbol_names, "Should detect 'logging' key in config.yaml"
        assert "features" in symbol_names, "Should detect 'features' key in config.yaml"
        # Verify nested symbols exist (child keys under 'app')
        assert "name" in symbol_names, "Should detect nested 'name' key"
        assert "port" in symbol_names, "Should detect nested 'port' key"
        assert "debug" in symbol_names, "Should detect nested 'debug' key"
        # Check symbol kinds are appropriate (LSP kinds: 2=module/namespace, 15=string, 16=number, 17=boolean)
        app_symbol = next((s for s in all_symbols if s.get("name") == "app"), None)
        assert app_symbol is not None, "Should find 'app' symbol"
        assert app_symbol.get("kind") == 2, "Top-level object should have kind 2 (module/namespace)"
        port_symbol = next((s for s in all_symbols if s.get("name") == "port"), None)
        assert port_symbol is not None, "Should find 'port' symbol"
        assert port_symbol.get("kind") == 16, "'port' should have kind 16 (number)"
        debug_symbol = next((s for s in all_symbols if s.get("name") == "debug"), None)
        assert debug_symbol is not None, "Should find 'debug' symbol"
        assert debug_symbol.get("kind") == 17, "'debug' should have kind 17 (boolean)"

    @pytest.mark.parametrize("language_server", [Language.YAML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.YAML], indirect=True)
    def test_yaml_services_file_symbols(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test symbol detection in services.yml Docker Compose file."""
        all_symbols, root_symbols = language_server.request_document_symbols("services.yml").get_all_symbols_and_roots()
        assert all_symbols is not None, "Should return symbols for services.yml"
        assert len(all_symbols) > 0, f"Should find symbols in services.yml, found {len(all_symbols)}"
        # Verify specific top-level keys from Docker Compose file
        symbol_names = [sym.get("name") for sym in all_symbols]
        assert "version" in symbol_names, "Should detect 'version' key"
        assert "services" in symbol_names, "Should detect 'services' key"
        assert "networks" in symbol_names, "Should detect 'networks' key"
        assert "volumes" in symbol_names, "Should detect 'volumes' key"
        # Verify service names
        assert "web" in symbol_names, "Should detect 'web' service"
        assert "api" in symbol_names, "Should detect 'api' service"
        assert "database" in symbol_names, "Should detect 'database' service"
        # Check that arrays are properly detected
        ports_symbols = [s for s in all_symbols if s.get("name") == "ports"]
        assert len(ports_symbols) > 0, "Should find 'ports' arrays in services"
        # Arrays should have kind 18
        for ports_sym in ports_symbols:
            assert ports_sym.get("kind") == 18, "'ports' should have kind 18 (array)"

    @pytest.mark.parametrize("language_server", [Language.YAML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.YAML], indirect=True)
    def test_yaml_data_file_symbols(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test symbol detection in data.yaml file with array structures."""
        all_symbols, root_symbols = language_server.request_document_symbols("data.yaml").get_all_symbols_and_roots()
        assert all_symbols is not None, "Should return symbols for data.yaml"
        assert len(all_symbols) > 0, f"Should find symbols in data.yaml, found {len(all_symbols)}"
        # Verify top-level keys
        symbol_names = [sym.get("name") for sym in all_symbols]
        assert "users" in symbol_names, "Should detect 'users' array"
        assert "projects" in symbol_names, "Should detect 'projects' array"
        # Verify array elements (indexed by position)
        # data.yaml has user entries and project entries
        assert "id" in symbol_names, "Should detect 'id' fields in array elements"
        assert "name" in symbol_names, "Should detect 'name' fields"
        assert "email" in symbol_names, "Should detect 'email' fields"
        assert "roles" in symbol_names, "Should detect 'roles' arrays"

    @pytest.mark.parametrize("language_server", [Language.YAML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.YAML], indirect=True)
    def test_yaml_symbols_with_body(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test request_document_symbols with body extraction."""
        all_symbols, root_symbols = language_server.request_document_symbols("config.yaml").get_all_symbols_and_roots()
        assert all_symbols is not None, "Should return symbols for config.yaml"
        assert len(all_symbols) > 0, "Should have symbols"
        # Find the 'app' symbol and verify its body
        app_symbol = next((s for s in all_symbols if s.get("name") == "app"), None)
        assert app_symbol is not None, "Should find 'app' symbol"
        # Check that body exists and contains expected content
        assert "body" in app_symbol, "'app' symbol should have body"
        app_body = app_symbol["body"].get_text()
        assert "app:" in app_body, "Body should start with 'app:'"
        assert "name: test-application" in app_body, "Body should contain 'name' field"
        assert "version: 1.0.0" in app_body, "Body should contain 'version' field"
        assert "port: 8080" in app_body, "Body should contain 'port' field"
        assert "debug: true" in app_body, "Body should contain 'debug' field"
        # Find a simple string value symbol and verify its body
        name_symbols = [s for s in all_symbols if s.get("name") == "name" and "body" in s]
        assert len(name_symbols) > 0, "Should find 'name' symbols with bodies"
        # At least one should contain "test-application"
        assert any("test-application" in s["body"].get_text() for s in name_symbols), "Should find name with test-application"
        # Find the database symbol and check its body
        database_symbol = next((s for s in all_symbols if s.get("name") == "database"), None)
        assert database_symbol is not None, "Should find 'database' symbol"
        assert "body" in database_symbol, "'database' symbol should have body"
        db_body = database_symbol["body"].get_text()
        assert "database:" in db_body, "Body should start with 'database:'"
        assert "host: localhost" in db_body, "Body should contain host configuration"
        assert "port: 5432" in db_body, "Body should contain port configuration"

    @pytest.mark.parametrize("language_server", [Language.YAML], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.YAML], indirect=True)
    def test_yaml_symbol_ranges(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that symbols have proper range information."""
        all_symbols, root_symbols = language_server.request_document_symbols("config.yaml").get_all_symbols_and_roots()
        assert all_symbols is not None
        assert len(all_symbols) > 0
        # Check the 'app' symbol range
        app_symbol = next((s for s in all_symbols if s.get("name") == "app"), None)
        assert app_symbol is not None, "Should find 'app' symbol"
        assert "range" in app_symbol, "'app' symbol should have range"
        app_range = app_symbol["range"]
        assert "start" in app_range, "Range should have start"
        assert "end" in app_range, "Range should have end"
        assert app_range["start"]["line"] == 1, "'app' should start at line 1 (0-indexed, actual line 2)"
        # The app block spans from line 2 to line 7 in the file (1-indexed)
        # In 0-indexed LSP coordinates: line 1 (start) to line 6 (end)
        assert app_range["end"]["line"] == 6, "'app' should end at line 6 (0-indexed)"
        # Check a nested symbol range
        port_symbols = [s for s in all_symbols if s.get("name") == "port"]
        assert len(port_symbols) > 0, "Should find 'port' symbols"
        # Find the one under 'app' (should be at line 4 in 0-indexed, actual line 5)
        app_port = next((s for s in port_symbols if s["range"]["start"]["line"] == 4), None)
        assert app_port is not None, "Should find 'port' under 'app'"
        assert app_port["range"]["start"]["character"] == 2, "'port' should be indented 2 spaces"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/yaml_ls/test_yaml_basic.py",
"license": "MIT License",
"lines": 143,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:src/solidlsp/language_servers/al_language_server.py | """AL Language Server implementation for Microsoft Dynamics 365 Business Central."""
import logging
import os
import pathlib
import platform
import re
import stat
import time
import zipfile
from pathlib import Path
import requests
from overrides import override
from solidlsp import ls_types
from solidlsp.language_servers.common import quote_windows_path
from solidlsp.ls import DocumentSymbols, LSPFileBuffer, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_types import SymbolKind, UnifiedSymbolInformation
from solidlsp.lsp_protocol_handler.lsp_types import Definition, DefinitionParams, LocationLink
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
# Module-level logger for this language-server implementation.
log = logging.getLogger(__name__)
class ALLanguageServer(SolidLanguageServer):
"""
Language server implementation for AL (Microsoft Dynamics 365 Business Central).
This implementation uses the AL Language Server from the VS Code AL extension
(ms-dynamics-smb.al). The extension must be installed or available locally.
Key Features:
- Automatic download of AL extension from VS Code marketplace if not present
- Platform-specific executable detection (Windows/Linux/macOS)
- Special initialization sequence required by AL Language Server
- Custom AL-specific LSP commands (al/gotodefinition, al/setActiveWorkspace)
- File opening requirement before symbol retrieval
"""
# Regex pattern to match AL object names like:
# - 'Table 50000 "TEST Customer"' -> captures 'TEST Customer'
# - 'Codeunit 50000 CustomerMgt' -> captures 'CustomerMgt'
# - 'Interface IPaymentProcessor' -> captures 'IPaymentProcessor'
# - 'Enum 50000 CustomerType' -> captures 'CustomerType'
# Pattern: <ObjectType> [<ID>] (<QuotedName>|<UnquotedName>)
_AL_OBJECT_NAME_PATTERN = re.compile(
r"^(?:Table|Page|Codeunit|Enum|Interface|Report|Query|XMLPort|PermissionSet|"
r"PermissionSetExtension|Profile|PageExtension|TableExtension|EnumExtension|"
r"PageCustomization|ReportExtension|ControlAddin|DotNetPackage)" # Object type
r"(?:\s+\d+)?" # Optional object ID
r"\s+" # Required space before name
r'(?:"([^"]+)"|(\S+))$' # Quoted name (group 1) or unquoted identifier (group 2)
)
@staticmethod
def _extract_al_display_name(full_name: str) -> str:
"""
Extract the display name from an AL symbol's full name.
AL Language Server returns symbol names in format:
- 'Table 50000 "TEST Customer"' -> 'TEST Customer'
- 'Codeunit 50000 CustomerMgt' -> 'CustomerMgt'
- 'Interface IPaymentProcessor' -> 'IPaymentProcessor'
- 'fields' -> 'fields' (non-AL-object symbols pass through unchanged)
Args:
full_name: The full symbol name as returned by AL Language Server
Returns:
The extracted display name for matching, or the original name if not an AL object
"""
match = ALLanguageServer._AL_OBJECT_NAME_PATTERN.match(full_name)
if match:
# Return quoted name (group 1) or unquoted name (group 2)
return match.group(1) or match.group(2) or full_name
return full_name
    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Initialize the AL Language Server.

        Args:
            config: Language server configuration
            repository_root_path: Root path of the AL project (must contain app.json)
            solidlsp_settings: Solid LSP settings

        Note:
            The initialization process will automatically:
            1. Check for AL extension in the resources directory
            2. Download it from VS Code marketplace if not found
            3. Extract and configure the platform-specific executable
        """
        # Setup runtime dependencies and get the language server command.
        # This will download the AL extension if needed.
        cmd = self._setup_runtime_dependencies(config, solidlsp_settings)
        self._project_load_check_supported: bool = True
        """Whether the AL server supports the project load status check request.

        Some AL server versions don't support the 'al/hasProjectClosureLoadedRequest'
        custom LSP request. This flag starts as True and is set to False if the
        request fails, preventing repeated unsuccessful attempts.
        """
        super().__init__(config, repository_root_path, ProcessLaunchInfo(cmd=cmd, cwd=repository_root_path), "al", solidlsp_settings)
        # Cache mapping (file_path, line, char) -> original_full_name for hover injection
        self._al_original_names: dict[tuple[str, int, int], str] = {}
@staticmethod
def _normalize_path(path: str) -> str:
"""Normalize file path for consistent cache key usage across platforms."""
return path.replace("\\", "/")
@classmethod
def _download_al_extension(cls, url: str, target_dir: str) -> bool:
"""
Download and extract the AL extension from VS Code marketplace.
The VS Code marketplace packages extensions as .vsix files (which are ZIP archives).
This method downloads the VSIX file and extracts it to get the language server binaries.
Args:
logger: Logger for tracking download progress
url: VS Code marketplace URL for the AL extension
target_dir: Directory where the extension will be extracted
Returns:
True if successful, False otherwise
Note:
The download includes progress tracking and proper user-agent headers
to ensure compatibility with the VS Code marketplace.
"""
try:
log.info(f"Downloading AL extension from {url}")
# Create target directory for the extension
os.makedirs(target_dir, exist_ok=True)
# Download with proper headers to mimic VS Code marketplace client
# These headers are required for the marketplace to serve the VSIX file
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
"Accept": "application/octet-stream, application/vsix, */*",
}
response = requests.get(url, headers=headers, stream=True, timeout=300)
response.raise_for_status()
# Save to temporary VSIX file (will be deleted after extraction)
temp_file = os.path.join(target_dir, "al_extension_temp.vsix")
total_size = int(response.headers.get("content-length", 0))
log.info(f"Downloading {total_size / 1024 / 1024:.1f} MB...")
with open(temp_file, "wb") as f:
downloaded = 0
for chunk in response.iter_content(chunk_size=8192):
if chunk:
f.write(chunk)
downloaded += len(chunk)
if total_size > 0 and downloaded % (10 * 1024 * 1024) == 0: # Log progress every 10MB
progress = (downloaded / total_size) * 100
log.info(f"Download progress: {progress:.1f}%")
log.info("Download complete, extracting...")
# Extract VSIX file (VSIX files are just ZIP archives with a different extension)
# This will extract the extension folder containing the language server binaries
with zipfile.ZipFile(temp_file, "r") as zip_ref:
zip_ref.extractall(target_dir)
# Clean up temp file
os.remove(temp_file)
log.info("AL extension extracted successfully")
return True
except Exception as e:
log.error(f"Error downloading/extracting AL extension: {e}")
return False
    @classmethod
    def _setup_runtime_dependencies(cls, config: LanguageServerConfig, solidlsp_settings: SolidLSPSettings) -> str:
        """
        Setup runtime dependencies for AL Language Server and return the command to start the server.

        This method handles the complete setup process:
        1. Checks for existing AL extension installations
        2. Downloads from VS Code marketplace if not found
        3. Configures executable permissions on Unix systems
        4. Returns the properly formatted command string

        The AL Language Server executable is located in different paths based on the platform:
        - Windows: bin/win32/Microsoft.Dynamics.Nav.EditorServices.Host.exe
        - Linux: bin/linux/Microsoft.Dynamics.Nav.EditorServices.Host
        - macOS: bin/darwin/Microsoft.Dynamics.Nav.EditorServices.Host

        Raises:
            RuntimeError: if the extension cannot be located or downloaded, or if the
                platform-specific executable is missing after installation.
        """
        system = platform.system()
        # Find existing extension or download if needed
        extension_path = cls._find_al_extension(solidlsp_settings)
        if extension_path is None:
            log.info("AL extension not found on disk, attempting to download...")
            extension_path = cls._download_and_install_al_extension(solidlsp_settings)
        if extension_path is None:
            raise RuntimeError(
                "Failed to locate or download AL Language Server. Please either:\n"
                "1. Set AL_EXTENSION_PATH environment variable to the AL extension directory\n"
                "2. Install the AL extension in VS Code (ms-dynamics-smb.al)\n"
                "3. Ensure internet connection for automatic download"
            )
        # Build executable path based on platform
        executable_path = cls._get_executable_path(extension_path, system)
        if not os.path.exists(executable_path):
            raise RuntimeError(f"AL Language Server executable not found at: {executable_path}")
        # Prepare and return the executable command
        return cls._prepare_executable(executable_path, system)
@classmethod
def _find_al_extension(cls, solidlsp_settings: SolidLSPSettings) -> str | None:
    """
    Locate an existing AL extension installation on disk.

    Checked in order: the AL_EXTENSION_PATH environment variable, the default
    download location under the language-server resources directory, and finally
    the user's VS Code extension folders.

    :param solidlsp_settings: settings providing the resource directory
    :return: path to the extension directory, or None when nothing was found
    """
    # 1) Explicit override via environment variable.
    configured = os.environ.get("AL_EXTENSION_PATH")
    if configured:
        if os.path.exists(configured):
            log.debug(f"Found AL extension via AL_EXTENSION_PATH: {configured}")
            return configured
        log.warning(f"AL_EXTENSION_PATH set but directory not found: {configured}")

    # 2) Location used by our own downloader.
    downloaded = os.path.join(cls.ls_resources_dir(solidlsp_settings), "al-extension", "extension")
    if os.path.exists(downloaded):
        log.debug(f"Found AL extension in default location: {downloaded}")
        return downloaded

    # 3) Extensions installed through VS Code itself.
    from_vscode = cls._find_al_extension_in_vscode()
    if from_vscode:
        log.debug(f"Found AL extension in VS Code: {from_vscode}")
        return from_vscode

    log.debug("AL extension not found in any known location")
    return None
@classmethod
def _download_and_install_al_extension(cls, solidlsp_settings: SolidLSPSettings) -> str | None:
    """
    Fetch the AL extension from the VS Code marketplace and unpack it locally.

    :param solidlsp_settings: settings providing the resource directory
    :return: path to the unpacked ``extension`` directory, or None on failure
    """
    install_root = os.path.join(cls.ls_resources_dir(solidlsp_settings), "al-extension")

    # AL extension version - using latest stable version
    AL_VERSION = "latest"
    download_url = f"https://marketplace.visualstudio.com/_apis/public/gallery/publishers/ms-dynamics-smb/vsextensions/al/{AL_VERSION}/vspackage"
    log.info(f"Downloading AL extension from: {download_url}")

    if not cls._download_al_extension(download_url, install_root):
        log.error("Failed to download AL extension from marketplace")
        return None

    unpacked = os.path.join(install_root, "extension")
    if os.path.exists(unpacked):
        log.info("AL extension downloaded and installed successfully")
        return unpacked
    log.error(f"Download completed but extension not found at: {unpacked}")
    return None
@classmethod
def _get_executable_path(cls, extension_path: str, system: str) -> str:
    """
    Return the platform-specific path of the AL Language Server host executable.

    :param extension_path: root directory of the AL extension
    :param system: value of ``platform.system()``
    :return: full path to the host executable
    :raises RuntimeError: for platforms other than Windows, Linux and macOS
    """
    # Relative location of the host binary per platform.
    relative_locations = {
        "Windows": ("bin", "win32", "Microsoft.Dynamics.Nav.EditorServices.Host.exe"),
        "Linux": ("bin", "linux", "Microsoft.Dynamics.Nav.EditorServices.Host"),
        "Darwin": ("bin", "darwin", "Microsoft.Dynamics.Nav.EditorServices.Host"),
    }
    if system not in relative_locations:
        raise RuntimeError(f"Unsupported platform: {system}")
    return os.path.join(extension_path, *relative_locations[system])
@classmethod
def _prepare_executable(cls, executable_path: str, system: str) -> str:
    """
    Ensure the host binary is executable and return the launch command string.

    On Linux/macOS the execute bit is added if missing; on Windows the path is
    quoted so spaces in the install location do not break launching.

    :param executable_path: full path of the AL Language Server binary
    :param system: value of ``platform.system()``
    :return: properly formatted command string
    """
    if system in ("Linux", "Darwin"):
        current_mode = os.stat(executable_path).st_mode
        os.chmod(executable_path, current_mode | stat.S_IEXEC)
        log.debug(f"Set execute permission on: {executable_path}")

    log.info(f"Using AL Language Server executable: {executable_path}")
    # AL speaks LSP over stdio by default, so the command is just the (quoted) binary path.
    return quote_windows_path(executable_path)
@classmethod
def _get_language_server_command_fallback(cls) -> str:
    """
    Get the command to start the AL language server (fallback discovery path).

    Checks AL_EXTENSION_PATH, then the current working directory (for
    development/testing when the extension sits in the serena repo), then
    VS Code's extension folders, and finally prepares the executable.

    :return: command string to launch the AL language server
    :raises RuntimeError: if the AL extension or its executable cannot be found
    """
    # Check if AL extension path is configured via environment variable
    al_extension_path = os.environ.get("AL_EXTENSION_PATH")

    if not al_extension_path:
        # Try to find the extension in the current working directory
        # (for development/testing when extension is in the serena repo)
        cwd_path = Path.cwd()
        potential_extension = None
        # Look for ms-dynamics-smb.al-* directories
        for item in cwd_path.iterdir():
            if item.is_dir() and item.name.startswith("ms-dynamics-smb.al-"):
                potential_extension = item
                break
        if potential_extension:
            al_extension_path = str(potential_extension)
            log.debug(f"Found AL extension in current directory: {al_extension_path}")
        else:
            # Try to find in common VS Code extension locations
            al_extension_path = cls._find_al_extension_in_vscode()
            if not al_extension_path:
                raise RuntimeError(
                    "AL Language Server not found. Please either:\n"
                    "1. Set AL_EXTENSION_PATH environment variable to the VS Code AL extension directory\n"
                    "2. Install the AL extension in VS Code (ms-dynamics-smb.al)\n"
                    "3. Place the extension directory in the current working directory"
                )

    # Reuse the shared helpers instead of duplicating the platform mapping,
    # permission handling and path quoting; keeps this path consistent with
    # _setup_runtime_dependencies.
    system = platform.system()
    executable = cls._get_executable_path(al_extension_path, system)

    # Verify executable exists
    if not os.path.exists(executable):
        raise RuntimeError(
            f"AL Language Server executable not found at: {executable}\nPlease ensure the AL extension is properly installed."
        )

    # Sets the execute bit on Unix-like systems and quotes the path on Windows.
    return cls._prepare_executable(executable, system)
@classmethod
def _find_al_extension_in_vscode(cls) -> str | None:
    """
    Try to find AL extension in common VS Code extension locations.

    Searches the per-user extension directories of VS Code (stable, Insiders
    and, on non-Windows systems, vscode-server) for a directory whose name
    starts with ``ms-dynamics-smb.al-``.

    :return: path to AL extension directory or None if not found
    """
    home = Path.home()
    possible_paths = []
    # Common VS Code extension paths
    if platform.system() == "Windows":
        possible_paths.extend(
            [
                home / ".vscode" / "extensions",
                home / ".vscode-insiders" / "extensions",
            ]
        )
        # Only consider APPDATA-based locations when APPDATA is actually set;
        # Path("") would resolve to the current directory and could otherwise
        # produce false positives below it.
        appdata = os.environ.get("APPDATA")
        if appdata:
            possible_paths.extend(
                [
                    Path(appdata) / "Code" / "User" / "extensions",
                    Path(appdata) / "Code - Insiders" / "User" / "extensions",
                ]
            )
    else:
        possible_paths.extend(
            [
                home / ".vscode" / "extensions",
                home / ".vscode-server" / "extensions",
                home / ".vscode-insiders" / "extensions",
            ]
        )
    for base_path in possible_paths:
        if base_path.exists():
            log.debug(f"Searching for AL extension in: {base_path}")
            # Look for AL extension directories
            for item in base_path.iterdir():
                if item.is_dir() and item.name.startswith("ms-dynamics-smb.al-"):
                    log.debug(f"Found AL extension at: {item}")
                    return str(item)
    return None
@staticmethod
def _get_initialize_params(repository_absolute_path: str) -> dict:
    """
    Returns the initialize params for the AL Language Server.

    :param repository_absolute_path: absolute path of the workspace root
    :return: payload for the LSP ``initialize`` request
    """
    # Ensure we have an absolute path for URI generation
    repository_path = pathlib.Path(repository_absolute_path).resolve()
    root_uri = repository_path.as_uri()
    # AL requires extensive capabilities based on VS Code trace
    initialize_params = {
        "processId": os.getpid(),
        "rootPath": str(repository_path),
        "rootUri": root_uri,
        "capabilities": {
            "workspace": {
                "applyEdit": True,
                "workspaceEdit": {
                    "documentChanges": True,
                    "resourceOperations": ["create", "rename", "delete"],
                    "failureHandling": "textOnlyTransactional",
                    "normalizesLineEndings": True,
                },
                "configuration": True,
                "didChangeWatchedFiles": {"dynamicRegistration": True},
                # valueSet 1..26 covers every LSP SymbolKind value
                "symbol": {"dynamicRegistration": True, "symbolKind": {"valueSet": list(range(1, 27))}},
                "executeCommand": {"dynamicRegistration": True},
                "didChangeConfiguration": {"dynamicRegistration": True},
                "workspaceFolders": True,
            },
            "textDocument": {
                "synchronization": {"dynamicRegistration": True, "willSave": True, "willSaveWaitUntil": True, "didSave": True},
                "completion": {
                    "dynamicRegistration": True,
                    "contextSupport": True,
                    "completionItem": {
                        "snippetSupport": True,
                        "commitCharactersSupport": True,
                        "documentationFormat": ["markdown", "plaintext"],
                        "deprecatedSupport": True,
                        "preselectSupport": True,
                    },
                },
                "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                "definition": {"dynamicRegistration": True, "linkSupport": True},
                "references": {"dynamicRegistration": True},
                "documentHighlight": {"dynamicRegistration": True},
                "documentSymbol": {
                    "dynamicRegistration": True,
                    "symbolKind": {"valueSet": list(range(1, 27))},
                    "hierarchicalDocumentSymbolSupport": True,
                },
                "codeAction": {"dynamicRegistration": True},
                "formatting": {"dynamicRegistration": True},
                "rangeFormatting": {"dynamicRegistration": True},
                "rename": {"dynamicRegistration": True, "prepareSupport": True},
            },
            "window": {
                "showMessage": {"messageActionItem": {"additionalPropertiesSupport": True}},
                "showDocument": {"support": True},
                "workDoneProgress": True,
            },
        },
        # verbose tracing mirrors what VS Code requests from the AL server
        "trace": "verbose",
        "workspaceFolders": [{"uri": root_uri, "name": repository_path.name}],
    }
    return initialize_params
@override
def _start_server(self) -> None:
    """
    Launch the AL Language Server process and run the LSP handshake.

    AL-specific notifications emitted during startup and project loading are
    wired to handlers before the process starts; then ``initialize`` is sent
    and the ``initialized`` notification completes the handshake.

    :raises RuntimeError: if the server returns no response to ``initialize``
    """

    def ignore(params: str) -> None:
        return

    def on_log_message(msg: dict) -> None:
        log.info(f"AL LSP: window/logMessage: {msg}")

    def on_diagnostics(params: dict) -> None:
        # AL server publishes diagnostics during initialization
        uri = params.get("uri", "")
        diagnostics = params.get("diagnostics", [])
        log.debug(f"AL LSP: Diagnostics for {uri}: {len(diagnostics)} issues")

    def on_al_notification(params: dict) -> None:
        # AL server sends custom notifications during project loading
        log.debug("AL LSP: Notification received")

    # Wire up AL-specific notifications before starting the process.
    notification_handlers = {
        "window/logMessage": on_log_message,  # server log messages
        "textDocument/publishDiagnostics": on_diagnostics,  # compilation diagnostics
        "$/progress": ignore,  # progress notifications during loading
        "al/refreshExplorerObjects": on_al_notification,  # AL-specific object updates
    }
    for method, handler in notification_handlers.items():
        self.server.on_notification(method, handler)

    log.info("Starting AL Language Server process")
    self.server.start()

    init_params = self._get_initialize_params(self.repository_root_path)
    log.info("Sending initialize request from LSP client to AL LSP server and awaiting response")
    response = self.server.send_request("initialize", init_params)
    if response is None:
        raise RuntimeError("AL Language Server initialization failed - no response")
    log.info("AL Language Server initialized successfully")

    self.server.send_notification("initialized", {})
    log.info("Sent initialized notification")
@override
def start(self) -> "ALLanguageServer":
    """
    Start the AL Language Server, then run AL's workspace post-initialization.

    Note: set_active_workspace() can be called manually if needed for
    multi-workspace scenarios; it is not invoked automatically to avoid
    issues during single-workspace initialization.

    :return: this instance, for chaining
    """
    super().start()
    # AL-specific post-initialization (workspace config, app.json opening, ...)
    self._post_initialize_al_workspace()
    return self
def _post_initialize_al_workspace(self) -> None:
    """
    Post-initialization setup for AL Language Server.

    The AL server requires additional setup after initialization:
    1. Send workspace configuration - provides AL settings and paths
    2. Open app.json to trigger project loading - AL uses app.json to identify project structure
    3. Optionally wait for project to be loaded if supported

    This special initialization sequence is unique to AL and necessary for proper
    symbol resolution and navigation features.
    """
    # No sleep needed - server is already initialized
    # Send workspace configuration first
    # This tells AL about assembly paths, package caches, and code analysis settings
    try:
        self.server.send_notification(
            "workspace/didChangeConfiguration",
            {
                "settings": {
                    "workspacePath": self.repository_root_path,
                    "alResourceConfigurationSettings": {
                        "assemblyProbingPaths": ["./.netpackages"],
                        "codeAnalyzers": [],
                        "enableCodeAnalysis": False,
                        "backgroundCodeAnalysis": "Project",
                        "packageCachePaths": ["./.alpackages"],
                        "ruleSetPath": None,
                        "enableCodeActions": True,
                        "incrementalBuild": False,
                        "outputAnalyzerStatistics": True,
                        "enableExternalRulesets": True,
                    },
                    "setActiveWorkspace": True,
                    "expectedProjectReferenceDefinitions": [],
                    "activeWorkspaceClosure": [self.repository_root_path],
                }
            },
        )
        log.debug("Sent workspace configuration")
    except Exception as e:
        # Best-effort: the server may still work with its default settings.
        log.warning(f"Failed to send workspace config: {e}")
    # Check if app.json exists and open it
    # app.json is the AL project manifest file (similar to package.json for Node.js)
    # Opening it triggers AL to load the project and index all AL files
    app_json_path = Path(self.repository_root_path) / "app.json"
    if app_json_path.exists():
        try:
            with open(app_json_path, encoding="utf-8") as f:
                app_json_content = f.read()
            # Use forward slashes for URI
            app_json_uri = app_json_path.as_uri()
            # Send textDocument/didOpen for app.json
            self.server.send_notification(
                "textDocument/didOpen",
                {"textDocument": {"uri": app_json_uri, "languageId": "json", "version": 1, "text": app_json_content}},
            )
            log.debug(f"Opened app.json: {app_json_uri}")
        except Exception as e:
            log.warning(f"Failed to open app.json: {e}")
    # Try to set active workspace (AL-specific custom LSP request)
    # This is optional and may not be supported by all AL server versions
    workspace_uri = Path(self.repository_root_path).resolve().as_uri()
    try:
        result = self.server.send_request(
            "al/setActiveWorkspace",
            {
                "currentWorkspaceFolderPath": {"uri": workspace_uri, "name": Path(self.repository_root_path).name, "index": 0},
                "settings": {
                    "workspacePath": self.repository_root_path,
                    "setActiveWorkspace": True,
                },
                "timeout": 2,  # Quick timeout since this is optional
            },
        )
        log.debug(f"Set active workspace result: {result}")
    except Exception as e:
        # This is a custom AL request, not critical if it fails
        log.debug(f"Failed to set active workspace (non-critical): {e}")
    # Check if project supports load status check (optional)
    # Many AL server versions don't support this, so we use a short timeout
    # and continue regardless of the result
    self._wait_for_project_load(timeout=3)
@override
def is_ignored_dirname(self, dirname: str) -> bool:
    """
    Return True for directories that must be skipped when scanning AL projects.

    Extends the base ignore list with AL build/cache/dependency folders that
    contain generated files, dependencies, or cache data rather than sources.

    :param dirname: directory name to check
    :return: True if directory should be ignored
    """
    # Parent class ignore list takes precedence.
    if super().is_ignored_dirname(dirname):
        return True
    return dirname in {
        ".alpackages",  # AL package cache - downloaded dependencies
        ".alcache",  # AL compiler cache - intermediate compilation files
        ".altemplates",  # AL templates - code generation templates
        ".snapshots",  # Test snapshots - test result snapshots
        "out",  # Compiled output - generated .app files
        ".vscode",  # VS Code settings - editor configuration
        "Reference",  # Reference assemblies - .NET dependencies
        ".netpackages",  # .NET packages - NuGet packages for AL
        "bin",  # Binary output - compiled binaries
        "obj",  # Object files - intermediate build artifacts
    }
@override
def request_full_symbol_tree(self, within_relative_path: str | None = None) -> list[UnifiedSymbolInformation]:
    """
    Override to handle AL's requirement of opening files before requesting symbols.

    The AL Language Server requires files to be explicitly opened via textDocument/didOpen
    before it can provide meaningful symbols. Without this, it only returns directory symbols.
    This is different from most language servers which can provide symbols for unopened files.

    This method:
    1. Scans the repository for all AL files (.al and .dal extensions)
    2. Requests symbols for each file (opening is handled by request_document_symbols)
    3. Wraps each file's symbols in a File-kind symbol
    4. Groups the file symbols by directory into a hierarchical tree

    :param within_relative_path: restrict the search to this file or directory path
    :return: full symbol tree with all AL symbols from opened files organized by directory
    :raises FileNotFoundError: if within_relative_path does not exist
    """
    log.debug("AL: Starting request_full_symbol_tree with file opening")
    # Determine the root path for scanning
    if within_relative_path is not None:
        within_abs_path = os.path.join(self.repository_root_path, within_relative_path)
        if not os.path.exists(within_abs_path):
            raise FileNotFoundError(f"File or directory not found: {within_abs_path}")
        if os.path.isfile(within_abs_path):
            # Single file case - use parent class implementation
            root_nodes = self.request_document_symbols(within_relative_path).root_symbols
            return root_nodes
        # Directory case - scan within this directory
        scan_root = Path(within_abs_path)
    else:
        # Scan entire repository
        scan_root = Path(self.repository_root_path)
    # For AL, we always need to open files to get symbols
    al_files = []
    # Walk through the repository to find all AL files
    for root, dirs, files in os.walk(scan_root):
        # Skip ignored directories (in-place pruning so os.walk does not descend into them)
        dirs[:] = [d for d in dirs if not self.is_ignored_dirname(d)]
        # Find AL files
        for file in files:
            if file.endswith((".al", ".dal")):
                file_path = Path(root) / file
                # Use forward slashes for consistent paths
                try:
                    relative_path = str(file_path.relative_to(self.repository_root_path)).replace("\\", "/")
                    al_files.append((file_path, relative_path))
                except ValueError:
                    # File is outside repository root, skip it
                    continue
    log.debug(f"AL: Found {len(al_files)} AL files")
    if not al_files:
        log.warning("AL: No AL files found in repository")
        return []
    # Collect all symbols from all files
    all_file_symbols: list[UnifiedSymbolInformation] = []
    file_symbol: UnifiedSymbolInformation
    for file_path, relative_path in al_files:
        try:
            # Use our overridden request_document_symbols which handles opening
            log.debug(f"AL: Getting symbols for {relative_path}")
            all_syms, root_syms = self.request_document_symbols(relative_path).get_all_symbols_and_roots()
            if root_syms:
                # Create a file-level symbol containing the document symbols
                file_symbol = {
                    "name": file_path.stem,  # Just the filename without extension
                    "kind": SymbolKind.File,
                    "children": root_syms,
                    "location": {
                        "uri": file_path.as_uri(),
                        "relativePath": relative_path,
                        "absolutePath": str(file_path),
                        "range": {"start": {"line": 0, "character": 0}, "end": {"line": 0, "character": 0}},
                    },
                }
                all_file_symbols.append(file_symbol)
                log.debug(f"AL: Added {len(root_syms)} symbols from {relative_path}")
            elif all_syms:
                # If we only got all_syms but not root, use all_syms
                file_symbol = {
                    "name": file_path.stem,
                    "kind": SymbolKind.File,
                    "children": all_syms,
                    "location": {
                        "uri": file_path.as_uri(),
                        "relativePath": relative_path,
                        "absolutePath": str(file_path),
                        "range": {"start": {"line": 0, "character": 0}, "end": {"line": 0, "character": 0}},
                    },
                }
                all_file_symbols.append(file_symbol)
                log.debug(f"AL: Added {len(all_syms)} symbols from {relative_path}")
        except Exception as e:
            # One bad file must not abort the whole tree; log and continue.
            log.warning(f"AL: Failed to get symbols for {relative_path}: {e}")
    if all_file_symbols:
        log.debug(f"AL: Returning symbols from {len(all_file_symbols)} files")
        # Group files by directory
        directory_structure: dict[str, list] = {}
        for file_symbol in all_file_symbols:
            rel_path = file_symbol["location"]["relativePath"]
            assert rel_path is not None
            path_parts = rel_path.split("/")
            if len(path_parts) > 1:
                # File is in a subdirectory
                dir_path = "/".join(path_parts[:-1])
                if dir_path not in directory_structure:
                    directory_structure[dir_path] = []
                directory_structure[dir_path].append(file_symbol)
            else:
                # File is in root
                if "." not in directory_structure:
                    directory_structure["."] = []
                directory_structure["."].append(file_symbol)
        # Build hierarchical structure
        result = []
        repo_path = Path(self.repository_root_path)
        for dir_path, file_symbols in directory_structure.items():
            if dir_path == ".":
                # Root level files
                result.extend(file_symbols)
            else:
                # Create directory symbol
                dir_symbol = {
                    "name": Path(dir_path).name,
                    "kind": SymbolKind.Package,  # Package/Directory
                    "children": file_symbols,
                    "location": {
                        "relativePath": dir_path,
                        "absolutePath": str(repo_path / dir_path),
                        "range": {"start": {"line": 0, "character": 0}, "end": {"line": 0, "character": 0}},
                    },
                }
                result.append(dir_symbol)
        return result
    else:
        log.warning("AL: No symbols found in any files")
        return []
# ===== Phase 1: Custom AL Command Implementations =====
@override
def _send_definition_request(self, definition_params: DefinitionParams) -> Definition | list[LocationLink] | None:
    """
    Resolve a definition via AL's custom 'al/gotodefinition' request.

    The AL server offers this command in place of the standard
    'textDocument/definition'; it navigates AL-specific constructs (table
    extensions, page extensions, codeunit references) better. On any failure
    the inherited standard LSP request is used as a fallback.
    """
    # Same payload shape as the standard request, just a different method name.
    request_payload = {
        "textDocument": definition_params["textDocument"],
        "position": definition_params["position"],
    }
    try:
        result = self.server.send_request("al/gotodefinition", request_payload)
        log.debug(f"AL gotodefinition response: {result}")
        return result  # type: ignore[return-value]
    except Exception as e:
        log.warning(f"Failed to use al/gotodefinition, falling back to standard: {e}")
        # Fallback to standard LSP method if custom command fails
        return super()._send_definition_request(definition_params)
def check_project_loaded(self) -> bool:
    """
    Check if AL project closure is fully loaded.

    Uses AL's custom 'al/hasProjectClosureLoadedRequest'. AL operations may
    fail or return incomplete results while the project is still loading.
    Servers that do not support the request are remembered via
    _project_load_check_supported and treated as loaded from then on.

    :return: True if project is loaded (or the check is unsupported), False otherwise
    """
    if not hasattr(self, "server") or not self.server_started:
        log.debug("Cannot check project load - server not started")
        return False
    if not self._project_load_check_supported:
        # Previously determined the server doesn't support this request.
        return True

    try:
        # Very short timeout: this is only a status probe.
        answer = self.server.send_request("al/hasProjectClosureLoadedRequest", {"timeout": 1})
        # Response can be a bool, a dict with a 'loaded' field, or None.
        if isinstance(answer, bool):
            return answer
        if isinstance(answer, dict):
            return answer.get("loaded", False)
        if answer is None:
            # None typically means the project is still loading
            log.debug("Project load check returned None")
            return False
        log.debug(f"Unexpected response type for project load check: {type(answer)}")
        return False
    except Exception as e:
        # Mark as unsupported to avoid repeated failed attempts
        self._project_load_check_supported = False
        log.debug(f"Project load check not supported by this AL server version: {e}")
        # Assume loaded if we can't check
        return True
def _wait_for_project_load(self, timeout: int = 3) -> bool:
    """
    Wait (poll) until the AL project reports itself as fully loaded.

    Not all AL server versions support the load-status check, so the timeout
    is short and the result is informational only; callers continue either way.

    :param timeout: maximum time to wait in seconds (default 3s)
    :return: True if project loaded within timeout, False otherwise
    """
    # Use a monotonic clock so system clock adjustments cannot distort the
    # timeout or the reported elapsed time (time.time() is wall-clock).
    start_time = time.monotonic()
    log.debug(f"Checking AL project load status (timeout: {timeout}s)...")
    while time.monotonic() - start_time < timeout:
        if self.check_project_loaded():
            elapsed = time.monotonic() - start_time
            log.info(f"AL project fully loaded after {elapsed:.1f}s")
            return True
        time.sleep(0.5)
    log.debug(f"Project load check timed out after {timeout}s (non-critical)")
    return False
def set_active_workspace(self, workspace_uri: str | None = None) -> None:
    """
    Mark one AL workspace as the active one via the custom 'al/setActiveWorkspace' request.

    The AL server can hold several projects at once but routes operations such
    as symbol search and navigation to a single active workspace, so this
    matters mainly in multi-workspace setups. Failures are logged and otherwise
    ignored (non-critical).

    :param workspace_uri: URI of workspace to set as active, or None to use the repository root
    """
    if not hasattr(self, "server") or not self.server_started:
        log.debug("Cannot set active workspace - server not started")
        return

    target_uri = workspace_uri if workspace_uri is not None else Path(self.repository_root_path).resolve().as_uri()
    try:
        self.server.send_request("al/setActiveWorkspace", {"workspaceUri": target_uri})
        log.info(f"Set active workspace to: {target_uri}")
    except Exception as e:
        log.warning(f"Failed to set active workspace: {e}")
        # Non-critical error, continue operation
@override
def request_document_symbols(self, relative_file_path: str, file_buffer: LSPFileBuffer | None = None) -> DocumentSymbols:
    """
    Return document symbols with AL object metadata stripped from their names.

    The AL server reports names like 'Table 50000 "TEST Customer"'; here they
    are normalized to the bare display name (same pattern as the Java LS, which
    also strips type information). The original full names are remembered per
    (path, line, character) so request_hover can re-inject them on demand.
    """
    # Normalize path separators for cross-platform compatibility (backslash → forward slash)
    relative_file_path = self._normalize_path(relative_file_path)
    document_symbols = super().request_document_symbols(relative_file_path, file_buffer=file_buffer)

    # Walk the symbol tree iteratively, normalizing each name as we go.
    pending: list[UnifiedSymbolInformation] = list(document_symbols.root_symbols)
    while pending:
        symbol = pending.pop()
        raw_name = symbol["name"]
        display_name = self._extract_al_display_name(raw_name)
        if raw_name != display_name:
            # Remember the original (full) name keyed by the selection-range start,
            # but only with valid position data to avoid false matches at (0, 0).
            sel_range = symbol.get("selectionRange")
            if sel_range:
                start = sel_range.get("start")
                if start and "line" in start and "character" in start:
                    key = (relative_file_path, start["line"], start["character"])
                    self._al_original_names[key] = raw_name
        symbol["name"] = display_name
        children = symbol.get("children")
        if children:
            pending.extend(children)
    return document_symbols
@override
def request_hover(
    self, relative_file_path: str, line: int, column: int, file_buffer: LSPFileBuffer | None = None
) -> ls_types.Hover | None:
    """
    Hover request that restores the original AL object name in the result.

    If the symbol at (line, column) had its name normalized by
    request_document_symbols, the stored full name (with object type and ID,
    e.g. 'Table 50000 "TEST Customer"') is prepended to the hover content.
    """
    # Normalize path separators for cross-platform compatibility (backslash → forward slash)
    relative_file_path = self._normalize_path(relative_file_path)
    hover = super().request_hover(relative_file_path, line, column, file_buffer=file_buffer)
    if hover is None:
        return None

    full_name = self._al_original_names.get((relative_file_path, line, column))
    if not full_name or "contents" not in hover:
        return hover

    contents = hover["contents"]
    if isinstance(contents, dict) and "value" in contents:
        # Prepend the original full name to the hover content
        contents["value"] = f"**{full_name}**\n\n---\n\n" + contents["value"]
    return hover
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/al_language_server.py",
"license": "MIT License",
"lines": 891,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/bash_language_server.py | """
Provides Bash specific instantiation of the LanguageServer class using bash-language-server.
Contains various configurations and settings specific to Bash scripting.
"""
import logging
import os
import pathlib
import shutil
import threading
from solidlsp.language_servers.common import RuntimeDependency, RuntimeDependencyCollection
from solidlsp.ls import (
DocumentSymbols,
LanguageServerDependencyProvider,
LanguageServerDependencyProviderSinglePath,
LSPFileBuffer,
SolidLanguageServer,
)
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class BashLanguageServer(SolidLanguageServer):
"""
Provides Bash specific instantiation of the LanguageServer class using bash-language-server.
Contains various configurations and settings specific to Bash scripting.
"""
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
    """
    Creates a BashLanguageServer instance. This class is not meant to be
    instantiated directly; use LanguageServer.create() instead.

    :param config: language-server configuration
    :param repository_root_path: absolute path of the repository root
    :param solidlsp_settings: global SolidLSP settings
    """
    # Language id "bash"; the launch command is resolved lazily (None here)
    # through the dependency provider.
    super().__init__(config, repository_root_path, None, "bash", solidlsp_settings)
    self.server_ready = threading.Event()
    self.initialize_searcher_command_available = threading.Event()
def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
    # Resolves (and if necessary installs) the bash-language-server npm package.
    return self.DependencyProvider(self._custom_settings, self._ls_resources_dir)
class DependencyProvider(LanguageServerDependencyProviderSinglePath):
    """Provides the bash-language-server executable, installing it via npm when missing."""

    def _get_or_install_core_dependency(self) -> str:
        """
        Setup runtime dependencies for Bash Language Server and return the command to start the server.

        Requires node and npm on PATH; installs bash-language-server@5.6.0 into a
        local npm prefix under the language-server resources directory.

        :raises FileNotFoundError: if the executable is still missing after installation
        """
        # node and npm must both be available for the npm-based install below.
        is_node_installed = shutil.which("node") is not None
        assert is_node_installed, "node is not installed or isn't in PATH. Please install NodeJS and try again."
        is_npm_installed = shutil.which("npm") is not None
        assert is_npm_installed, "npm is not installed or isn't in PATH. Please install npm and try again."

        pinned_dependency = RuntimeDependencyCollection(
            [
                RuntimeDependency(
                    id="bash-language-server",
                    description="bash-language-server package",
                    command="npm install --prefix ./ bash-language-server@5.6.0",
                    platform_id="any",
                ),
            ]
        )

        # Expected layout: <resources>/bash-lsp/node_modules/.bin/bash-language-server
        install_dir = os.path.join(self._ls_resources_dir, "bash-lsp")
        executable = os.path.join(install_dir, "node_modules", ".bin", "bash-language-server")
        if os.name == "nt":
            # npm creates a .cmd shim on Windows
            executable += ".cmd"

        if not os.path.exists(executable):
            log.info(f"Bash Language Server executable not found at {executable}. Installing...")
            pinned_dependency.install(install_dir)
            log.info("Bash language server dependencies installed successfully")

        if not os.path.exists(executable):
            raise FileNotFoundError(
                f"bash-language-server executable not found at {executable}, something went wrong with the installation."
            )
        return executable

    def _create_launch_command(self, core_path: str) -> list[str]:
        # bash-language-server speaks LSP over stdio via its "start" subcommand.
        return [core_path, "start"]
@staticmethod
def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
    """
    Returns the initialize params for the Bash Language Server.

    :param repository_absolute_path: absolute path of the workspace root
    :return: payload for the LSP ``initialize`` request
    """
    root_uri = pathlib.Path(repository_absolute_path).as_uri()
    initialize_params = {
        "locale": "en",
        "capabilities": {
            "textDocument": {
                "synchronization": {"didSave": True, "dynamicRegistration": True},
                "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
                "definition": {"dynamicRegistration": True},
                "references": {"dynamicRegistration": True},
                "documentSymbol": {
                    "dynamicRegistration": True,
                    "hierarchicalDocumentSymbolSupport": True,
                    # valueSet 1..26 covers every LSP SymbolKind value
                    "symbolKind": {"valueSet": list(range(1, 27))},
                },
                "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                "signatureHelp": {"dynamicRegistration": True},
                "codeAction": {"dynamicRegistration": True},
            },
            "workspace": {
                "workspaceFolders": True,
                "didChangeConfiguration": {"dynamicRegistration": True},
                "symbol": {"dynamicRegistration": True},
            },
        },
        "processId": os.getpid(),
        "rootPath": repository_absolute_path,
        "rootUri": root_uri,
        "workspaceFolders": [
            {
                "uri": root_uri,
                "name": os.path.basename(repository_absolute_path),
            }
        ],
    }
    return initialize_params  # type: ignore
def _start_server(self) -> None:
    """
    Starts the Bash Language Server, waits for the server to be ready and yields the LanguageServer instance.

    Registers LSP notification/request handlers, launches the server process,
    performs the initialize handshake, and waits briefly for a readiness signal.
    """

    def register_capability_handler(params: dict) -> None:
        # Flag availability of workspace/executeCommand once the server
        # dynamically registers it.
        assert "registrations" in params
        for registration in params["registrations"]:
            if registration["method"] == "workspace/executeCommand":
                self.initialize_searcher_command_available.set()
        return

    def execute_client_command_handler(params: dict) -> list:
        return []

    def do_nothing(params: dict) -> None:
        return

    def window_log_message(msg: dict) -> None:
        log.info(f"LSP: window/logMessage: {msg}")
        # Check for bash-language-server ready signals
        message_text = msg.get("message", "")
        if "Analyzing" in message_text or "analysis complete" in message_text.lower():
            log.info("Bash language server analysis signals detected")
            self.server_ready.set()

    # Handlers are registered before server.start() is called.
    self.server.on_request("client/registerCapability", register_capability_handler)
    self.server.on_notification("window/logMessage", window_log_message)
    self.server.on_request("workspace/executeClientCommand", execute_client_command_handler)
    self.server.on_notification("$/progress", do_nothing)
    self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
    log.info("Starting Bash server process")
    self.server.start()
    initialize_params = self._get_initialize_params(self.repository_root_path)
    log.info("Sending initialize request from LSP client to LSP server and awaiting response")
    init_response = self.server.send.initialize(initialize_params)
    log.debug(f"Received initialize response from bash server: {init_response}")
    # Enhanced capability checks for bash-language-server 5.6.0
    assert init_response["capabilities"]["textDocumentSync"] in [1, 2]  # Full or Incremental
    assert "completionProvider" in init_response["capabilities"]
    # Verify document symbol support is available
    if "documentSymbolProvider" in init_response["capabilities"]:
        log.info("Bash server supports document symbols")
    else:
        log.warning("Warning: Bash server does not report document symbol support")
    self.server.notify.initialized({})
    # Wait for server readiness with timeout
    log.info("Waiting for Bash language server to be ready...")
    if not self.server_ready.wait(timeout=3.0):
        # Fallback: assume server is ready after timeout
        # This is common. bash-language-server doesn't always send explicit ready signals. Log as info
        log.info("Timeout waiting for bash server ready signal, proceeding anyway")
        self.server_ready.set()
    else:
        log.info("Bash server initialization complete")
def request_document_symbols(self, relative_file_path: str, file_buffer: LSPFileBuffer | None = None) -> DocumentSymbols:
    """Return the document symbols for the given file via the standard LSP request.

    bash-language-server handles every bash function syntax through the plain
    ``textDocument/documentSymbol`` request — ``function name() {}``, the
    traditional ``name() {}`` form, arbitrary indentation, and functions with
    comments before/after/inside — so no custom parsing is required here.
    """
    log.debug(f"Requesting document symbols via LSP for {relative_file_path}")
    symbols = super().request_document_symbols(relative_file_path, file_buffer=file_buffer)
    # SymbolKind 12 == Function; log how many were detected to aid debugging.
    function_count = sum(1 for sym in symbols.iter_symbols() if sym.get("kind") == 12)
    log.info(f"LSP function detection for {relative_file_path}: Found {function_count} functions")
    return symbols
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/bash_language_server.py",
"license": "MIT License",
"lines": 183,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/elm_language_server.py | """
Provides Elm specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Elm.
"""
import logging
import os
import pathlib
import shutil
import threading
from overrides import override
from sensai.util.logging import LogTime
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
from .common import RuntimeDependency, RuntimeDependencyCollection
log = logging.getLogger(__name__)
class ElmLanguageServer(SolidLanguageServer):
    """
    Provides Elm specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Elm.
    """

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates an ElmLanguageServer instance. This class is not meant to be instantiated directly. Use LanguageServer.create() instead.
        """
        elm_lsp_executable_path = self._setup_runtime_dependencies(config, solidlsp_settings)
        # Resolve ELM_HOME to absolute path if it's set to a relative path
        env = {}
        elm_home = os.environ.get("ELM_HOME")
        if elm_home:
            if not os.path.isabs(elm_home):
                # Convert relative ELM_HOME to absolute based on repository root
                elm_home = os.path.abspath(os.path.join(repository_root_path, elm_home))
            env["ELM_HOME"] = elm_home
            log.info(f"Using ELM_HOME: {elm_home}")
        super().__init__(
            config,
            repository_root_path,
            ProcessLaunchInfo(cmd=elm_lsp_executable_path, cwd=repository_root_path, env=env),
            "elm",
            solidlsp_settings,
        )

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # Skip Elm package cache and common build/tooling output directories.
        return super().is_ignored_dirname(dirname) or dirname in [
            "elm-stuff",
            "node_modules",
            "dist",
            "build",
        ]

    @classmethod
    def _setup_runtime_dependencies(cls, config: LanguageServerConfig, solidlsp_settings: SolidLSPSettings) -> list[str]:
        """
        Setup runtime dependencies for Elm Language Server and return the command to start the server.

        A system-wide installation takes precedence; otherwise the pinned npm
        package is installed into the LS resources directory.

        :raises AssertionError: if node or npm is not available on PATH.
        :raises FileNotFoundError: if the executable is still missing after installation.
        """
        # Check if elm-language-server is already installed globally
        system_elm_ls = shutil.which("elm-language-server")
        if system_elm_ls:
            log.info(f"Found system-installed elm-language-server at {system_elm_ls}")
            return [system_elm_ls, "--stdio"]
        # Verify node and npm are installed
        is_node_installed = shutil.which("node") is not None
        assert is_node_installed, "node is not installed or isn't in PATH. Please install NodeJS and try again."
        is_npm_installed = shutil.which("npm") is not None
        assert is_npm_installed, "npm is not installed or isn't in PATH. Please install npm and try again."
        deps = RuntimeDependencyCollection(
            [
                RuntimeDependency(
                    id="elm-language-server",
                    description="@elm-tooling/elm-language-server package",
                    command=["npm", "install", "--prefix", "./", "@elm-tooling/elm-language-server@2.8.0"],
                    platform_id="any",
                ),
            ]
        )
        # Install elm-language-server if not already installed
        elm_ls_dir = os.path.join(cls.ls_resources_dir(solidlsp_settings), "elm-lsp")
        elm_ls_executable_path = os.path.join(elm_ls_dir, "node_modules", ".bin", "elm-language-server")
        if not os.path.exists(elm_ls_executable_path):
            log.info(f"Elm Language Server executable not found at {elm_ls_executable_path}. Installing...")
            with LogTime("Installation of Elm language server dependencies", logger=log):
                deps.install(elm_ls_dir)
        # Re-check after the (possible) installation attempt.
        if not os.path.exists(elm_ls_executable_path):
            raise FileNotFoundError(
                f"elm-language-server executable not found at {elm_ls_executable_path}, something went wrong with the installation."
            )
        return [elm_ls_executable_path, "--stdio"]

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Elm Language Server.

        :param repository_absolute_path: absolute path of the repository root,
            used for rootPath/rootUri and the single workspace folder.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "codeAction": {"dynamicRegistration": True},
                    "rename": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {"dynamicRegistration": True},
                },
            },
            "initializationOptions": {
                # Paths to the Elm toolchain; fall back to bare command names
                # so the server can resolve them from its own PATH.
                "elmPath": shutil.which("elm") or "elm",
                "elmFormatPath": shutil.which("elm-format") or "elm-format",
                "elmTestPath": shutil.which("elm-test") or "elm-test",
                "skipInstallPackageConfirmation": True,
                "onlyUpdateDiagnosticsOnSave": False,
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore[return-value]

    def _start_server(self) -> None:
        """
        Starts the Elm Language Server, waits for the server to be ready and yields the LanguageServer instance.
        """
        workspace_ready = threading.Event()

        def do_nothing(params: dict) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        def on_diagnostics(params: dict) -> None:
            # Receiving diagnostics indicates the workspace has been scanned
            log.info("LSP: Received diagnostics notification, workspace is ready")
            workspace_ready.set()

        # Handlers are registered before server.start() is called.
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", on_diagnostics)
        log.info("Starting Elm server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        # Elm-specific capability checks
        assert "textDocumentSync" in init_response["capabilities"]
        assert "completionProvider" in init_response["capabilities"]
        assert "definitionProvider" in init_response["capabilities"]
        assert "referencesProvider" in init_response["capabilities"]
        assert "documentSymbolProvider" in init_response["capabilities"]
        self.server.notify.initialized({})
        log.info("Elm server initialized, waiting for workspace scan...")
        # Wait for workspace to be scanned (indicated by receiving diagnostics)
        if workspace_ready.wait(timeout=30.0):
            log.info("Elm server workspace scan completed")
        else:
            log.warning("Timeout waiting for Elm workspace scan, proceeding anyway")
        log.info("Elm server ready")

    @override
    def _get_wait_time_for_cross_file_referencing(self) -> float:
        # NOTE(review): presumably a grace period (seconds) before cross-file
        # reference requests are trusted — confirm against the base class.
        return 1.0
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/elm_language_server.py",
"license": "MIT License",
"lines": 172,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/erlang_language_server.py | """Erlang Language Server implementation using Erlang LS."""
import logging
import os
import shutil
import subprocess
import threading
import time
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class ErlangLanguageServer(SolidLanguageServer):
    """Language server for Erlang using Erlang LS."""

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates an ErlangLanguageServer instance. This class is not meant to be instantiated directly.
        Use LanguageServer.create() instead.

        :raises RuntimeError: if the ``erlang_ls`` binary or the Erlang/OTP runtime is not found.
        """
        self.erlang_ls_path = shutil.which("erlang_ls")
        if not self.erlang_ls_path:
            raise RuntimeError("Erlang LS not found. Install from: https://github.com/erlang-ls/erlang_ls")
        if not self._check_erlang_installation():
            raise RuntimeError("Erlang/OTP not found. Install from: https://www.erlang.org/downloads")
        super().__init__(
            config,
            repository_root_path,
            ProcessLaunchInfo(cmd=[self.erlang_ls_path, "--transport", "stdio"], cwd=repository_root_path),
            "erlang",
            solidlsp_settings,
        )
        # Add server readiness tracking like Elixir
        self.server_ready = threading.Event()
        # Set generous timeout for Erlang LS initialization
        self.set_request_timeout(120.0)

    def _check_erlang_installation(self) -> bool:
        """Check if Erlang/OTP is available."""
        try:
            result = subprocess.run(["erl", "-version"], check=False, capture_output=True, text=True, timeout=10)
            return result.returncode == 0
        except (subprocess.SubprocessError, FileNotFoundError):
            return False

    @classmethod
    def _get_erlang_version(cls) -> str | None:
        """Get the installed Erlang/OTP version or None if not found."""
        try:
            result = subprocess.run(["erl", "-version"], check=False, capture_output=True, text=True, timeout=10)
            if result.returncode == 0:
                return result.stderr.strip()  # erl -version outputs to stderr
        except (subprocess.SubprocessError, FileNotFoundError):
            return None
        return None

    @classmethod
    def _check_rebar3_available(cls) -> bool:
        """Check if rebar3 build tool is available."""
        try:
            result = subprocess.run(["rebar3", "version"], check=False, capture_output=True, text=True, timeout=10)
            return result.returncode == 0
        except (subprocess.SubprocessError, FileNotFoundError):
            return False

    def _start_server(self) -> None:
        """Start Erlang LS server process with proper initialization waiting."""

        def register_capability_handler(params: dict) -> None:
            return

        def window_log_message(msg: dict) -> None:
            """Handle window/logMessage notifications from Erlang LS"""
            message_text = msg.get("message", "")
            log.info(f"LSP: window/logMessage: {message_text}")
            # Look for Erlang LS readiness signals
            # Common patterns: "Started Erlang LS", "initialized", "ready"
            readiness_signals = [
                "Started Erlang LS",
                "server started",
                "initialized",
                "ready to serve requests",
                "compilation finished",
                "indexing complete",
            ]
            message_lower = message_text.lower()
            for signal in readiness_signals:
                if signal.lower() in message_lower:
                    log.info(f"Erlang LS readiness signal detected: {message_text}")
                    self.server_ready.set()
                    break

        def do_nothing(params: dict) -> None:
            return

        def check_server_ready(params: dict) -> None:
            """Handle $/progress notifications from Erlang LS as fallback."""
            value = params.get("value", {})
            # Check for initialization completion progress
            if value.get("kind") == "end":
                message = value.get("message", "")
                if any(word in message.lower() for word in ["initialized", "ready", "complete"]):
                    log.info("Erlang LS initialization progress completed")
                    # Set as fallback if no window/logMessage was received
                    if not self.server_ready.is_set():
                        self.server_ready.set()

        # Set up notification handlers
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", check_server_ready)
        self.server.on_notification("window/workDoneProgress/create", do_nothing)
        self.server.on_notification("$/workDoneProgress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Erlang LS server process")
        self.server.start()
        # Send initialize request
        # NOTE(review): sibling servers build rootUri via pathlib.Path(...).as_uri()
        # and pass os.getpid() as processId; the hand-built "file://" URI here is
        # invalid for Windows drive paths — confirm whether this divergence is intentional.
        initialize_params = {
            "processId": None,
            "rootPath": self.repository_root_path,
            "rootUri": f"file://{self.repository_root_path}",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True},
                    "completion": {"dynamicRegistration": True},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {"dynamicRegistration": True},
                    "hover": {"dynamicRegistration": True},
                }
            },
        }
        log.info("Sending initialize request to Erlang LS")
        init_response = self.server.send.initialize(initialize_params)  # type: ignore[arg-type]
        # Verify server capabilities
        if "capabilities" in init_response:
            log.info(f"Erlang LS capabilities: {list(init_response['capabilities'].keys())}")
        self.server.notify.initialized({})
        # Wait for Erlang LS to be ready - adjust timeout based on environment
        is_ci = os.getenv("CI") == "true" or os.getenv("GITHUB_ACTIONS") == "true"
        is_macos = os.uname().sysname == "Darwin" if hasattr(os, "uname") else False
        # macOS in CI can be particularly slow for language server startup
        if is_ci and is_macos:
            ready_timeout = 240.0  # 4 minutes for macOS CI
            env_desc = "macOS CI"
        elif is_ci:
            ready_timeout = 180.0  # 3 minutes for other CI
            env_desc = "CI"
        else:
            ready_timeout = 60.0  # 1 minute for local
            env_desc = "local"
        log.info(f"Waiting up to {ready_timeout} seconds for Erlang LS readiness ({env_desc} environment)...")
        if self.server_ready.wait(timeout=ready_timeout):
            log.info("Erlang LS is ready and available for requests")
            # Add settling period for indexing - adjust based on environment
            settling_time = 15.0 if is_ci else 5.0
            log.info(f"Allowing {settling_time} seconds for Erlang LS indexing to complete...")
            time.sleep(settling_time)
            log.info("Erlang LS settling period complete")
        else:
            # Set ready anyway and continue - Erlang LS might not send explicit ready messages
            log.warning(f"Erlang LS readiness timeout reached after {ready_timeout}s, proceeding anyway (common in CI)")
            self.server_ready.set()
            # Still give some time for basic initialization even without explicit readiness signal
            basic_settling_time = 20.0 if is_ci else 10.0
            log.info(f"Allowing {basic_settling_time} seconds for basic Erlang LS initialization...")
            time.sleep(basic_settling_time)
            log.info("Basic Erlang LS initialization period complete")

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For Erlang projects, we should ignore:
        # - _build: rebar3 build artifacts
        # - deps: dependencies
        # - ebin: compiled beam files
        # - .rebar3: rebar3 cache
        # - logs: log files
        # - node_modules: if the project has JavaScript components
        return super().is_ignored_dirname(dirname) or dirname in [
            "_build",
            "deps",
            "ebin",
            ".rebar3",
            "logs",
            "node_modules",
            "_checkouts",
            "cover",
        ]

    def is_ignored_filename(self, filename: str) -> bool:
        """Check if a filename should be ignored."""
        # Ignore compiled BEAM files
        if filename.endswith(".beam"):
            return True
        # Don't ignore Erlang source files, header files, or configuration files
        return False
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/erlang_language_server.py",
"license": "MIT License",
"lines": 185,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/lua_ls.py | """
Provides Lua specific instantiation of the LanguageServer class using lua-language-server.
"""
import logging
import os
import pathlib
import platform
import shutil
import tarfile
import zipfile
from pathlib import Path
import requests
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class LuaLanguageServer(SolidLanguageServer):
    """
    Provides Lua specific instantiation of the LanguageServer class using lua-language-server.
    """

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For Lua projects, we should ignore:
        # - .luarocks: package manager cache
        # - lua_modules: local dependencies
        # - node_modules: if the project has JavaScript components
        return super().is_ignored_dirname(dirname) or dirname in [".luarocks", "lua_modules", "node_modules", "build", "dist", ".cache"]

    @staticmethod
    def _get_lua_ls_path() -> str | None:
        """Get the path to lua-language-server executable.

        Returns the first match from PATH or a list of well-known install
        locations, or None if no installation is found.
        """
        # First check if it's in PATH
        lua_ls = shutil.which("lua-language-server")
        if lua_ls:
            return lua_ls
        # Check common installation locations
        home = Path.home()
        possible_paths = [
            home / ".local" / "bin" / "lua-language-server",
            home / ".serena" / "language_servers" / "lua" / "bin" / "lua-language-server",
            Path("/usr/local/bin/lua-language-server"),
            Path("/opt/lua-language-server/bin/lua-language-server"),
        ]
        # Add Windows-specific paths
        if platform.system() == "Windows":
            possible_paths.extend(
                [
                    home / "AppData" / "Local" / "lua-language-server" / "bin" / "lua-language-server.exe",
                    home / ".serena" / "language_servers" / "lua" / "bin" / "lua-language-server.exe",
                ]
            )
        for path in possible_paths:
            if path.exists():
                return str(path)
        return None

    @staticmethod
    def _download_lua_ls() -> str:
        """Download and install lua-language-server if not present.

        Downloads the pinned release archive for the current OS/architecture
        from GitHub into ~/.serena/language_servers/lua and extracts it.

        :return: absolute path of the installed executable.
        :raises RuntimeError: on unsupported platforms or if the executable is
            missing after extraction.
        """
        system = platform.system()
        machine = platform.machine().lower()
        lua_ls_version = "3.15.0"
        # Map platform and architecture to download URL
        if system == "Linux":
            if machine in ["x86_64", "amd64"]:
                download_name = f"lua-language-server-{lua_ls_version}-linux-x64.tar.gz"
            elif machine in ["aarch64", "arm64"]:
                download_name = f"lua-language-server-{lua_ls_version}-linux-arm64.tar.gz"
            else:
                raise RuntimeError(f"Unsupported Linux architecture: {machine}")
        elif system == "Darwin":
            if machine in ["x86_64", "amd64"]:
                download_name = f"lua-language-server-{lua_ls_version}-darwin-x64.tar.gz"
            elif machine in ["arm64", "aarch64"]:
                download_name = f"lua-language-server-{lua_ls_version}-darwin-arm64.tar.gz"
            else:
                raise RuntimeError(f"Unsupported macOS architecture: {machine}")
        elif system == "Windows":
            if machine in ["amd64", "x86_64"]:
                download_name = f"lua-language-server-{lua_ls_version}-win32-x64.zip"
            else:
                raise RuntimeError(f"Unsupported Windows architecture: {machine}")
        else:
            raise RuntimeError(f"Unsupported operating system: {system}")
        download_url = f"https://github.com/LuaLS/lua-language-server/releases/download/{lua_ls_version}/{download_name}"
        # Create installation directory
        install_dir = Path.home() / ".serena" / "language_servers" / "lua"
        install_dir.mkdir(parents=True, exist_ok=True)
        # Download the file
        # NOTE(review): no timeout is passed to requests.get and the response is
        # never closed — a stalled connection can hang indefinitely. Consider
        # timeout= and a `with` block; confirm with maintainers.
        print(f"Downloading lua-language-server from {download_url}...")
        response = requests.get(download_url, stream=True)
        response.raise_for_status()
        # Save and extract
        download_path = install_dir / download_name
        with open(download_path, "wb") as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)
        print(f"Extracting lua-language-server to {install_dir}...")
        if download_name.endswith(".tar.gz"):
            with tarfile.open(download_path, "r:gz") as tar:
                tar.extractall(install_dir)
        elif download_name.endswith(".zip"):
            with zipfile.ZipFile(download_path, "r") as zip_ref:
                zip_ref.extractall(install_dir)
        # Clean up download file
        download_path.unlink()
        # Make executable on Unix systems
        if system != "Windows":
            lua_ls_path = install_dir / "bin" / "lua-language-server"
            if lua_ls_path.exists():
                lua_ls_path.chmod(0o755)
                return str(lua_ls_path)
        else:
            lua_ls_path = install_dir / "bin" / "lua-language-server.exe"
            if lua_ls_path.exists():
                return str(lua_ls_path)
        raise RuntimeError("Failed to find lua-language-server executable after extraction")

    @staticmethod
    def _setup_runtime_dependency() -> str:
        """
        Check if required Lua runtime dependencies are available.
        Downloads lua-language-server if not present.
        """
        lua_ls_path = LuaLanguageServer._get_lua_ls_path()
        if not lua_ls_path:
            print("lua-language-server not found. Downloading...")
            lua_ls_path = LuaLanguageServer._download_lua_ls()
            print(f"lua-language-server installed at: {lua_ls_path}")
        return lua_ls_path

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        # Ensure the lua-language-server binary exists (downloading on demand)
        # before handing the launch command to the base class.
        lua_ls_path = self._setup_runtime_dependency()
        super().__init__(
            config, repository_root_path, ProcessLaunchInfo(cmd=lua_ls_path, cwd=repository_root_path), "lua", solidlsp_settings
        )
        # Counter for outgoing request ids.
        self.request_id = 0

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Lua Language Server.

        :param repository_absolute_path: absolute path of the repository root,
            used for rootPath/rootUri and the single workspace folder.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                            "preselectSupport": True,
                        },
                    },
                    "hover": {
                        "dynamicRegistration": True,
                        "contentFormat": ["markdown", "plaintext"],
                    },
                    "signatureHelp": {
                        "dynamicRegistration": True,
                        "signatureInformation": {
                            "documentationFormat": ["markdown", "plaintext"],
                            "parameterInformation": {"labelOffsetSupport": True},
                        },
                    },
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "configuration": True,
                    "symbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
            "initializationOptions": {
                # Lua Language Server specific options
                "runtime": {
                    "version": "Lua 5.4",
                    "path": ["?.lua", "?/init.lua"],
                },
                "diagnostics": {
                    "enable": True,
                    "globals": ["vim", "describe", "it", "before_each", "after_each"],  # Common globals
                },
                "workspace": {
                    "library": [],  # Can be extended with project-specific libraries
                    "checkThirdParty": False,
                    "userThirdParty": [],
                },
                "telemetry": {
                    "enable": False,
                },
                "completion": {
                    "enable": True,
                    "callSnippet": "Both",
                    "keywordSnippet": "Both",
                },
            },
        }
        return initialize_params  # type: ignore[return-value]

    def _start_server(self) -> None:
        """Start Lua Language Server process"""

        def register_capability_handler(params: dict) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        def do_nothing(params: dict) -> None:
            return

        # Handlers are registered before server.start() is called.
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Lua Language Server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        # Verify server capabilities
        assert "textDocumentSync" in init_response["capabilities"]
        assert "definitionProvider" in init_response["capabilities"]
        assert "documentSymbolProvider" in init_response["capabilities"]
        assert "referencesProvider" in init_response["capabilities"]
        self.server.notify.initialized({})
        # Lua Language Server is typically ready immediately after initialization
        # (no need to wait for events)
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/lua_ls.py",
"license": "MIT License",
"lines": 246,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/marksman.py | """
Provides Markdown specific instantiation of the LanguageServer class using marksman.
Contains various configurations and settings specific to Markdown.
"""
import logging
import os
import pathlib
from collections.abc import Hashable
from overrides import override
from solidlsp.ls import (
DocumentSymbols,
LanguageServerDependencyProvider,
LanguageServerDependencyProviderSinglePath,
LSPFileBuffer,
SolidLanguageServer,
)
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_types import SymbolKind, UnifiedSymbolInformation
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.settings import SolidLSPSettings
from .common import RuntimeDependency, RuntimeDependencyCollection
log = logging.getLogger(__name__)
class Marksman(SolidLanguageServer):
"""
Provides Markdown specific instantiation of the LanguageServer class using marksman.
"""
class DependencyProvider(LanguageServerDependencyProviderSinglePath):
    # Pinned marksman release used for all platforms.
    marksman_releases = "https://github.com/artempyanykh/marksman/releases/download/2024-12-18"
    # One entry per supported platform id; note osx-x64 and osx-arm64 both
    # point at the same "marksman-macos" artifact.
    runtime_dependencies = RuntimeDependencyCollection(
        [
            RuntimeDependency(
                id="marksman",
                url=f"{marksman_releases}/marksman-linux-x64",
                platform_id="linux-x64",
                archive_type="binary",
                binary_name="marksman",
            ),
            RuntimeDependency(
                id="marksman",
                url=f"{marksman_releases}/marksman-linux-arm64",
                platform_id="linux-arm64",
                archive_type="binary",
                binary_name="marksman",
            ),
            RuntimeDependency(
                id="marksman",
                url=f"{marksman_releases}/marksman-macos",
                platform_id="osx-x64",
                archive_type="binary",
                binary_name="marksman",
            ),
            RuntimeDependency(
                id="marksman",
                url=f"{marksman_releases}/marksman-macos",
                platform_id="osx-arm64",
                archive_type="binary",
                binary_name="marksman",
            ),
            RuntimeDependency(
                id="marksman",
                url=f"{marksman_releases}/marksman.exe",
                platform_id="win-x64",
                archive_type="binary",
                binary_name="marksman.exe",
            ),
        ]
    )

    def _get_or_install_core_dependency(self) -> str:
        """Setup runtime dependencies for marksman and return the command to start the server.

        Downloads the platform-specific marksman binary into the LS resources
        directory on first use and marks it executable.

        :raises FileNotFoundError: if the binary is missing after the download attempt.
        """
        deps = self.runtime_dependencies
        dependency = deps.get_single_dep_for_current_platform()
        marksman_ls_dir = self._ls_resources_dir
        marksman_executable_path = deps.binary_path(marksman_ls_dir)
        if not os.path.exists(marksman_executable_path):
            log.info(
                f"Downloading marksman from {dependency.url} to {marksman_ls_dir}",
            )
            deps.install(marksman_ls_dir)
            if not os.path.exists(marksman_executable_path):
                raise FileNotFoundError(f"Download failed? Could not find marksman executable at {marksman_executable_path}")
        # Ensure the binary is executable regardless of how it got here.
        os.chmod(marksman_executable_path, 0o755)
        return marksman_executable_path

    def _create_launch_command(self, core_path: str) -> list[str]:
        # "server" runs marksman in language-server mode.
        return [core_path, "server"]
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
    """Initialize a Marksman language server wrapper.

    Not intended for direct construction — obtain instances through
    LanguageServer.create() instead.
    """
    # Launch info is None: the dependency provider supplies the command later.
    super().__init__(config, repository_root_path, None, "markdown", solidlsp_settings)
def _create_dependency_provider(self) -> LanguageServerDependencyProvider:
    """Return the provider responsible for fetching the marksman binary."""
    provider = self.DependencyProvider(self._custom_settings, self._ls_resources_dir)
    return provider
@override
def is_ignored_dirname(self, dirname: str) -> bool:
    """Skip editor/tooling output directories common in markdown projects."""
    if super().is_ignored_dirname(dirname):
        return True
    return dirname in ("node_modules", ".obsidian", ".vitepress", ".vuepress")
def _document_symbols_cache_fingerprint(self) -> Hashable | None:
request_document_symbols_override_version = 1
return request_document_symbols_override_version
@override
def request_document_symbols(self, relative_file_path: str, file_buffer: LSPFileBuffer | None = None) -> DocumentSymbols:
    """Remap Marksman's heading symbol kinds from String to Namespace.

    Marksman LSP returns all markdown headings (h1-h6) with SymbolKind.String (15).
    This is problematic because String (15) >= Variable (13), so headings are
    classified as "low-level" and filtered out of symbol overviews.

    Remapping to Namespace (3) fixes this and is semantically appropriate
    (headings are named sections containing other content).
    """
    document_symbols = super().request_document_symbols(relative_file_path, file_buffer=file_buffer)
    # NOTE: When changing this method, also update the cache fingerprint method above
    # Iterative depth-first walk over the symbol tree, mutating kinds in place.
    pending = list(document_symbols.root_symbols)
    while pending:
        current = pending.pop()
        if current["kind"] == SymbolKind.String:
            current["kind"] = SymbolKind.Namespace
        pending.extend(current.get("children", []))
    return document_symbols
@staticmethod
def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
    """
    Build the LSP initialize params sent to the Marksman Language Server.
    """
    root_uri = pathlib.Path(repository_absolute_path).as_uri()
    text_document_capabilities = {
        "synchronization": {"didSave": True, "dynamicRegistration": True},
        "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
        "definition": {"dynamicRegistration": True},
        "references": {"dynamicRegistration": True},
        "documentSymbol": {
            "dynamicRegistration": True,
            "hierarchicalDocumentSymbolSupport": True,
            "symbolKind": {"valueSet": list(range(1, 27))},  # type: ignore[arg-type]
        },
        "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},  # type: ignore[list-item]
        "codeAction": {"dynamicRegistration": True},
    }
    workspace_capabilities = {
        "workspaceFolders": True,
        "didChangeConfiguration": {"dynamicRegistration": True},
        "symbol": {"dynamicRegistration": True},
    }
    initialize_params: InitializeParams = {  # type: ignore
        "processId": os.getpid(),
        "locale": "en",
        "rootPath": repository_absolute_path,
        "rootUri": root_uri,
        "capabilities": {
            "textDocument": text_document_capabilities,
            "workspace": workspace_capabilities,
        },
        "workspaceFolders": [
            {
                "uri": root_uri,
                "name": os.path.basename(repository_absolute_path),
            }
        ],
    }
    return initialize_params
def _start_server(self) -> None:
    """
    Starts the Marksman Language Server and waits for it to be ready.
    """

    def register_capability_handler(_params: dict) -> None:
        # Dynamic capability registration is acknowledged but otherwise ignored.
        return

    def window_log_message(msg: dict) -> None:
        log.info(f"LSP: window/logMessage: {msg}")

    def do_nothing(_params: dict) -> None:
        return

    # Handlers must be registered before the process starts so no message is missed.
    self.server.on_request("client/registerCapability", register_capability_handler)
    self.server.on_notification("window/logMessage", window_log_message)
    self.server.on_notification("$/progress", do_nothing)
    self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
    log.info("Starting marksman server process")
    self.server.start()
    initialize_params = self._get_initialize_params(self.repository_root_path)
    log.info("Sending initialize request from LSP client to marksman server and awaiting response")
    init_response = self.server.send.initialize(initialize_params)
    log.debug(f"Received initialize response from marksman server: {init_response}")
    # Verify server capabilities
    assert "textDocumentSync" in init_response["capabilities"]
    assert "completionProvider" in init_response["capabilities"]
    assert "definitionProvider" in init_response["capabilities"]
    self.server.notify.initialized({})
    # marksman is typically ready immediately after initialization
    log.info("Marksman server initialization complete")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/marksman.py",
"license": "MIT License",
"lines": 189,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/nixd_ls.py | # type: ignore
"""
Provides Nix specific instantiation of the LanguageServer class using nixd (Nix Language Server).
Note: Windows is not supported as Nix itself doesn't support Windows natively.
"""
import logging
import os
import pathlib
import platform
import shutil
import subprocess
from pathlib import Path
from overrides import override
from solidlsp import ls_types
from solidlsp.ls import DocumentSymbols, LSPFileBuffer, SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class NixLanguageServer(SolidLanguageServer):
    """
    Provides Nix specific instantiation of the LanguageServer class using nixd.
    """

    def _extend_nix_symbol_range_to_include_semicolon(
        self, symbol: ls_types.UnifiedSymbolInformation, file_content: str
    ) -> ls_types.UnifiedSymbolInformation:
        """
        Extend symbol range to include trailing semicolon for Nix attribute symbols.

        nixd provides ranges that exclude semicolons (expression-level), but serena needs
        statement-level ranges that include semicolons for proper replacement.

        :param symbol: the symbol whose range may be extended (not mutated; a copy is returned if extended)
        :param file_content: full text of the file the symbol belongs to
        :return: the original symbol, or a copy with the range extended by one character
        """
        range_info = symbol["range"]
        end_line = range_info["end"]["line"]
        end_char = range_info["end"]["character"]
        # Split file content into lines
        lines = file_content.split("\n")
        if end_line >= len(lines):
            # Defensive: range points past the end of the file; leave the symbol untouched.
            return symbol
        line = lines[end_line]
        # Check if there's a semicolon immediately after the current range end
        if end_char < len(line) and line[end_char] == ";":
            # Extend range to include the semicolon
            new_range = {"start": range_info["start"], "end": {"line": end_line, "character": end_char + 1}}
            # Create modified symbol with extended range
            extended_symbol = symbol.copy()
            extended_symbol["range"] = new_range
            # CRITICAL: Also update the location.range if it exists
            if extended_symbol.get("location"):
                location = extended_symbol["location"].copy()
                if "range" in location:
                    location["range"] = new_range.copy()
                extended_symbol["location"] = location
            return extended_symbol
        return symbol

    @override
    def request_document_symbols(self, relative_file_path: str, file_buffer: LSPFileBuffer | None = None) -> DocumentSymbols:
        # Override to extend Nix symbol ranges to include trailing semicolons.
        # nixd provides expression-level ranges (excluding semicolons) but serena needs
        # statement-level ranges (including semicolons) for proper symbol replacement.
        # Get symbols from parent implementation
        document_symbols = super().request_document_symbols(relative_file_path, file_buffer=file_buffer)
        # Get file content for range extension
        file_content = self.language_server.retrieve_full_file_content(relative_file_path)

        # Extend ranges for all symbols recursively
        def extend_symbol_and_children(symbol: ls_types.UnifiedSymbolInformation) -> ls_types.UnifiedSymbolInformation:
            # Extend this symbol's range
            extended = self._extend_nix_symbol_range_to_include_semicolon(symbol, file_content)
            # Extend children recursively
            if extended.get("children"):
                extended["children"] = [extend_symbol_and_children(child) for child in extended["children"]]
            return extended

        # Apply range extension to all symbols
        extended_root_symbols = [extend_symbol_and_children(sym) for sym in document_symbols.root_symbols]
        return DocumentSymbols(extended_root_symbols)

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For Nix projects, we should ignore:
        # - result: nix build output symlinks
        # - result-*: multiple build outputs
        # - .direnv: direnv cache
        return super().is_ignored_dirname(dirname) or dirname in ["result", ".direnv"] or dirname.startswith("result-")

    @staticmethod
    def _get_nixd_version():
        """Get the installed nixd version or None if not found."""
        try:
            result = subprocess.run(["nixd", "--version"], capture_output=True, text=True, check=False)
            if result.returncode == 0:
                # nixd outputs version like: nixd 2.0.0
                return result.stdout.strip()
        except FileNotFoundError:
            return None
        return None

    @staticmethod
    def _check_nixd_installed():
        """Check if nixd is installed in the system."""
        # NOTE(review): this helper appears unused within this module -- confirm callers before removing.
        return shutil.which("nixd") is not None

    @staticmethod
    def _get_nixd_path():
        """Get the path to nixd executable.

        Checks PATH first, then a list of common installation locations.
        Returns the path as a string, or None if nixd cannot be found.
        """
        # First check if it's in PATH
        nixd_path = shutil.which("nixd")
        if nixd_path:
            return nixd_path
        # Check common installation locations
        home = Path.home()
        possible_paths = [
            home / ".local" / "bin" / "nixd",
            home / ".serena" / "language_servers" / "nixd" / "nixd",
            home / ".nix-profile" / "bin" / "nixd",
            Path("/usr/local/bin/nixd"),
            Path("/run/current-system/sw/bin/nixd"),  # NixOS system profile
            Path("/opt/homebrew/bin/nixd"),  # Homebrew on Apple Silicon
            Path("/usr/local/opt/nixd/bin/nixd"),  # Homebrew on Intel Mac
        ]
        # Add Windows-specific paths
        # NOTE(review): the module docstring says Windows is unsupported; these paths are
        # presumably kept for completeness -- confirm whether they are reachable in practice.
        if platform.system() == "Windows":
            possible_paths.extend(
                [
                    home / "AppData" / "Local" / "nixd" / "nixd.exe",
                    home / ".serena" / "language_servers" / "nixd" / "nixd.exe",
                ]
            )
        for path in possible_paths:
            if path.exists():
                return str(path)
        return None

    @staticmethod
    def _install_nixd_with_nix():
        """Install nixd using nix if available.

        Tries `nix profile install` first and falls back to `nix-env`.
        Returns the path to the installed nixd, or None on failure.
        """
        # Check if nix is available
        if not shutil.which("nix"):
            return None
        print("Installing nixd using nix... This may take a few minutes.")
        try:
            # Try to install nixd using nix profile
            result = subprocess.run(
                ["nix", "profile", "install", "github:nix-community/nixd"],
                capture_output=True,
                text=True,
                check=False,
                timeout=600,  # 10 minute timeout for building
            )
            if result.returncode == 0:
                # Check if nixd is now in PATH
                nixd_path = shutil.which("nixd")
                if nixd_path:
                    print(f"Successfully installed nixd at: {nixd_path}")
                    return nixd_path
            else:
                # Try nix-env as fallback
                result = subprocess.run(
                    ["nix-env", "-iA", "nixpkgs.nixd"],
                    capture_output=True,
                    text=True,
                    check=False,
                    timeout=600,
                )
                if result.returncode == 0:
                    nixd_path = shutil.which("nixd")
                    if nixd_path:
                        print(f"Successfully installed nixd at: {nixd_path}")
                        return nixd_path
            # Reached when installation succeeded but nixd is still not on PATH,
            # or when both install attempts returned a non-zero exit code.
            print(f"Failed to install nixd: {result.stderr}")
        except subprocess.TimeoutExpired:
            print("Nix install timed out after 10 minutes")
        except Exception as e:
            print(f"Error installing nixd with nix: {e}")
        return None

    @staticmethod
    def _setup_runtime_dependency():
        """
        Check if required Nix runtime dependencies are available.
        Attempts to install nixd if not present.

        :return: path to a verified nixd executable
        :raises RuntimeError: if Nix is missing, nixd cannot be installed, or nixd fails to run
        """
        # First check if Nix is available (nixd needs it at runtime)
        if not shutil.which("nix"):
            print("WARNING: Nix is not installed. nixd requires Nix to function properly.")
            raise RuntimeError("Nix is required for nixd. Please install Nix from https://nixos.org/download.html")
        nixd_path = NixLanguageServer._get_nixd_path()
        if not nixd_path:
            print("nixd not found. Attempting to install...")
            # Try to install with nix if available
            nixd_path = NixLanguageServer._install_nixd_with_nix()
        if not nixd_path:
            raise RuntimeError(
                "nixd (Nix Language Server) is not installed.\n"
                "Please install nixd using one of the following methods:\n"
                "  - Using Nix flakes: nix profile install github:nix-community/nixd\n"
                "  - From nixpkgs: nix-env -iA nixpkgs.nixd\n"
                "  - On macOS with Homebrew: brew install nixd\n\n"
                "After installation, make sure 'nixd' is in your PATH."
            )
        # Verify nixd works
        try:
            result = subprocess.run([nixd_path, "--version"], capture_output=True, text=True, check=False, timeout=5)
            if result.returncode != 0:
                raise RuntimeError(f"nixd failed to run: {result.stderr}")
        except Exception as e:
            raise RuntimeError(f"Failed to verify nixd installation: {e}")
        return nixd_path

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """Create a NixLanguageServer; resolves (and if needed installs) nixd first."""
        nixd_path = self._setup_runtime_dependency()
        super().__init__(config, repository_root_path, ProcessLaunchInfo(cmd=nixd_path, cwd=repository_root_path), "nix", solidlsp_settings)
        # Counter for JSON-RPC request ids.
        self.request_id = 0

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for nixd.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                            "preselectSupport": True,
                        },
                    },
                    "hover": {
                        "dynamicRegistration": True,
                        "contentFormat": ["markdown", "plaintext"],
                    },
                    "signatureHelp": {
                        "dynamicRegistration": True,
                        "signatureInformation": {
                            "documentationFormat": ["markdown", "plaintext"],
                            "parameterInformation": {"labelOffsetSupport": True},
                        },
                    },
                    "codeAction": {
                        "dynamicRegistration": True,
                        "codeActionLiteralSupport": {
                            "codeActionKind": {
                                "valueSet": [
                                    "",
                                    "quickfix",
                                    "refactor",
                                    "refactor.extract",
                                    "refactor.inline",
                                    "refactor.rewrite",
                                    "source",
                                    "source.organizeImports",
                                ]
                            }
                        },
                    },
                    "rename": {"dynamicRegistration": True, "prepareSupport": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "configuration": True,
                    "symbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
            "initializationOptions": {
                # nixd specific options
                "nixpkgs": {"expr": "import <nixpkgs> { }"},
                "formatting": {"command": ["nixpkgs-fmt"]},  # or ["alejandra"] or ["nixfmt"]
                "options": {
                    "enable": True,
                    "target": {
                        "installable": "",  # Will be auto-detected from flake.nix if present
                    },
                },
            },
        }
        return initialize_params

    def _start_server(self):
        """Start nixd server process"""

        def register_capability_handler(params):
            return

        def window_log_message(msg):
            log.info(f"LSP: window/logMessage: {msg}")

        def do_nothing(params):
            return

        # Register handlers before starting the process so no message is missed.
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting nixd server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        # Verify server capabilities
        assert "textDocumentSync" in init_response["capabilities"]
        assert "definitionProvider" in init_response["capabilities"]
        assert "documentSymbolProvider" in init_response["capabilities"]
        assert "referencesProvider" in init_response["capabilities"]
        self.server.notify.initialized({})
        # nixd server is typically ready immediately after initialization
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/nixd_ls.py",
"license": "MIT License",
"lines": 319,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/perl_language_server.py | """
Provides Perl specific instantiation of the LanguageServer class using Perl::LanguageServer.
Note: Windows is not supported, as Perl::LanguageServer does not run natively on Windows.
"""
import logging
import os
import pathlib
import subprocess
import time
from typing import Any
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_utils import PlatformId, PlatformUtils
from solidlsp.lsp_protocol_handler.lsp_types import DidChangeConfigurationParams, InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class PerlLanguageServer(SolidLanguageServer):
    """
    Provides Perl specific instantiation of the LanguageServer class using Perl::LanguageServer.
    """

    @staticmethod
    def _get_perl_version() -> str | None:
        """Get the installed Perl version or None if not found."""
        try:
            result = subprocess.run(["perl", "-v"], capture_output=True, text=True, check=False)
            if result.returncode == 0:
                return result.stdout.strip()
        except FileNotFoundError:
            # perl executable not on PATH
            return None
        return None

    @staticmethod
    def _get_perl_language_server_version() -> str | None:
        """Get the installed Perl::LanguageServer version or None if not found."""
        try:
            # Loading the module and printing $VERSION doubles as an installation check.
            result = subprocess.run(
                ["perl", "-MPerl::LanguageServer", "-e", "print $Perl::LanguageServer::VERSION"],
                capture_output=True,
                text=True,
                check=False,
            )
            if result.returncode == 0:
                return result.stdout.strip()
        except FileNotFoundError:
            return None
        return None

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For Perl projects, we should ignore:
        # - blib: build library directory
        # - local: local Perl module installation
        # - .carton: Carton dependency manager cache
        # - vendor: vendored dependencies
        # - _build: Module::Build output
        # - cover_db: Devel::Cover output
        return super().is_ignored_dirname(dirname) or dirname in ["blib", "local", ".carton", "vendor", "_build", "cover_db"]

    @classmethod
    def _setup_runtime_dependencies(cls) -> str:
        """
        Check if required Perl runtime dependencies are available.
        Raises RuntimeError with helpful message if dependencies are missing.

        :return: the shell command used to launch Perl::LanguageServer
        """
        platform_id = PlatformUtils.get_platform_id()
        valid_platforms = [
            PlatformId.LINUX_x64,
            PlatformId.LINUX_arm64,
            PlatformId.OSX,
            PlatformId.OSX_x64,
            PlatformId.OSX_arm64,
        ]
        if platform_id not in valid_platforms:
            raise RuntimeError(f"Platform {platform_id} is not supported for Perl at the moment")
        perl_version = cls._get_perl_version()
        if not perl_version:
            raise RuntimeError(
                "Perl is not installed. Please install Perl from https://www.perl.org/get.html and make sure it is added to your PATH."
            )
        perl_ls_version = cls._get_perl_language_server_version()
        if not perl_ls_version:
            raise RuntimeError(
                "Found a Perl version but Perl::LanguageServer is not installed.\n"
                "Please install Perl::LanguageServer: cpanm Perl::LanguageServer\n"
                "See: https://metacpan.org/pod/Perl::LanguageServer"
            )
        return "perl -MPerl::LanguageServer -e 'Perl::LanguageServer::run'"

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """Create a PerlLanguageServer instance after verifying runtime dependencies."""
        # Setup runtime dependencies before initializing
        perl_ls_cmd = self._setup_runtime_dependencies()
        super().__init__(
            config, repository_root_path, ProcessLaunchInfo(cmd=perl_ls_cmd, cwd=repository_root_path), "perl", solidlsp_settings
        )
        # Counter for JSON-RPC request ids.
        self.request_id = 0

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for Perl::LanguageServer.
        Based on the expected structure from Perl::LanguageServer::Methods::_rpcreq_initialize.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {"dynamicRegistration": True},
                    "hover": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {"dynamicRegistration": True},
                },
            },
            "initializationOptions": {},
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore

    def _start_server(self) -> None:
        """Start Perl::LanguageServer process"""

        def register_capability_handler(params: Any) -> None:
            return

        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")

        def do_nothing(params: Any) -> None:
            return

        def workspace_configuration_handler(params: Any) -> Any:
            """Handle workspace/configuration request from Perl::LanguageServer."""
            log.info(f"Received workspace/configuration request: {params}")
            perl_config = {
                "perlInc": [self.repository_root_path, "."],
                "fileFilter": [".pm", ".pl"],
                "ignoreDirs": [".git", ".svn", "blib", "local", ".carton", "vendor", "_build", "cover_db"],
            }
            return [perl_config]

        # Register handlers before starting the process so no message is missed.
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_request("workspace/configuration", workspace_configuration_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Perl::LanguageServer process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        log.info(
            "After sent initialize params",
        )
        # Verify server capabilities
        assert "textDocumentSync" in init_response["capabilities"]
        assert "definitionProvider" in init_response["capabilities"]
        assert "referencesProvider" in init_response["capabilities"]
        self.server.notify.initialized({})
        # Send workspace configuration to Perl::LanguageServer
        # Perl::LanguageServer requires didChangeConfiguration to set perlInc, fileFilter, and ignoreDirs
        # See: Perl::LanguageServer::Methods::workspace::_rpcnot_didChangeConfiguration
        perl_config: DidChangeConfigurationParams = {
            "settings": {
                "perl": {
                    "perlInc": [self.repository_root_path, "."],
                    "fileFilter": [".pm", ".pl"],
                    "ignoreDirs": [".git", ".svn", "blib", "local", ".carton", "vendor", "_build", "cover_db"],
                }
            }
        }
        log.info(f"Sending workspace/didChangeConfiguration notification with config: {perl_config}")
        self.server.notify.workspace_did_change_configuration(perl_config)
        # Perl::LanguageServer needs time to index files and resolve cross-file references
        # Without this delay, requests for definitions/references may return empty results
        settling_time = 0.5
        log.info(f"Allowing {settling_time} seconds for Perl::LanguageServer to index files...")
        time.sleep(settling_time)
        log.info("Perl::LanguageServer settling period complete")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/perl_language_server.py",
"license": "MIT License",
"lines": 181,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/r_language_server.py | import logging
import os
import pathlib
import subprocess
from typing import Any
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class RLanguageServer(SolidLanguageServer):
    """R Language Server implementation using the languageserver R package."""

    @override
    def _get_wait_time_for_cross_file_referencing(self) -> float:
        return 5.0  # R language server needs extra time for workspace indexing in CI environments

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For R projects, ignore common directories
        return super().is_ignored_dirname(dirname) or dirname in [
            "renv",  # R environment management
            "packrat",  # Legacy R package management
            ".Rproj.user",  # RStudio project files
            "vignettes",  # Package vignettes (often large)
        ]

    @staticmethod
    def _check_r_installation() -> None:
        """Check if R and languageserver are available.

        :raises RuntimeError: if R is missing or the languageserver package is not installed
        """
        try:
            # Check R installation
            result = subprocess.run(["R", "--version"], capture_output=True, text=True, check=False)
            if result.returncode != 0:
                raise RuntimeError("R is not installed or not in PATH")
            # Check languageserver package
            result = subprocess.run(
                ["R", "--vanilla", "--quiet", "--slave", "-e", "if (!require('languageserver', quietly=TRUE)) quit(status=1)"],
                capture_output=True,
                text=True,
                check=False,
            )
            if result.returncode != 0:
                raise RuntimeError(
                    "R languageserver package is not installed.\nInstall it with: R -e \"install.packages('languageserver')\""
                )
        except FileNotFoundError:
            # The R executable itself could not be found.
            raise RuntimeError("R is not installed. Please install R from https://www.r-project.org/")

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """Create an RLanguageServer instance after verifying the R installation."""
        # Check R installation
        self._check_r_installation()
        # R command to start language server
        # Use --vanilla for minimal startup and --quiet to suppress all output except LSP
        # Set specific options to improve parsing stability
        r_cmd = 'R --vanilla --quiet --slave -e "options(languageserver.debug_mode = FALSE); languageserver::run()"'
        super().__init__(config, repository_root_path, ProcessLaunchInfo(cmd=r_cmd, cwd=repository_root_path), "r", solidlsp_settings)

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """Initialize params for R Language Server."""
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                            "preselectSupport": True,
                        },
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "formatting": {"dynamicRegistration": True},
                    "rangeFormatting": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore

    def _start_server(self) -> None:
        """Start R Language Server process."""

        def window_log_message(msg: dict) -> None:
            log.info(f"R LSP: window/logMessage: {msg}")

        def do_nothing(params: Any) -> None:
            return

        def register_capability_handler(params: Any) -> None:
            return

        # Register LSP message handlers
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting R Language Server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info(
            "Sending initialize request to R Language Server",
        )
        init_response = self.server.send.initialize(initialize_params)
        # Verify server capabilities
        capabilities = init_response.get("capabilities", {})
        assert "textDocumentSync" in capabilities
        if "completionProvider" in capabilities:
            log.info("R LSP completion provider available")
        if "definitionProvider" in capabilities:
            log.info("R LSP definition provider available")
        self.server.notify.initialized({})
        # R Language Server is ready after initialization
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/r_language_server.py",
"license": "MIT License",
"lines": 134,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/regal_server.py | """Regal Language Server implementation for Rego policy files."""
import logging
import os
import shutil
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_utils import PathUtils
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class RegalLanguageServer(SolidLanguageServer):
    """
    Provides Rego specific instantiation of the LanguageServer class using Regal.

    Regal is the official linter and language server for Rego (Open Policy Agent's policy language).
    See: https://github.com/StyraInc/regal
    """

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # Ignore Regal's and OPA's own cache/config directories.
        return super().is_ignored_dirname(dirname) or dirname in [".regal", ".opa"]

    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
        """
        Creates a RegalLanguageServer instance.
        This class is not meant to be instantiated directly. Use LanguageServer.create() instead.

        :param config: Language server configuration
        :param repository_root_path: Path to the repository root
        :param solidlsp_settings: Settings for solidlsp
        :raises RuntimeError: if the regal executable cannot be found on PATH
        """
        # Regal should be installed system-wide (via CI or user installation)
        regal_executable_path = shutil.which("regal")
        if not regal_executable_path:
            raise RuntimeError(
                "Regal language server not found. Please install it from https://github.com/StyraInc/regal or via your package manager."
            )
        super().__init__(
            config,
            repository_root_path,
            ProcessLaunchInfo(cmd=f"{regal_executable_path} language-server", cwd=repository_root_path),
            "rego",
            solidlsp_settings,
        )

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Regal Language Server.

        :param repository_absolute_path: Absolute path to the repository
        :return: LSP initialization parameters
        """
        root_uri = PathUtils.path_to_uri(repository_absolute_path)
        return {
            "processId": os.getpid(),
            "locale": "en",
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "completion": {"dynamicRegistration": True, "completionItem": {"snippetSupport": True}},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},  # type: ignore[arg-type]
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},  # type: ignore[list-item]
                    "codeAction": {"dynamicRegistration": True},
                    "formatting": {"dynamicRegistration": True},
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "symbol": {"dynamicRegistration": True},
                },
            },
            "workspaceFolders": [
                {
                    "name": os.path.basename(repository_absolute_path),
                    "uri": root_uri,
                }
            ],
        }

    def _start_server(self) -> None:
        """Start Regal language server process and wait for initialization."""

        def register_capability_handler(params) -> None:  # type: ignore[no-untyped-def]
            return

        def window_log_message(msg) -> None:  # type: ignore[no-untyped-def]
            log.info(f"LSP: window/logMessage: {msg}")

        def do_nothing(params) -> None:  # type: ignore[no-untyped-def]
            return

        # Register handlers before starting the process so no message is missed.
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting Regal language server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info(
            "Sending initialize request from LSP client to LSP server and awaiting response",
        )
        init_response = self.server.send.initialize(initialize_params)
        # Verify server capabilities
        assert "capabilities" in init_response
        assert "textDocumentSync" in init_response["capabilities"]
        self.server.notify.initialized({})
        # Regal server is ready immediately after initialization
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/regal_server.py",
"license": "MIT License",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/solidlsp/language_servers/ruby_lsp.py | """
Ruby LSP Language Server implementation using Shopify's ruby-lsp.
Provides modern Ruby language server capabilities with improved performance.
"""
import json
import logging
import os
import pathlib
import shutil
import subprocess
import threading
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams, InitializeResult
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class RubyLsp(SolidLanguageServer):
"""
Provides Ruby specific instantiation of the LanguageServer class using ruby-lsp.
Contains various configurations and settings specific to Ruby with modern LSP features.
"""
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
"""
Creates a RubyLsp instance. This class is not meant to be instantiated directly.
Use LanguageServer.create() instead.
"""
ruby_lsp_executable = self._setup_runtime_dependencies(config, repository_root_path)
super().__init__(
config, repository_root_path, ProcessLaunchInfo(cmd=ruby_lsp_executable, cwd=repository_root_path), "ruby", solidlsp_settings
)
self.analysis_complete = threading.Event()
self.service_ready_event = threading.Event()
# Set timeout for ruby-lsp requests - ruby-lsp is fast
self.set_request_timeout(30.0) # 30 seconds for initialization and requests
@override
def is_ignored_dirname(self, dirname: str) -> bool:
"""Override to ignore Ruby-specific directories that cause performance issues."""
ruby_ignored_dirs = [
"vendor", # Ruby vendor directory
".bundle", # Bundler cache
"tmp", # Temporary files
"log", # Log files
"coverage", # Test coverage reports
".yardoc", # YARD documentation cache
"doc", # Generated documentation
"node_modules", # Node modules (for Rails with JS)
"storage", # Active Storage files (Rails)
"public/packs", # Webpacker output
"public/webpack", # Webpack output
"public/assets", # Rails compiled assets
]
return super().is_ignored_dirname(dirname) or dirname in ruby_ignored_dirs
@override
def _get_wait_time_for_cross_file_referencing(self) -> float:
"""Override to provide optimal wait time for ruby-lsp cross-file reference resolution.
ruby-lsp typically initializes quickly, but may need a brief moment
for cross-file analysis in larger projects.
"""
return 0.5 # 500ms should be sufficient for ruby-lsp
@staticmethod
def _find_executable_with_extensions(executable_name: str) -> str | None:
"""
Find executable with Windows-specific extensions (.bat, .cmd, .exe) if on Windows.
Returns the full path to the executable or None if not found.
"""
import platform
if platform.system() == "Windows":
# Try Windows-specific extensions first
for ext in [".bat", ".cmd", ".exe"]:
path = shutil.which(f"{executable_name}{ext}")
if path:
return path
# Fall back to default search
return shutil.which(executable_name)
else:
# Unix systems
return shutil.which(executable_name)
    @staticmethod
    def _setup_runtime_dependencies(config: LanguageServerConfig, repository_root_path: str) -> list[str]:
        """
        Setup runtime dependencies for ruby-lsp and return the command list to start the server.

        Installation strategy: Bundler project > global ruby-lsp > gem install ruby-lsp

        :param config: language server configuration (currently unused here; kept for signature parity)
        :param repository_root_path: absolute path of the repository root
        :return: the command (as an argv list) used to launch ruby-lsp
        :raises RuntimeError: if Ruby is missing or ruby-lsp cannot be found/installed
        """
        # Detect rbenv-managed Ruby environment
        # When .ruby-version exists, it indicates the project uses rbenv for version management.
        # rbenv automatically reads .ruby-version to determine which Ruby version to use.
        # Using "rbenv exec" ensures commands run with the correct Ruby version and its gems.
        #
        # Why rbenv is preferred over system Ruby:
        # - Respects project-specific Ruby versions
        # - Avoids bundler version mismatches between system and project
        # - Ensures consistent environment across developers
        #
        # Fallback behavior:
        # If .ruby-version doesn't exist or rbenv isn't installed, we fall back to system Ruby.
        # This may cause issues if:
        # - System Ruby version differs from what the project expects
        # - System bundler version is incompatible with Gemfile.lock
        # - Project gems aren't installed in system Ruby
        ruby_version_file = os.path.join(repository_root_path, ".ruby-version")
        use_rbenv = os.path.exists(ruby_version_file) and shutil.which("rbenv") is not None

        if use_rbenv:
            ruby_cmd = ["rbenv", "exec", "ruby"]
            bundle_cmd = ["rbenv", "exec", "bundle"]
            log.info(f"Using rbenv-managed Ruby (found {ruby_version_file})")
        else:
            ruby_cmd = ["ruby"]
            bundle_cmd = ["bundle"]
            if os.path.exists(ruby_version_file):
                log.warning(
                    f"Found {ruby_version_file} but rbenv is not installed. "
                    "Using system Ruby. Consider installing rbenv for better version management: https://github.com/rbenv/rbenv",
                )
            else:
                log.info("No .ruby-version file found, using system Ruby")

        # Check if Ruby is installed (hard requirement; everything below shells out to Ruby tooling)
        try:
            result = subprocess.run(ruby_cmd + ["--version"], check=True, capture_output=True, cwd=repository_root_path, text=True)
            ruby_version = result.stdout.strip()
            log.info(f"Ruby version: {ruby_version}")

            # Extract version number for compatibility checks
            import re

            version_match = re.search(r"ruby (\d+)\.(\d+)\.(\d+)", ruby_version)
            if version_match:
                major, minor, patch = map(int, version_match.groups())
                # Warn (but do not fail) on very old Rubies
                if major < 2 or (major == 2 and minor < 6):
                    log.warning(f"Warning: Ruby {major}.{minor}.{patch} detected. ruby-lsp works best with Ruby 2.6+")
        except subprocess.CalledProcessError as e:
            error_msg = e.stderr if isinstance(e.stderr, str) else e.stderr.decode() if e.stderr else "Unknown error"
            raise RuntimeError(
                f"Error checking Ruby installation: {error_msg}. Please ensure Ruby is properly installed and in PATH."
            ) from e
        except FileNotFoundError as e:
            raise RuntimeError(
                "Ruby is not installed or not found in PATH. Please install Ruby using one of these methods:\n"
                " - Using rbenv: rbenv install 3.0.0 && rbenv global 3.0.0\n"
                " - Using RVM: rvm install 3.0.0 && rvm use 3.0.0 --default\n"
                " - Using asdf: asdf install ruby 3.0.0 && asdf global ruby 3.0.0\n"
                " - System package manager (brew install ruby, apt install ruby, etc.)"
            ) from e

        # Check for Bundler project (Gemfile exists)
        gemfile_path = os.path.join(repository_root_path, "Gemfile")
        gemfile_lock_path = os.path.join(repository_root_path, "Gemfile.lock")
        is_bundler_project = os.path.exists(gemfile_path)

        if is_bundler_project:
            log.info("Detected Bundler project (Gemfile found)")

            # Check if bundle command is available using Windows-compatible search
            bundle_path = RubyLsp._find_executable_with_extensions(bundle_cmd[0] if len(bundle_cmd) == 1 else "bundle")
            if not bundle_path:
                # Try common bundle executables (project binstub first, then PATH)
                for bundle_executable in ["bin/bundle", "bundle"]:
                    bundle_full_path: str | None
                    if bundle_executable.startswith("bin/"):
                        bundle_full_path = os.path.join(repository_root_path, bundle_executable)
                    else:
                        bundle_full_path = RubyLsp._find_executable_with_extensions(bundle_executable)
                    if bundle_full_path and os.path.exists(bundle_full_path):
                        bundle_path = bundle_full_path if bundle_executable.startswith("bin/") else bundle_executable
                        break

            if not bundle_path:
                log.warning(
                    "Bundler project detected but 'bundle' command not found. Falling back to global ruby-lsp installation.",
                )
            else:
                # Check if ruby-lsp is in Gemfile.lock; if so, prefer the bundled version
                ruby_lsp_in_bundle = False
                if os.path.exists(gemfile_lock_path):
                    try:
                        with open(gemfile_lock_path) as f:
                            content = f.read()
                            ruby_lsp_in_bundle = "ruby-lsp" in content.lower()
                    except Exception as e:
                        log.warning(f"Warning: Could not read Gemfile.lock: {e}")

                if ruby_lsp_in_bundle:
                    log.info("Found ruby-lsp in Gemfile.lock")
                    return bundle_cmd + ["exec", "ruby-lsp"]
                else:
                    log.info(
                        "ruby-lsp not found in Gemfile.lock. Consider adding 'gem \"ruby-lsp\"' to your Gemfile for better compatibility.",
                    )
                    # Fall through to global installation check

        # Check if ruby-lsp is available globally using Windows-compatible search
        ruby_lsp_path = RubyLsp._find_executable_with_extensions("ruby-lsp")
        if ruby_lsp_path:
            log.info(f"Found ruby-lsp at: {ruby_lsp_path}")
            return [ruby_lsp_path]

        # Try to install ruby-lsp globally (last resort)
        log.info("ruby-lsp not found, attempting to install globally...")
        try:
            subprocess.run(["gem", "install", "ruby-lsp"], check=True, capture_output=True, cwd=repository_root_path)
            log.info("Successfully installed ruby-lsp globally")
            # Find the newly installed ruby-lsp executable
            ruby_lsp_path = RubyLsp._find_executable_with_extensions("ruby-lsp")
            return [ruby_lsp_path] if ruby_lsp_path else ["ruby-lsp"]
        except subprocess.CalledProcessError as e:
            error_msg = e.stderr if isinstance(e.stderr, str) else e.stderr.decode() if e.stderr else str(e)
            if is_bundler_project:
                raise RuntimeError(
                    f"Failed to install ruby-lsp globally: {error_msg}\n"
                    "For Bundler projects, please add 'gem \"ruby-lsp\"' to your Gemfile and run 'bundle install'.\n"
                    "Alternatively, install globally: gem install ruby-lsp"
                ) from e
            raise RuntimeError(f"Failed to install ruby-lsp: {error_msg}\nPlease try installing manually: gem install ruby-lsp") from e
@staticmethod
def _detect_rails_project(repository_root_path: str) -> bool:
"""
Detect if this is a Rails project by checking for Rails-specific files.
"""
rails_indicators = [
"config/application.rb",
"config/environment.rb",
"app/controllers/application_controller.rb",
"Rakefile",
]
for indicator in rails_indicators:
if os.path.exists(os.path.join(repository_root_path, indicator)):
return True
# Check for Rails in Gemfile
gemfile_path = os.path.join(repository_root_path, "Gemfile")
if os.path.exists(gemfile_path):
try:
with open(gemfile_path) as f:
content = f.read().lower()
if "gem 'rails'" in content or 'gem "rails"' in content:
return True
except Exception:
pass
return False
@staticmethod
def _get_ruby_exclude_patterns(repository_root_path: str) -> list[str]:
"""
Get Ruby and Rails-specific exclude patterns for better performance.
"""
base_patterns = [
"**/vendor/**", # Ruby vendor directory
"**/.bundle/**", # Bundler cache
"**/tmp/**", # Temporary files
"**/log/**", # Log files
"**/coverage/**", # Test coverage reports
"**/.yardoc/**", # YARD documentation cache
"**/doc/**", # Generated documentation
"**/.git/**", # Git directory
"**/node_modules/**", # Node modules (for Rails with JS)
"**/public/assets/**", # Rails compiled assets
]
# Add Rails-specific patterns if this is a Rails project
if RubyLsp._detect_rails_project(repository_root_path):
base_patterns.extend(
[
"**/app/assets/builds/**", # Rails 7+ CSS builds
"**/storage/**", # Active Storage
"**/public/packs/**", # Webpacker
"**/public/webpack/**", # Webpack
]
)
return base_patterns
    def _get_initialize_params(self) -> InitializeParams:
        """
        Returns ruby-lsp specific initialization parameters.

        Advertises hierarchical document symbols and snippet-capable completion,
        and configures ruby-lsp's indexer to cover Ruby sources while skipping
        the exclude patterns computed for this repository.
        """
        exclude_patterns = self._get_ruby_exclude_patterns(self.repository_root_path)

        initialize_params = {
            "processId": os.getpid(),
            "rootPath": self.repository_root_path,
            "rootUri": pathlib.Path(self.repository_root_path).as_uri(),
            "capabilities": {
                "workspace": {
                    "workspaceEdit": {"documentChanges": True},
                    "configuration": True,
                },
                "window": {
                    "workDoneProgress": True,  # lets ruby-lsp report indexing progress via $/progress
                },
                "textDocument": {
                    "documentSymbol": {
                        # Hierarchical symbols are required to build a nested symbol tree
                        "hierarchicalDocumentSymbolSupport": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "completion": {
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                        }
                    },
                },
            },
            "initializationOptions": {
                # ruby-lsp enables all features by default, so we don't need to specify enabledFeatures
                "experimentalFeaturesEnabled": False,
                "featuresConfiguration": {},
                "indexing": {
                    # Index Ruby sources, rake tasks, rackup files and ERB templates
                    "includedPatterns": ["**/*.rb", "**/*.rake", "**/*.ru", "**/*.erb"],
                    "excludedPatterns": exclude_patterns,
                },
            },
        }
        return initialize_params  # type: ignore
def _start_server(self) -> None:
"""
Starts the ruby-lsp Language Server for Ruby
"""
def register_capability_handler(params: dict) -> None:
assert "registrations" in params
for registration in params["registrations"]:
log.info(f"Registered capability: {registration['method']}")
return
def lang_status_handler(params: dict) -> None:
log.info(f"LSP: language/status: {params}")
if params.get("type") == "ready":
log.info("ruby-lsp service is ready.")
self.analysis_complete.set()
def execute_client_command_handler(params: dict) -> list:
return []
def do_nothing(params: dict) -> None:
return
def window_log_message(msg: dict) -> None:
log.info(f"LSP: window/logMessage: {msg}")
def progress_handler(params: dict) -> None:
# ruby-lsp sends progress notifications during indexing
log.debug(f"LSP: $/progress: {params}")
if "value" in params:
value = params["value"]
# Check for completion indicators
if value.get("kind") == "end":
log.info("ruby-lsp indexing complete ($/progress end)")
self.analysis_complete.set()
elif value.get("kind") == "begin":
log.info("ruby-lsp indexing started ($/progress begin)")
elif "percentage" in value:
percentage = value.get("percentage", 0)
log.debug(f"ruby-lsp indexing progress: {percentage}%")
# Handle direct progress format (fallback)
elif "token" in params and "value" in params:
token = params.get("token")
if isinstance(token, str) and "indexing" in token.lower():
value = params.get("value", {})
if value.get("kind") == "end" or value.get("percentage") == 100:
log.info("ruby-lsp indexing complete (token progress)")
self.analysis_complete.set()
def window_work_done_progress_create(params: dict) -> dict:
"""Handle workDoneProgress/create requests from ruby-lsp"""
log.debug(f"LSP: window/workDoneProgress/create: {params}")
return {}
self.server.on_request("client/registerCapability", register_capability_handler)
self.server.on_notification("language/status", lang_status_handler)
self.server.on_notification("window/logMessage", window_log_message)
self.server.on_request("workspace/executeClientCommand", execute_client_command_handler)
self.server.on_notification("$/progress", progress_handler)
self.server.on_request("window/workDoneProgress/create", window_work_done_progress_create)
self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
log.info("Starting ruby-lsp server process")
self.server.start()
initialize_params = self._get_initialize_params()
log.info("Sending initialize request from LSP client to LSP server and awaiting response")
log.info(f"Sending init params: {json.dumps(initialize_params, indent=4)}")
init_response = self.server.send.initialize(initialize_params)
log.info(f"Received init response: {init_response}")
# Verify expected capabilities
# Note: ruby-lsp may return textDocumentSync in different formats (number or object)
text_document_sync = init_response["capabilities"].get("textDocumentSync")
if isinstance(text_document_sync, int):
assert text_document_sync in [1, 2], f"Unexpected textDocumentSync value: {text_document_sync}"
elif isinstance(text_document_sync, dict):
# ruby-lsp returns an object with change property
assert "change" in text_document_sync, "textDocumentSync object should have 'change' property"
assert "completionProvider" in init_response["capabilities"]
self.server.notify.initialized({})
# Wait for ruby-lsp to complete its initial indexing
# ruby-lsp has fast indexing
log.info("Waiting for ruby-lsp to complete initial indexing...")
if self.analysis_complete.wait(timeout=30.0):
log.info("ruby-lsp initial indexing complete, server ready")
else:
log.warning("Timeout waiting for ruby-lsp indexing completion, proceeding anyway")
# Fallback: assume indexing is complete after timeout
self.analysis_complete.set()
def _handle_initialization_response(self, init_response: InitializeResult) -> None:
"""
Handle the initialization response from ruby-lsp and validate capabilities.
"""
if "capabilities" in init_response:
capabilities = init_response["capabilities"]
# Validate textDocumentSync (ruby-lsp may return different formats)
text_document_sync = capabilities.get("textDocumentSync")
if isinstance(text_document_sync, int):
assert text_document_sync in [1, 2], f"Unexpected textDocumentSync value: {text_document_sync}"
elif isinstance(text_document_sync, dict):
# ruby-lsp returns an object with change property
assert "change" in text_document_sync, "textDocumentSync object should have 'change' property"
# Log important capabilities
important_capabilities = [
"completionProvider",
"hoverProvider",
"definitionProvider",
"referencesProvider",
"documentSymbolProvider",
"codeActionProvider",
"documentFormattingProvider",
"semanticTokensProvider",
]
for cap in important_capabilities:
if cap in capabilities:
log.debug(f"ruby-lsp {cap}: available")
# Signal that the service is ready
self.service_ready_event.set()
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/ruby_lsp.py",
"license": "MIT License",
"lines": 407,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/sourcekit_lsp.py | import logging
import os
import pathlib
import subprocess
import time
from overrides import override
from solidlsp import ls_types
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class SourceKitLSP(SolidLanguageServer):
"""
Provides Swift specific instantiation of the LanguageServer class using sourcekit-lsp.
"""
@override
def is_ignored_dirname(self, dirname: str) -> bool:
# For Swift projects, we should ignore:
# - .build: Swift Package Manager build artifacts
# - .swiftpm: Swift Package Manager metadata
# - node_modules: if the project has JavaScript components
# - dist/build: common output directories
return super().is_ignored_dirname(dirname) or dirname in [".build", ".swiftpm", "node_modules", "dist", "build"]
@staticmethod
def _get_sourcekit_lsp_version() -> str:
"""Get the installed sourcekit-lsp version or raise error if sourcekit was not found."""
try:
result = subprocess.run(["sourcekit-lsp", "-h"], capture_output=True, text=True, check=False)
if result.returncode == 0:
return result.stdout.strip()
else:
raise Exception(f"`sourcekit-lsp -h` resulted in: {result}")
except Exception as e:
raise RuntimeError(
"Could not find sourcekit-lsp, please install it as described in https://github.com/apple/sourcekit-lsp#installation"
"And make sure it is available on your PATH."
) from e
def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings):
sourcekit_version = self._get_sourcekit_lsp_version()
log.info(f"Starting sourcekit lsp with version: {sourcekit_version}")
super().__init__(
config, repository_root_path, ProcessLaunchInfo(cmd="sourcekit-lsp", cwd=repository_root_path), "swift", solidlsp_settings
)
self.request_id = 0
self._did_sleep_before_requesting_references = False
self._initialization_timestamp: float | None = None
    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Swift Language Server.

        The capability set mirrors what VS Code advertises (see clientInfo below),
        since sourcekit-lsp is primarily tested against that client.

        :param repository_absolute_path: absolute path of the repository root
        :return: the InitializeParams payload for the initialize request
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "capabilities": {
                "general": {
                    "markdown": {"parser": "marked", "version": "1.1.0"},
                    "positionEncodings": ["utf-16"],
                    "regularExpressions": {"engine": "ECMAScript", "version": "ES2020"},
                    "staleRequestSupport": {
                        "cancel": True,
                        "retryOnContentModified": [
                            "textDocument/semanticTokens/full",
                            "textDocument/semanticTokens/range",
                            "textDocument/semanticTokens/full/delta",
                        ],
                    },
                },
                "notebookDocument": {"synchronization": {"dynamicRegistration": True, "executionSummarySupport": True}},
                # Per-document features the client claims to support
                "textDocument": {
                    "callHierarchy": {"dynamicRegistration": True},
                    "codeAction": {
                        "codeActionLiteralSupport": {
                            "codeActionKind": {
                                "valueSet": [
                                    "",
                                    "quickfix",
                                    "refactor",
                                    "refactor.extract",
                                    "refactor.inline",
                                    "refactor.rewrite",
                                    "source",
                                    "source.organizeImports",
                                ]
                            }
                        },
                        "dataSupport": True,
                        "disabledSupport": True,
                        "dynamicRegistration": True,
                        "honorsChangeAnnotations": True,
                        "isPreferredSupport": True,
                        "resolveSupport": {"properties": ["edit"]},
                    },
                    "codeLens": {"dynamicRegistration": True},
                    "colorProvider": {"dynamicRegistration": True},
                    "completion": {
                        "completionItem": {
                            "commitCharactersSupport": True,
                            "deprecatedSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "insertReplaceSupport": True,
                            "insertTextModeSupport": {"valueSet": [1, 2]},
                            "labelDetailsSupport": True,
                            "preselectSupport": True,
                            "resolveSupport": {"properties": ["documentation", "detail", "additionalTextEdits"]},
                            "snippetSupport": True,
                            "tagSupport": {"valueSet": [1]},
                        },
                        "completionItemKind": {
                            "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
                        },
                        "completionList": {"itemDefaults": ["commitCharacters", "editRange", "insertTextFormat", "insertTextMode", "data"]},
                        "contextSupport": True,
                        "dynamicRegistration": True,
                        "insertTextMode": 2,
                    },
                    "declaration": {"dynamicRegistration": True, "linkSupport": True},
                    "definition": {"dynamicRegistration": True, "linkSupport": True},
                    "diagnostic": {"dynamicRegistration": True, "relatedDocumentSupport": False},
                    "documentHighlight": {"dynamicRegistration": True},
                    "documentLink": {"dynamicRegistration": True, "tooltipSupport": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        # Hierarchical symbols are required to build a nested symbol tree
                        "hierarchicalDocumentSymbolSupport": True,
                        "labelSupport": True,
                        "symbolKind": {
                            "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26]
                        },
                        "tagSupport": {"valueSet": [1]},
                    },
                    "foldingRange": {
                        "dynamicRegistration": True,
                        "foldingRange": {"collapsedText": False},
                        "foldingRangeKind": {"valueSet": ["comment", "imports", "region"]},
                        "lineFoldingOnly": True,
                        "rangeLimit": 5000,
                    },
                    "formatting": {"dynamicRegistration": True},
                    "hover": {"contentFormat": ["markdown", "plaintext"], "dynamicRegistration": True},
                    "implementation": {"dynamicRegistration": True, "linkSupport": True},
                    "inlayHint": {
                        "dynamicRegistration": True,
                        "resolveSupport": {"properties": ["tooltip", "textEdits", "label.tooltip", "label.location", "label.command"]},
                    },
                    "inlineValue": {"dynamicRegistration": True},
                    "linkedEditingRange": {"dynamicRegistration": True},
                    "onTypeFormatting": {"dynamicRegistration": True},
                    "publishDiagnostics": {
                        "codeDescriptionSupport": True,
                        "dataSupport": True,
                        "relatedInformation": True,
                        "tagSupport": {"valueSet": [1, 2]},
                        "versionSupport": False,
                    },
                    "rangeFormatting": {"dynamicRegistration": True, "rangesSupport": True},
                    "references": {"dynamicRegistration": True},
                    "rename": {
                        "dynamicRegistration": True,
                        "honorsChangeAnnotations": True,
                        "prepareSupport": True,
                        "prepareSupportDefaultBehavior": 1,
                    },
                    "selectionRange": {"dynamicRegistration": True},
                    "semanticTokens": {
                        "augmentsSyntaxTokens": True,
                        "dynamicRegistration": True,
                        "formats": ["relative"],
                        "multilineTokenSupport": False,
                        "overlappingTokenSupport": False,
                        "requests": {"full": {"delta": True}, "range": True},
                        "serverCancelSupport": True,
                        "tokenModifiers": [
                            "declaration",
                            "definition",
                            "readonly",
                            "static",
                            "deprecated",
                            "abstract",
                            "async",
                            "modification",
                            "documentation",
                            "defaultLibrary",
                        ],
                        "tokenTypes": [
                            "namespace",
                            "type",
                            "class",
                            "enum",
                            "interface",
                            "struct",
                            "typeParameter",
                            "parameter",
                            "variable",
                            "property",
                            "enumMember",
                            "event",
                            "function",
                            "method",
                            "macro",
                            "keyword",
                            "modifier",
                            "comment",
                            "string",
                            "number",
                            "regexp",
                            "operator",
                            "decorator",
                        ],
                    },
                    "signatureHelp": {
                        "contextSupport": True,
                        "dynamicRegistration": True,
                        "signatureInformation": {
                            "activeParameterSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "parameterInformation": {"labelOffsetSupport": True},
                        },
                    },
                    "synchronization": {"didSave": True, "dynamicRegistration": True, "willSave": True, "willSaveWaitUntil": True},
                    "typeDefinition": {"dynamicRegistration": True, "linkSupport": True},
                    "typeHierarchy": {"dynamicRegistration": True},
                },
                "window": {
                    "showDocument": {"support": True},
                    "showMessage": {"messageActionItem": {"additionalPropertiesSupport": True}},
                    "workDoneProgress": True,
                },
                # Workspace-level features the client claims to support
                "workspace": {
                    "applyEdit": True,
                    "codeLens": {"refreshSupport": True},
                    "configuration": True,
                    "diagnostics": {"refreshSupport": True},
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "didChangeWatchedFiles": {"dynamicRegistration": True, "relativePatternSupport": True},
                    "executeCommand": {"dynamicRegistration": True},
                    "fileOperations": {
                        "didCreate": True,
                        "didDelete": True,
                        "didRename": True,
                        "dynamicRegistration": True,
                        "willCreate": True,
                        "willDelete": True,
                        "willRename": True,
                    },
                    "foldingRange": {"refreshSupport": True},
                    "inlayHint": {"refreshSupport": True},
                    "inlineValue": {"refreshSupport": True},
                    "semanticTokens": {"refreshSupport": False},
                    "symbol": {
                        "dynamicRegistration": True,
                        "resolveSupport": {"properties": ["location.range"]},
                        "symbolKind": {
                            "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26]
                        },
                        "tagSupport": {"valueSet": [1]},
                    },
                    "workspaceEdit": {
                        "changeAnnotationSupport": {"groupsOnLabel": True},
                        "documentChanges": True,
                        "failureHandling": "textOnlyTransactional",
                        "normalizesLineEndings": True,
                        "resourceOperations": ["create", "rename", "delete"],
                    },
                    "workspaceFolders": True,
                },
            },
            # Present as VS Code, the client sourcekit-lsp is primarily developed against
            "clientInfo": {"name": "Visual Studio Code", "version": "1.102.2"},
            # sourcekit-lsp-specific options: background indexing is required for
            # cross-file navigation to work without a prior full build
            "initializationOptions": {
                "backgroundIndexing": True,
                "backgroundPreparationMode": "enabled",
                "textDocument/codeLens": {"supportedCommands": {"swift.debug": "swift.debug", "swift.run": "swift.run"}},
                "window/didChangeActiveDocument": True,
                "workspace/getReferenceDocument": True,
                "workspace/peekDocuments": True,
            },
            "locale": "en",
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params  # type: ignore[return-value]
def _start_server(self) -> None:
"""Start sourcekit-lsp server process"""
def register_capability_handler(_params: dict) -> None:
return
def window_log_message(msg: dict) -> None:
log.info(f"LSP: window/logMessage: {msg}")
def do_nothing(_params: dict) -> None:
return
self.server.on_request("client/registerCapability", register_capability_handler)
self.server.on_notification("window/logMessage", window_log_message)
self.server.on_notification("$/progress", do_nothing)
self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
log.info("Starting sourcekit-lsp server process")
self.server.start()
initialize_params = self._get_initialize_params(self.repository_root_path)
log.info("Sending initialize request from LSP client to LSP server and awaiting response")
init_response = self.server.send.initialize(initialize_params)
capabilities = init_response["capabilities"]
log.info(f"SourceKit LSP capabilities: {list(capabilities.keys())}")
assert "textDocumentSync" in capabilities, "textDocumentSync capability missing"
assert "definitionProvider" in capabilities, "definitionProvider capability missing"
self.server.notify.initialized({})
# Mark initialization timestamp for smarter delay calculation
self._initialization_timestamp = time.time()
@override
def request_references(self, relative_file_path: str, line: int, column: int) -> list[ls_types.Location]:
# SourceKit LSP needs initialization + indexing time after startup
# before it can provide accurate reference information. This sleep
# prevents race conditions where references might not be available yet.
# CI environments need extra time for project indexing and cross-file analysis
if not self._did_sleep_before_requesting_references:
# Calculate minimum delay based on how much time has passed since initialization
if self._initialization_timestamp:
elapsed = time.time() - self._initialization_timestamp
# Increased CI delay for project indexing: 15s CI, 5s local
base_delay = 15 if os.getenv("CI") else 5
remaining_delay = max(2, base_delay - elapsed)
else:
# Fallback if initialization timestamp is missing
remaining_delay = 15 if os.getenv("CI") else 5
log.info(f"Sleeping {remaining_delay:.1f}s before requesting references for the first time (CI needs extra indexing time)")
time.sleep(remaining_delay)
self._did_sleep_before_requesting_references = True
# Get references with retry logic for CI stability
references = super().request_references(relative_file_path, line, column)
# In CI, if no references found, retry once after additional delay
if os.getenv("CI") and not references:
log.info("No references found in CI - retrying after additional 5s delay")
time.sleep(5)
references = super().request_references(relative_file_path, line, column)
return references
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/sourcekit_lsp.py",
"license": "MIT License",
"lines": 339,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/language_servers/zls.py | """
Provides Zig specific instantiation of the LanguageServer class using ZLS (Zig Language Server).
"""
import logging
import os
import pathlib
import platform
import shutil
import subprocess
from overrides import override
from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings
log = logging.getLogger(__name__)
class ZigLanguageServer(SolidLanguageServer):
    """
    Provides Zig specific instantiation of the LanguageServer class using ZLS.

    Requires both the ``zig`` compiler and the ``zls`` executable on the PATH;
    availability is verified in :meth:`_setup_runtime_dependency` before the
    server process is launched.
    """
    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        # For Zig projects, we should ignore:
        # - zig-cache: build cache directory
        # - zig-out: default build output directory
        # - .zig-cache: alternative cache location
        # - node_modules: if the project has JavaScript components
        # "build" and "dist" are additionally skipped as generic output dirs.
        return super().is_ignored_dirname(dirname) or dirname in ["zig-cache", "zig-out", ".zig-cache", "node_modules", "build", "dist"]
    @staticmethod
    def _get_zig_version() -> str | None:
        """Get the installed Zig version or None if not found."""
        try:
            # check=False: a non-zero exit status means "not usable" and is
            # reported via the None return; only a missing binary raises.
            result = subprocess.run(["zig", "version"], capture_output=True, text=True, check=False)
            if result.returncode == 0:
                return result.stdout.strip()
        except FileNotFoundError:
            return None
        return None
    @staticmethod
    def _get_zls_version() -> str | None:
        """Get the installed ZLS version or None if not found."""
        try:
            result = subprocess.run(["zls", "--version"], capture_output=True, text=True, check=False)
            if result.returncode == 0:
                return result.stdout.strip()
        except FileNotFoundError:
            return None
        return None
    @staticmethod
    def _check_zls_installed() -> bool:
        """Check if ZLS is installed in the system."""
        return shutil.which("zls") is not None
    @staticmethod
    def _setup_runtime_dependency() -> bool:
        """
        Check if required Zig runtime dependencies are available.
        Raises RuntimeError with helpful message if dependencies are missing.

        :return: True when both Zig and ZLS are usable on a supported platform.
        :raises RuntimeError: on Windows, or when Zig or ZLS cannot be found.
        """
        # Check for Windows and provide error message
        if platform.system() == "Windows":
            raise RuntimeError(
                "Windows is not supported by ZLS in this integration. "
                "Cross-file references don't work reliably on Windows. Reason unknown."
            )
        zig_version = ZigLanguageServer._get_zig_version()
        if not zig_version:
            raise RuntimeError(
                "Zig is not installed. Please install Zig from https://ziglang.org/download/ and make sure it is added to your PATH."
            )
        if not ZigLanguageServer._check_zls_installed():
            # zls is not on the PATH; consult `zls --version` as a last resort
            # before failing with installation instructions.
            zls_version = ZigLanguageServer._get_zls_version()
            if not zls_version:
                raise RuntimeError(
                    "Found Zig but ZLS (Zig Language Server) is not installed.\n"
                    "Please install ZLS from https://github.com/zigtools/zls\n"
                    "You can install it via:\n"
                    "  - Package managers (brew install zls, scoop install zls, etc.)\n"
                    "  - Download pre-built binaries from GitHub releases\n"
                    "  - Build from source with: zig build -Doptimize=ReleaseSafe\n\n"
                    "After installation, make sure 'zls' is added to your PATH."
                )
        return True
    def __init__(self, config: LanguageServerConfig, repository_root_path: str, solidlsp_settings: SolidLSPSettings) -> None:
        """Create a ZLS-backed language server for the repository.

        :param config: language server configuration
        :param repository_root_path: absolute path to the repository root
        :param solidlsp_settings: global SolidLSP settings
        :raises RuntimeError: if Zig/ZLS are missing or the platform is Windows
            (propagated from :meth:`_setup_runtime_dependency`).
        """
        self._setup_runtime_dependency()
        super().__init__(config, repository_root_path, ProcessLaunchInfo(cmd="zls", cwd=repository_root_path), "zig", solidlsp_settings)
        self.request_id = 0
    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Zig Language Server.

        :param repository_absolute_path: absolute path used as the workspace
            root (sent as rootPath/rootUri and the single workspace folder).
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "locale": "en",
            "capabilities": {
                "textDocument": {
                    "synchronization": {"didSave": True, "dynamicRegistration": True},
                    "definition": {"dynamicRegistration": True},
                    "references": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "hierarchicalDocumentSymbolSupport": True,
                        # 1..26 covers all SymbolKind values defined by LSP.
                        "symbolKind": {"valueSet": list(range(1, 27))},
                    },
                    "completion": {
                        "dynamicRegistration": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                            "preselectSupport": True,
                        },
                    },
                    "hover": {
                        "dynamicRegistration": True,
                        "contentFormat": ["markdown", "plaintext"],
                    },
                },
                "workspace": {
                    "workspaceFolders": True,
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "configuration": True,
                },
            },
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
            "initializationOptions": {
                # ZLS specific options based on schema.json
                # Critical paths for ZLS to understand the project
                "zig_exe_path": shutil.which("zig"),  # Path to zig executable
                "zig_lib_path": None,  # Let ZLS auto-detect
                "build_runner_path": None,  # Let ZLS use its built-in runner
                "global_cache_path": None,  # Let ZLS use default cache
                # Build configuration
                "enable_build_on_save": True,  # Enable to analyze project structure
                "build_on_save_args": ["build"],
                # Features
                "enable_snippets": True,
                "enable_argument_placeholders": True,
                "semantic_tokens": "full",
                "warn_style": False,
                "highlight_global_var_declarations": False,
                "skip_std_references": False,
                "prefer_ast_check_as_child_process": True,
                "completion_label_details": True,
                # Inlay hints configuration
                "inlay_hints_show_variable_type_hints": True,
                "inlay_hints_show_struct_literal_field_type": True,
                "inlay_hints_show_parameter_name": True,
                "inlay_hints_show_builtin": True,
                "inlay_hints_exclude_single_argument": True,
                "inlay_hints_hide_redundant_param_names": False,
                "inlay_hints_hide_redundant_param_names_last_token": False,
            },
        }
        return initialize_params  # type: ignore[return-value]
    def _start_server(self) -> None:
        """Start ZLS server process"""
        # Minimal client-side handlers: capability registrations are accepted
        # silently; server log messages are forwarded to our logger; progress
        # and diagnostics notifications are intentionally dropped.
        def register_capability_handler(params: dict) -> None:
            return
        def window_log_message(msg: dict) -> None:
            log.info(f"LSP: window/logMessage: {msg}")
        def do_nothing(params: dict) -> None:
            return
        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        log.info("Starting ZLS server process")
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)
        log.info("Sending initialize request from LSP client to LSP server and awaiting response")
        init_response = self.server.send.initialize(initialize_params)
        # Verify server capabilities
        assert "textDocumentSync" in init_response["capabilities"]
        assert "definitionProvider" in init_response["capabilities"]
        assert "documentSymbolProvider" in init_response["capabilities"]
        assert "referencesProvider" in init_response["capabilities"]
        self.server.notify.initialized({})
        # ZLS server is ready after initialization
        # (no need to wait for an event)
        # Open build.zig if it exists to help ZLS understand project structure
        build_zig_path = os.path.join(self.repository_root_path, "build.zig")
        if os.path.exists(build_zig_path):
            try:
                with open(build_zig_path, encoding="utf-8") as f:
                    content = f.read()
                uri = pathlib.Path(build_zig_path).as_uri()
                self.server.notify.did_open_text_document(
                    {
                        "textDocument": {
                            "uri": uri,
                            "languageId": "zig",
                            "version": 1,
                            "text": content,
                        }
                    }
                )
                log.info("Opened build.zig to provide project context to ZLS")
            except Exception as e:
                # Best effort only: a missing/unreadable build.zig must not
                # prevent the server from starting.
                log.warning(f"Failed to open build.zig: {e}")
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/language_servers/zls.py",
"license": "MIT License",
"lines": 209,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:src/solidlsp/util/subprocess_util.py | import platform
import subprocess
def subprocess_kwargs() -> dict:
    """
    Build the platform-specific keyword arguments we want applied to every
    subprocess call.

    On Windows this suppresses the creation of a console window; elsewhere no
    extra flags are required and an empty dict is returned.
    """
    if platform.system() != "Windows":
        return {}
    return {"creationflags": subprocess.CREATE_NO_WINDOW}  # type: ignore
def quote_arg(arg: str) -> str:
    """
    Wrap an argument in double quotes when (and only when) it contains a
    space; arguments without spaces are returned unchanged.
    """
    return f'"{arg}"' if " " in arg else arg
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/util/subprocess_util.py",
"license": "MIT License",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
oraios/serena:src/solidlsp/util/zip.py | import fnmatch
import logging
import os
import sys
import zipfile
from pathlib import Path
from typing import Optional
# Module-level logger for extraction status and per-member failure reports.
log = logging.getLogger(__name__)
class SafeZipExtractor:
    """
    A utility class for extracting ZIP archives safely.

    Features:
    - Handles long file paths on Windows
    - Skips files that fail to extract, continuing with the rest
    - Creates necessary directories automatically
    - Materializes directory entries instead of treating them as files
    - Refuses entries whose resolved path escapes the extraction directory
      (zip-slip protection)
    - Optional include/exclude pattern filters
    """
    def __init__(
        self,
        archive_path: Path,
        extract_dir: Path,
        verbose: bool = True,
        include_patterns: Optional[list[str]] = None,
        exclude_patterns: Optional[list[str]] = None,
    ) -> None:
        """
        Initialize the SafeZipExtractor.
        :param archive_path: Path to the ZIP archive file
        :param extract_dir: Directory where files will be extracted
        :param verbose: Whether to log status messages
        :param include_patterns: List of glob patterns for files to extract (None = all files)
        :param exclude_patterns: List of glob patterns for files to skip
        """
        self.archive_path = Path(archive_path)
        self.extract_dir = Path(extract_dir)
        self.verbose = verbose
        self.include_patterns = include_patterns or []
        self.exclude_patterns = exclude_patterns or []
    def extract_all(self) -> None:
        """
        Extract all files from the archive, skipping any that fail.
        :raises FileNotFoundError: if the archive does not exist
        """
        if not self.archive_path.exists():
            raise FileNotFoundError(f"Archive not found: {self.archive_path}")
        if self.verbose:
            log.info(f"Extracting from: {self.archive_path} to {self.extract_dir}")
        with zipfile.ZipFile(self.archive_path, "r") as zip_ref:
            for member in zip_ref.infolist():
                if self._should_extract(member.filename):
                    self._extract_member(zip_ref, member)
                elif self.verbose:
                    log.info(f"Skipped: {member.filename}")
    def _should_extract(self, filename: str) -> bool:
        """
        Determine whether a file should be extracted based on include/exclude patterns.
        :param filename: The file name from the archive
        :return: True if the file should be extracted
        """
        # If include_patterns is set, only extract if it matches at least one pattern
        if self.include_patterns:
            if not any(fnmatch.fnmatch(filename, pattern) for pattern in self.include_patterns):
                return False
        # If exclude_patterns is set, skip if it matches any pattern
        if self.exclude_patterns:
            if any(fnmatch.fnmatch(filename, pattern) for pattern in self.exclude_patterns):
                return False
        return True
    def _extract_member(self, zip_ref: zipfile.ZipFile, member: zipfile.ZipInfo) -> None:
        """
        Extract a single member from the archive with error handling.

        Directory entries are created as directories (previously they were
        opened as files and logged as failures). Entries whose resolved
        destination lies outside the extraction directory are skipped with an
        error log (zip-slip protection). Any other failure is logged and
        extraction continues with the next member.

        :param zip_ref: Open ZipFile object
        :param member: ZipInfo object representing the file
        """
        try:
            target_path = self.extract_dir / member.filename
            # Zip-slip guard: a member name such as "../evil" must never
            # escape the extraction root.
            if not target_path.resolve().is_relative_to(self.extract_dir.resolve()):
                log.error(f"Failed to extract {member.filename}: path escapes extraction directory")
                return
            # Directory entries carry no file data; just materialize them.
            if member.is_dir():
                target_path.mkdir(parents=True, exist_ok=True)
                return
            # Ensure directory structure exists
            target_path.parent.mkdir(parents=True, exist_ok=True)
            # Handle long paths on Windows
            final_path = self._normalize_path(target_path)
            # Extract file
            with zip_ref.open(member) as source, open(final_path, "wb") as target:
                target.write(source.read())
            if self.verbose:
                log.info(f"Extracted: {member.filename}")
        except Exception as e:
            log.error(f"Failed to extract {member.filename}: {e}")
    @staticmethod
    def _normalize_path(path: Path) -> Path:
        """
        Adjust path to handle long paths on Windows.
        :param path: Original path
        :return: Normalized path
        """
        if sys.platform.startswith("win"):
            return Path(rf"\\?\{os.path.abspath(path)}")
        return path  # type: ignore
# Example usage:
# extractor = SafeZipExtractor(
# archive_path=Path("file.nupkg"),
# extract_dir=Path("extract_dir"),
# include_patterns=["*.dll", "*.xml"],
# exclude_patterns=["*.pdb"]
# )
# extractor.extract_all()
| {
"repo_id": "oraios/serena",
"file_path": "src/solidlsp/util/zip.py",
"license": "MIT License",
"lines": 104,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
oraios/serena:test/serena/test_tool_parameter_types.py | import logging
import pytest
from serena.config.serena_config import SerenaConfig
from serena.mcp import SerenaMCPFactory
from serena.tools.tools_base import ToolRegistry
@pytest.mark.parametrize("context", ("chatgpt", "codex", "oaicompat-agent"))
def test_all_tool_parameters_have_type(context):
    """
    For every tool exposed by Serena, ensure that the generated
    Open‑AI schema contains a ``type`` entry for each parameter.
    """
    config = SerenaConfig(gui_log_window=False, web_dashboard=False, log_level=logging.ERROR)
    tool_registry = ToolRegistry()
    config.included_optional_tools = tuple(tool_registry.get_tool_names_optional())
    mcp_factory = SerenaMCPFactory(context=context)
    # Initialize the agent so that the tools are available
    mcp_factory.agent = mcp_factory._create_serena_agent(config)
    for tool in list(mcp_factory._iter_tools()):
        schema = mcp_factory.make_mcp_tool(tool, openai_tool_compatible=True).parameters
        # Collect any parameter that lacks a type; fail per-tool so the first
        # offending tool is reported immediately.
        problems: list[str] = []
        print(f"Checking tool {tool}")
        if "properties" not in schema:
            problems.append(f"Tool {tool.get_name()!r} missing properties section")
        else:
            problems.extend(
                f"Tool {tool.get_name()!r} parameter {pname!r} missing 'type'"
                for pname, prop in schema["properties"].items()
                if "type" not in prop
            )
        if problems:
            raise AssertionError("\n".join(problems))
| {
"repo_id": "oraios/serena",
"file_path": "test/serena/test_tool_parameter_types.py",
"license": "MIT License",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/al/test_al_basic.py | import os
import pytest
from serena.symbol import LanguageServerSymbol
from solidlsp import SolidLanguageServer
from solidlsp.language_servers.al_language_server import ALLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_utils import SymbolUtils
from test.conftest import language_tests_enabled
# Module-wide markers: every test here is AL-specific and the whole module is
# skipped when AL language tests are disabled in the test configuration.
pytestmark = [pytest.mark.al, pytest.mark.skipif(not language_tests_enabled(Language.AL), reason="AL tests are disabled")]
class TestExtractALDisplayName:
    """Tests for the ALLanguageServer._extract_al_display_name method."""

    @staticmethod
    def _extract(raw_name: str) -> str:
        """Run the display-name normalization helper under test."""
        return ALLanguageServer._extract_al_display_name(raw_name)

    def test_table_with_quoted_name(self) -> None:
        """A quoted Table name is reduced to the bare display name."""
        assert self._extract('Table 50000 "TEST Customer"') == "TEST Customer"

    def test_page_with_quoted_name(self) -> None:
        """A quoted Page name is reduced to the bare display name."""
        assert self._extract('Page 50001 "TEST Customer Card"') == "TEST Customer Card"

    def test_codeunit_unquoted(self) -> None:
        """An unquoted Codeunit name is reduced to the bare identifier."""
        assert self._extract("Codeunit 50000 CustomerMgt") == "CustomerMgt"

    def test_enum_unquoted(self) -> None:
        """An unquoted Enum name is reduced to the bare identifier."""
        assert self._extract("Enum 50000 CustomerType") == "CustomerType"

    def test_interface_no_id(self) -> None:
        """Interface declarations carry no object ID; the name is extracted."""
        assert self._extract("Interface IPaymentProcessor") == "IPaymentProcessor"

    def test_table_extension(self) -> None:
        """TableExtension declarations are normalized like tables."""
        assert self._extract('TableExtension 50000 "Ext Customer"') == "Ext Customer"

    def test_page_extension(self) -> None:
        """PageExtension declarations are normalized like pages."""
        assert self._extract('PageExtension 50000 "My Page Ext"') == "My Page Ext"

    def test_non_al_object_unchanged(self) -> None:
        """Names that are not AL object declarations pass through untouched."""
        assert self._extract("fields") == "fields"
        assert self._extract("CreateCustomer") == "CreateCustomer"
        assert self._extract("Name") == "Name"

    def test_report_with_quoted_name(self) -> None:
        """A quoted Report name is reduced to the bare display name."""
        assert self._extract('Report 50000 "Sales Invoice"') == "Sales Invoice"

    def test_query_unquoted(self) -> None:
        """An unquoted Query name is reduced to the bare identifier."""
        assert self._extract("Query 50000 CustomerQuery") == "CustomerQuery"
@pytest.mark.al
class TestALLanguageServer:
    """Integration tests for AL symbol discovery via the live language server.

    All tests use the ``language_server`` fixture (parametrized with
    Language.AL) and navigate the raw LSP document-symbol dicts directly.
    """
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_symbol_names_are_normalized(self, language_server: SolidLanguageServer) -> None:
        """Test that AL symbol names are normalized (metadata stripped)."""
        file_path = os.path.join("src", "Tables", "Customer.Table.al")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        _all_symbols, root_symbols = symbols
        # Linear scan over root symbols; there is only a handful per file.
        customer_table = None
        for sym in root_symbols:
            if sym.get("name") == "TEST Customer":
                customer_table = sym
                break
        assert customer_table is not None, "Could not find 'TEST Customer' table symbol (name should be normalized)"
        # Name should be just "TEST Customer", not "Table 50000 'TEST Customer'"
        assert customer_table["name"] == "TEST Customer", f"Expected normalized name 'TEST Customer', got '{customer_table['name']}'"
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_find_symbol_exact_match(self, language_server: SolidLanguageServer) -> None:
        """Test that find_symbol can match AL symbols by normalized name without substring_matching."""
        file_path = os.path.join("src", "Tables", "Customer.Table.al")
        symbols = language_server.request_document_symbols(file_path)
        # Find symbols that match 'TEST Customer' using LanguageServerSymbol.find()
        for root in symbols.root_symbols:
            ls_symbol = LanguageServerSymbol(root)
            matches = ls_symbol.find("TEST Customer", substring_matching=False)
            if matches:
                assert len(matches) >= 1, "Should find at least one match for 'TEST Customer'"
                assert matches[0].name == "TEST Customer", f"Expected 'TEST Customer', got '{matches[0].name}'"
                return
        pytest.fail("Could not find 'TEST Customer' symbol by exact name match")
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_find_codeunit_exact_match(self, language_server: SolidLanguageServer) -> None:
        """Test finding a codeunit by its normalized name."""
        file_path = os.path.join("src", "Codeunits", "CustomerMgt.Codeunit.al")
        symbols = language_server.request_document_symbols(file_path)
        for root in symbols.root_symbols:
            ls_symbol = LanguageServerSymbol(root)
            matches = ls_symbol.find("CustomerMgt", substring_matching=False)
            if matches:
                assert len(matches) >= 1
                assert matches[0].name == "CustomerMgt"
                return
        pytest.fail("Could not find 'CustomerMgt' symbol by exact name match")
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test that AL Language Server can find symbols in the test repository with normalized names."""
        symbols = language_server.request_full_symbol_tree()
        # Check for table symbols - names should be normalized (no "Table 50000" prefix)
        assert SymbolUtils.symbol_tree_contains_name(symbols, "TEST Customer"), "TEST Customer table not found in symbol tree"
        # Check for page symbols
        assert SymbolUtils.symbol_tree_contains_name(symbols, "TEST Customer Card"), "TEST Customer Card page not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "TEST Customer List"), "TEST Customer List page not found in symbol tree"
        # Check for codeunit symbols
        assert SymbolUtils.symbol_tree_contains_name(symbols, "CustomerMgt"), "CustomerMgt codeunit not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(
            symbols, "PaymentProcessorImpl"
        ), "PaymentProcessorImpl codeunit not found in symbol tree"
        # Check for enum symbol
        assert SymbolUtils.symbol_tree_contains_name(symbols, "CustomerType"), "CustomerType enum not found in symbol tree"
        # Check for interface symbol
        assert SymbolUtils.symbol_tree_contains_name(symbols, "IPaymentProcessor"), "IPaymentProcessor interface not found in symbol tree"
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_find_table_fields(self, language_server: SolidLanguageServer) -> None:
        """Test that AL Language Server can find fields within a table."""
        file_path = os.path.join("src", "Tables", "Customer.Table.al")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # AL tables should have their fields as child symbols
        customer_table = None
        _all_symbols, root_symbols = symbols
        for sym in root_symbols:
            if sym.get("name") == "TEST Customer":
                customer_table = sym
                break
        assert customer_table is not None, "Could not find TEST Customer table symbol"
        # Check for field symbols (AL nests fields under a "fields" group)
        # NOTE: the inner checks are conditional on the server returning a
        # hierarchical symbol response; a flat response is tolerated.
        if "children" in customer_table:
            # Find the fields group
            fields_group = None
            for child in customer_table.get("children", []):
                if child.get("name") == "fields":
                    fields_group = child
                    break
            assert fields_group is not None, "Fields group not found in Customer table"
            # Check actual field names
            if "children" in fields_group:
                field_names = [child.get("name", "") for child in fields_group.get("children", [])]
                assert any("Name" in name for name in field_names), f"Name field not found. Fields: {field_names}"
                assert any("Balance" in name for name in field_names), f"Balance field not found. Fields: {field_names}"
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_find_procedures(self, language_server: SolidLanguageServer) -> None:
        """Test that AL Language Server can find procedures in codeunits."""
        file_path = os.path.join("src", "Codeunits", "CustomerMgt.Codeunit.al")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Find the codeunit symbol - name should be normalized to 'CustomerMgt'
        codeunit_symbol = None
        _all_symbols, root_symbols = symbols
        for sym in root_symbols:
            if sym.get("name") == "CustomerMgt":
                codeunit_symbol = sym
                break
        assert codeunit_symbol is not None, "Could not find CustomerMgt codeunit symbol"
        # Check for procedure symbols (if hierarchical)
        if "children" in codeunit_symbol:
            procedure_names = [child.get("name", "") for child in codeunit_symbol.get("children", [])]
            assert any("CreateCustomer" in name for name in procedure_names), "CreateCustomer procedure not found"
            assert any("TestNoSeries" in name for name in procedure_names), "TestNoSeries procedure not found"
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_find_referencing_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test that AL Language Server can find references to symbols."""
        # Find references to the Customer table from the CustomerMgt codeunit
        table_file = os.path.join("src", "Tables", "Customer.Table.al")
        symbols = language_server.request_document_symbols(table_file).get_all_symbols_and_roots()
        # Find the Customer table symbol (name is normalized)
        customer_symbol = None
        _all_symbols, root_symbols = symbols
        for sym in root_symbols:
            if sym.get("name") == "TEST Customer":
                customer_symbol = sym
                break
        # References are requested at the symbol's selectionRange start, which
        # points at the identifier itself rather than the whole declaration.
        if customer_symbol and "selectionRange" in customer_symbol:
            sel_start = customer_symbol["selectionRange"]["start"]
            refs = language_server.request_references(table_file, sel_start["line"], sel_start["character"])
            # The Customer table should be referenced in CustomerMgt.Codeunit.al
            assert any(
                "CustomerMgt.Codeunit.al" in ref.get("relativePath", "") for ref in refs
            ), "Customer table should be referenced in CustomerMgt.Codeunit.al"
            # It should also be referenced in CustomerCard.Page.al
            assert any(
                "CustomerCard.Page.al" in ref.get("relativePath", "") for ref in refs
            ), "Customer table should be referenced in CustomerCard.Page.al"
    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_cross_file_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test that AL Language Server can handle cross-file symbol relationships."""
        # Get all symbols to verify cross-file visibility
        symbols = language_server.request_full_symbol_tree()
        # Count how many AL object symbols we found (names are now normalized)
        al_object_names = []
        # Recursively walk the symbol tree, collecting every symbol name.
        def collect_symbols(syms: list) -> None:
            for sym in syms:
                if isinstance(sym, dict):
                    name = sym.get("name", "")
                    # These are normalized names now, so just collect them
                    al_object_names.append(name)
                    if "children" in sym:
                        collect_symbols(sym["children"])
        collect_symbols(symbols)
        # We should find expected normalized names
        assert "TEST Customer" in al_object_names, f"TEST Customer not found in: {al_object_names}"
        assert "CustomerMgt" in al_object_names, f"CustomerMgt not found in: {al_object_names}"
        assert "CustomerType" in al_object_names, f"CustomerType not found in: {al_object_names}"
@pytest.mark.al
class TestALHoverInjection:
"""Tests for hover injection of original AL object names with type and ID."""
def _get_symbol_hover(self, language_server: SolidLanguageServer, file_path: str, symbol_name: str) -> tuple[dict | None, str | None]:
"""Helper to get hover info for a symbol by name.
Returns (hover_info, hover_value) tuple.
"""
symbols = language_server.request_document_symbols(file_path)
for sym in symbols.root_symbols:
if sym.get("name") == symbol_name:
sel_range = sym.get("selectionRange", {})
start = sel_range.get("start", {})
line = start.get("line", 0)
char = start.get("character", 0)
hover = language_server.request_hover(file_path, line, char)
if hover and "contents" in hover:
return hover, hover["contents"].get("value", "")
return hover, None
return None, None
def _get_child_symbol_hover(
self, language_server: SolidLanguageServer, file_path: str, parent_name: str, child_name_contains: str
) -> tuple[dict | None, str | None]:
"""Helper to get hover info for a child symbol.
Returns (hover_info, hover_value) tuple.
"""
symbols = language_server.request_document_symbols(file_path)
for sym in symbols.root_symbols:
if sym.get("name") == parent_name:
for child in sym.get("children", []):
if child_name_contains in child.get("name", ""):
sel_range = child.get("selectionRange", {})
start = sel_range.get("start", {})
line = start.get("line", 0)
char = start.get("character", 0)
hover = language_server.request_hover(file_path, line, char)
if hover and "contents" in hover:
return hover, hover["contents"].get("value", "")
return hover, None
return None, None
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_table_injects_full_name(self, language_server: SolidLanguageServer) -> None:
"""Test that hovering over a Table symbol shows the full object name with ID."""
file_path = os.path.join("src", "Tables", "Customer.Table.al")
hover, value = self._get_symbol_hover(language_server, file_path, "TEST Customer")
assert hover is not None, "Hover should return a result for Table symbol"
assert value is not None, "Hover should have content"
assert '**Table 50000 "TEST Customer"**' in value, f"Hover should contain full Table name with ID. Got: {value[:200]}"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_page_injects_full_name(self, language_server: SolidLanguageServer) -> None:
"""Test that hovering over a Page symbol shows the full object name with ID."""
file_path = os.path.join("src", "Pages", "CustomerCard.Page.al")
hover, value = self._get_symbol_hover(language_server, file_path, "TEST Customer Card")
assert hover is not None, "Hover should return a result for Page symbol"
assert value is not None, "Hover should have content"
assert '**Page 50001 "TEST Customer Card"**' in value, f"Hover should contain full Page name with ID. Got: {value[:200]}"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_codeunit_injects_full_name(self, language_server: SolidLanguageServer) -> None:
"""Test that hovering over a Codeunit symbol shows the full object name with ID."""
file_path = os.path.join("src", "Codeunits", "CustomerMgt.Codeunit.al")
hover, value = self._get_symbol_hover(language_server, file_path, "CustomerMgt")
assert hover is not None, "Hover should return a result for Codeunit symbol"
assert value is not None, "Hover should have content"
assert "**Codeunit 50000 CustomerMgt**" in value, f"Hover should contain full Codeunit name with ID. Got: {value[:200]}"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_enum_injects_full_name(self, language_server: SolidLanguageServer) -> None:
"""Test that hovering over an Enum symbol shows the full object name with ID."""
file_path = os.path.join("src", "Enums", "CustomerType.Enum.al")
hover, value = self._get_symbol_hover(language_server, file_path, "CustomerType")
assert hover is not None, "Hover should return a result for Enum symbol"
assert value is not None, "Hover should have content"
assert "**Enum 50000 CustomerType**" in value, f"Hover should contain full Enum name with ID. Got: {value[:200]}"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_interface_injects_full_name(self, language_server: SolidLanguageServer) -> None:
"""Test that hovering over an Interface symbol shows the full object name (no ID for interfaces)."""
file_path = os.path.join("src", "Interfaces", "IPaymentProcessor.Interface.al")
hover, value = self._get_symbol_hover(language_server, file_path, "IPaymentProcessor")
assert hover is not None, "Hover should return a result for Interface symbol"
assert value is not None, "Hover should have content"
assert "**Interface IPaymentProcessor**" in value, f"Hover should contain full Interface name. Got: {value[:200]}"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_procedure_no_injection(self, language_server: SolidLanguageServer) -> None:
"""Test that hovering over a procedure does NOT inject object name (procedures are not normalized)."""
file_path = os.path.join("src", "Codeunits", "CustomerMgt.Codeunit.al")
hover, value = self._get_child_symbol_hover(language_server, file_path, "CustomerMgt", "CreateCustomer")
assert hover is not None, "Hover should return a result for procedure"
assert value is not None, "Hover should have content"
# Procedure hover should NOT start with ** (no injection)
assert not value.startswith("**"), f"Procedure hover should not have injected name. Got: {value[:200]}"
# But should contain procedure info
assert "CreateCustomer" in value, f"Hover should contain procedure name. Got: {value[:200]}"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_field_no_injection(self, language_server: SolidLanguageServer) -> None:
"""Test that hovering over a field does NOT inject object name (fields are not normalized)."""
file_path = os.path.join("src", "Tables", "Customer.Table.al")
symbols = language_server.request_document_symbols(file_path)
# Navigate to a field: Table -> fields -> specific field
for sym in symbols.root_symbols:
if sym.get("name") == "TEST Customer":
for child in sym.get("children", []):
if child.get("name") == "fields":
for field in child.get("children", []):
if "Name" in field.get("name", ""):
sel_range = field.get("selectionRange", {})
start = sel_range.get("start", {})
line = start.get("line", 0)
char = start.get("character", 0)
hover = language_server.request_hover(file_path, line, char)
assert hover is not None, "Hover should return a result for field"
value = hover.get("contents", {}).get("value", "")
# Field hover should NOT start with ** (no injection)
assert not value.startswith("**"), f"Field hover should not have injected name. Got: {value[:200]}"
return
pytest.fail("Could not find a field to test hover on")
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_multiple_objects_correct_injection(self, language_server: SolidLanguageServer) -> None:
"""Test that multiple AL objects each get their correct full name injected."""
test_cases = [
(os.path.join("src", "Tables", "Customer.Table.al"), "TEST Customer", 'Table 50000 "TEST Customer"'),
(os.path.join("src", "Codeunits", "CustomerMgt.Codeunit.al"), "CustomerMgt", "Codeunit 50000 CustomerMgt"),
(os.path.join("src", "Enums", "CustomerType.Enum.al"), "CustomerType", "Enum 50000 CustomerType"),
]
for file_path, symbol_name, expected_full_name in test_cases:
hover, value = self._get_symbol_hover(language_server, file_path, symbol_name)
assert hover is not None, f"Hover should return a result for {symbol_name}"
assert value is not None, f"Hover should have content for {symbol_name}"
assert (
f"**{expected_full_name}**" in value
), f"Hover for {symbol_name} should contain '{expected_full_name}'. Got: {value[:200]}"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_contains_separator_after_injection(self, language_server: SolidLanguageServer) -> None:
"""Test that injected hover has a separator between injected name and original content."""
file_path = os.path.join("src", "Tables", "Customer.Table.al")
hover, value = self._get_symbol_hover(language_server, file_path, "TEST Customer")
assert hover is not None, "Hover should return a result"
assert value is not None, "Hover should have content"
# Should have the separator after the bold name
assert "---" in value, f"Hover should contain separator. Got: {value[:300]}"
# The separator should come after the injected name
bold_end = value.find("**", 2) # Find closing **
separator_pos = value.find("---")
assert separator_pos > bold_end, "Separator should come after the injected name"
@pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
def test_hover_preserves_original_content(self, language_server: SolidLanguageServer) -> None:
    """Injection must not replace the server's original hover content, only prepend to it."""
    table_file = os.path.join("src", "Tables", "Customer.Table.al")
    hover, value = self._get_symbol_hover(language_server, table_file, "TEST Customer")
    assert hover is not None, "Hover should return a result"
    assert value is not None, "Hover should have content"
    # The server's own AL code fence and table definition must survive injection.
    code_fence = "```al"
    original_definition = 'Table "TEST Customer"'
    assert code_fence in value, f"Hover should contain original AL code block. Got: {value[:500]}"
    assert original_definition in value, f"Hover should contain original table definition. Got: {value[:500]}"
@pytest.mark.al
class TestALPathNormalization:
    """Tests for path normalization in the hover injection cache.

    The AL language server injects an object's full name into hover results,
    keyed by file path. These tests verify that forward-slash, backslash, and
    mixed path formats all resolve to the same normalized cache entry.

    Fix over the previous version: the duplicated symbol-lookup/hover loop is
    factored into ``_request_hover_for_symbol``, which fails the test when the
    symbol is missing. Previously, the multi-file caching test silently passed
    when a symbol was not found (its inner loop just ``break``-ed with no
    assertion ever executed).
    """

    def _request_hover_for_symbol(
        self,
        language_server: SolidLanguageServer,
        symbols_path: str,
        hover_path: str,
        symbol_name: str,
    ):
        """Find *symbol_name* via document symbols requested with *symbols_path*,
        then request hover at its selection-range start using *hover_path*.

        The two paths may use different separators on purpose — the hover
        injection cache is expected to normalize them to the same entry.
        Fails the test if the symbol is not present; otherwise returns the
        raw hover result (which may be None if the server returns nothing).
        """
        symbols = language_server.request_document_symbols(symbols_path)
        for sym in symbols.root_symbols:
            if sym.get("name") == symbol_name:
                start = sym.get("selectionRange", {}).get("start", {})
                return language_server.request_hover(
                    hover_path, start.get("line", 0), start.get("character", 0)
                )
        pytest.fail(f"Could not find {symbol_name} symbol")

    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_hover_with_forward_slash_path(self, language_server: SolidLanguageServer) -> None:
        """Test that hover injection works with forward slash paths."""
        path = "src/Tables/Customer.Table.al"
        hover = self._request_hover_for_symbol(language_server, path, path, "TEST Customer")
        assert hover is not None, "Hover should return a result"
        value = hover.get("contents", {}).get("value", "")
        assert '**Table 50000 "TEST Customer"**' in value, f"Hover should have injection. Got: {value[:200]}"

    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_hover_with_backslash_path(self, language_server: SolidLanguageServer) -> None:
        """Test that hover injection works with backslash paths (Windows style)."""
        path = "src\\Tables\\Customer.Table.al"
        hover = self._request_hover_for_symbol(language_server, path, path, "TEST Customer")
        assert hover is not None, "Hover should return a result"
        value = hover.get("contents", {}).get("value", "")
        assert '**Table 50000 "TEST Customer"**' in value, f"Hover should have injection. Got: {value[:200]}"

    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_hover_with_mixed_path_formats_symbols_backslash_hover_forward(self, language_server: SolidLanguageServer) -> None:
        """Test hover works when symbols requested with backslash but hover with forward slash."""
        hover = self._request_hover_for_symbol(
            language_server,
            "src\\Tables\\Customer.Table.al",
            "src/Tables/Customer.Table.al",
            "TEST Customer",
        )
        assert hover is not None, "Hover should return a result"
        value = hover.get("contents", {}).get("value", "")
        assert (
            '**Table 50000 "TEST Customer"**' in value
        ), f"Hover injection should work with mixed path formats. Got: {value[:200]}"

    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_hover_with_mixed_path_formats_symbols_forward_hover_backslash(self, language_server: SolidLanguageServer) -> None:
        """Test hover works when symbols requested with forward slash but hover with backslash."""
        hover = self._request_hover_for_symbol(
            language_server,
            "src/Tables/Customer.Table.al",
            "src\\Tables\\Customer.Table.al",
            "TEST Customer",
        )
        assert hover is not None, "Hover should return a result"
        value = hover.get("contents", {}).get("value", "")
        assert (
            '**Table 50000 "TEST Customer"**' in value
        ), f"Hover injection should work with mixed path formats. Got: {value[:200]}"

    @pytest.mark.parametrize("language_server", [Language.AL], indirect=True)
    def test_hover_caching_multiple_files_different_path_formats(self, language_server: SolidLanguageServer) -> None:
        """Test that hover injection cache works correctly across multiple files with different path formats."""
        test_cases = [
            ("src/Tables/Customer.Table.al", "src\\Tables\\Customer.Table.al", "TEST Customer", 'Table 50000 "TEST Customer"'),
            (
                "src\\Codeunits\\CustomerMgt.Codeunit.al",
                "src/Codeunits/CustomerMgt.Codeunit.al",
                "CustomerMgt",
                "Codeunit 50000 CustomerMgt",
            ),
        ]
        for symbols_path, hover_path, symbol_name, expected_injection in test_cases:
            # Unlike the previous version, a missing symbol now fails the test
            # inside the helper instead of silently passing.
            hover = self._request_hover_for_symbol(language_server, symbols_path, hover_path, symbol_name)
            assert hover is not None, f"Hover should return a result for {symbol_name}"
            value = hover.get("contents", {}).get("value", "")
            assert (
                f"**{expected_injection}**" in value
            ), f"Hover for {symbol_name} should have injection with mixed paths. Got: {value[:200]}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/al/test_al_basic.py",
"license": "MIT License",
"lines": 454,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/bash/test_bash_basic.py | """
Basic integration tests for the bash language server functionality.
These tests validate the functionality of the language server APIs
like request_document_symbols using the bash test repository.
"""
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
@pytest.mark.bash
class TestBashLanguageServerBasics:
    """Test basic functionality of the bash language server.

    Fixes over the previous version: the expected-utils-function list and the
    LSP function symbol-kind constant were duplicated across tests and are now
    shared class attributes; a comment falsely claiming ``include_body=True``
    was passed to ``request_document_symbols`` has been corrected.
    """

    # LSP SymbolKind.Function per the LSP specification's SymbolKind enumeration.
    _FUNCTION_KIND = 12

    # Functions defined in utils.sh in the bash test repository.
    _EXPECTED_UTILS_FUNCTIONS = (
        "to_uppercase",
        "to_lowercase",
        "trim_whitespace",
        "backup_file",
        "contains_element",
        "log_message",
        "is_valid_email",
        "is_number",
    )

    def _function_symbols(self, language_server: SolidLanguageServer, rel_path: str) -> list:
        """Return the function symbols (LSP kind 12) found in *rel_path*."""
        all_symbols, _root_symbols = language_server.request_document_symbols(rel_path).get_all_symbols_and_roots()
        return [symbol for symbol in all_symbols if symbol.get("kind") == self._FUNCTION_KIND]

    @pytest.mark.parametrize("language_server", [Language.BASH], indirect=True)
    def test_bash_language_server_initialization(self, language_server: SolidLanguageServer) -> None:
        """Test that bash language server can be initialized successfully."""
        assert language_server is not None
        assert language_server.language == Language.BASH

    @pytest.mark.parametrize("language_server", [Language.BASH], indirect=True)
    def test_bash_request_document_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test request_document_symbols for bash files."""
        function_symbols = self._function_symbols(language_server, "main.sh")
        function_names = [symbol["name"] for symbol in function_symbols]
        # Should detect all 3 functions from main.sh
        assert "greet_user" in function_names, "Should find greet_user function"
        assert "process_items" in function_names, "Should find process_items function"
        assert "main" in function_names, "Should find main function"
        assert len(function_symbols) >= 3, f"Should find at least 3 functions, found {len(function_symbols)}"

    @pytest.mark.parametrize("language_server", [Language.BASH], indirect=True)
    def test_bash_request_document_symbols_with_body(self, language_server: SolidLanguageServer) -> None:
        """Test that function symbols expose their body text when the server provides it."""
        # NOTE(review): the previous comment claimed include_body=True was
        # passed here, but it never was; the body is only checked
        # opportunistically when the symbol carries one.
        function_symbols = self._function_symbols(language_server, "main.sh")
        greet_user_symbol = next((sym for sym in function_symbols if sym["name"] == "greet_user"), None)
        assert greet_user_symbol is not None, "Should find greet_user function"
        if "body" in greet_user_symbol:
            body = greet_user_symbol["body"].get_text()
            assert "function greet_user()" in body, "Function body should contain function definition"
            assert "case" in body.lower(), "Function body should contain case statement"

    @pytest.mark.parametrize("language_server", [Language.BASH], indirect=True)
    def test_bash_utils_functions(self, language_server: SolidLanguageServer) -> None:
        """Test function detection in utils.sh file."""
        utils_function_symbols = self._function_symbols(language_server, "utils.sh")
        utils_function_names = [symbol["name"] for symbol in utils_function_symbols]
        for func_name in self._EXPECTED_UTILS_FUNCTIONS:
            assert func_name in utils_function_names, f"Should find {func_name} function in utils.sh"
        assert len(utils_function_symbols) >= 8, f"Should find at least 8 functions in utils.sh, found {len(utils_function_symbols)}"

    @pytest.mark.parametrize("language_server", [Language.BASH], indirect=True)
    def test_bash_function_syntax_patterns(self, language_server: SolidLanguageServer) -> None:
        """Test that LSP detects different bash function syntax patterns correctly."""
        # main.sh mixes the 'function' keyword and traditional syntax;
        # utils.sh uses the 'function' keyword throughout.
        main_functions = self._function_symbols(language_server, "main.sh")
        main_function_names = [func["name"] for func in main_functions]
        utils_functions = self._function_symbols(language_server, "utils.sh")
        utils_function_names = [func["name"] for func in utils_functions]
        # main() uses traditional syntax: main() {
        assert "main" in main_function_names, "LSP should detect traditional function syntax"
        # Functions with 'function' keyword: function name() {
        assert "greet_user" in main_function_names, "LSP should detect function keyword syntax"
        assert "process_items" in main_function_names, "LSP should detect function keyword syntax"
        # Verify all expected utils functions are detected by LSP
        for expected_func in self._EXPECTED_UTILS_FUNCTIONS:
            assert expected_func in utils_function_names, f"LSP should detect {expected_func} function"
        # Verify total counts match expectations
        assert len(main_functions) >= 3, f"Should find at least 3 functions in main.sh, found {len(main_functions)}"
        assert len(utils_functions) >= 8, f"Should find at least 8 functions in utils.sh, found {len(utils_functions)}"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/bash/test_bash_basic.py",
"license": "MIT License",
"lines": 96,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/dart/test_dart_basic.py | import os
from pathlib import Path
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_types import SymbolKind
from solidlsp.ls_utils import SymbolUtils
@pytest.mark.dart
class TestDartLanguageServer:
    """Integration tests for basic Dart language-server operations.

    All line/character positions passed to the language server are 0-indexed
    (LSP convention); comments mentioning 1-indexed lines describe locations
    in the Dart test repository's fixture files. Several tests tolerate
    server-capability differences by skipping or soft-checking results.
    """

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_ls_is_running(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test that the language server starts and stops successfully."""
        # The fixture already handles start and stop
        assert language_server.is_running()
        assert Path(language_server.language_server.repository_root_path).resolve() == repo_path.resolve()

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_find_definition_within_file(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test finding definition of a method within the same file."""
        # In lib/main.dart:
        # Line 105: final result1 = calc.add(5, 3); // Reference to add method
        # Line 12: int add(int a, int b) { // Definition of add method
        # Find definition of 'add' method from its usage
        main_dart_path = str(repo_path / "lib" / "main.dart")
        # Position: calc.add(5, 3) - cursor on 'add'
        # Line 105 (1-indexed) = line 104 (0-indexed), char position around 22
        definition_location_list = language_server.request_definition(main_dart_path, 104, 22)
        assert definition_location_list, f"Expected non-empty definition_location_list but got {definition_location_list=}"
        assert len(definition_location_list) >= 1
        definition_location = definition_location_list[0]
        assert definition_location["uri"].endswith("main.dart")
        # Definition of add method should be around line 11 (0-indexed)
        # But language server may return different positions
        assert definition_location["range"]["start"]["line"] >= 0

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_find_definition_across_files(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test finding definition across different files."""
        # Test finding definition of MathHelper class which is in helper.dart
        # In lib/main.dart line 50: MathHelper.power(step1, 2)
        main_dart_path = str(repo_path / "lib" / "main.dart")
        # Position: MathHelper.power(step1, 2) - cursor on 'MathHelper'
        # Line 50 (1-indexed) = line 49 (0-indexed), char position around 18
        definition_location_list = language_server.request_definition(main_dart_path, 49, 18)
        # Skip the test if language server doesn't find cross-file references
        # This is acceptable for a basic test - the important thing is that LS is working
        if not definition_location_list:
            pytest.skip("Language server doesn't support cross-file definition lookup for this case")
        assert len(definition_location_list) >= 1
        definition_location = definition_location_list[0]
        assert definition_location["uri"].endswith("helper.dart")
        assert definition_location["range"]["start"]["line"] >= 0

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_find_definition_class_method(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test finding definition of a class method."""
        # In lib/main.dart:
        # Line 50: final step2 = MathHelper.power(step1, 2); // Reference to MathHelper.power method
        # In lib/helper.dart:
        # Line 14: static double power(double base, int exponent) { // Definition of power method
        main_dart_path = str(repo_path / "lib" / "main.dart")
        # Position: MathHelper.power(step1, 2) - cursor on 'power'
        # Line 50 (1-indexed) = line 49 (0-indexed), char position around 30
        definition_location_list = language_server.request_definition(main_dart_path, 49, 30)
        assert definition_location_list, f"Expected non-empty definition_location_list but got {definition_location_list=}"
        assert len(definition_location_list) >= 1
        definition_location = definition_location_list[0]
        assert definition_location["uri"].endswith("helper.dart")
        # Definition of power method should be around line 13 (0-indexed)
        assert 12 <= definition_location["range"]["start"]["line"] <= 16

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_find_references_within_file(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test finding references to a method within the same file."""
        main_dart_path = str(repo_path / "lib" / "main.dart")
        # Find references to the 'add' method from its definition
        # Line 12: int add(int a, int b) { // Definition of add method
        # Line 105: final result1 = calc.add(5, 3); // Usage of add method
        references = language_server.request_references(main_dart_path, 11, 6)  # cursor on 'add' in definition
        assert references, f"Expected non-empty references but got {references=}"
        # Should find at least the usage of add method
        assert len(references) >= 1
        # Check that we have a reference in main.dart
        main_dart_references = [ref for ref in references if ref["uri"].endswith("main.dart")]
        assert len(main_dart_references) >= 1

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_find_references_across_files(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test finding references across different files."""
        helper_dart_path = str(repo_path / "lib" / "helper.dart")
        # Find references to the 'subtract' function from its definition in helper.dart
        # Definition is in helper.dart, usage is in main.dart
        references = language_server.request_references(helper_dart_path, 4, 4)  # cursor on 'subtract' in definition
        assert references, f"Expected non-empty references for subtract function but got {references=}"
        # Should find references in main.dart
        main_dart_references = [ref for ref in references if ref["uri"].endswith("main.dart")]
        assert len(main_dart_references) >= 1

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_find_definition_constructor(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test finding definition of a constructor call."""
        main_dart_path = str(repo_path / "lib" / "main.dart")
        # In lib/main.dart:
        # Line 104: final calc = Calculator(); // Reference to Calculator constructor
        # Line 4: class Calculator { // Definition of Calculator class
        definition_location_list = language_server.request_definition(main_dart_path, 103, 18)  # cursor on 'Calculator'
        assert definition_location_list, f"Expected non-empty definition_location_list but got {definition_location_list=}"
        assert len(definition_location_list) >= 1
        definition_location = definition_location_list[0]
        assert definition_location["uri"].endswith("main.dart")
        # Definition of Calculator class should be around line 3 (0-indexed)
        assert 3 <= definition_location["range"]["start"]["line"] <= 7

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    @pytest.mark.parametrize("repo_path", [Language.DART], indirect=True)
    def test_find_definition_import(self, language_server: SolidLanguageServer, repo_path: Path) -> None:
        """Test finding definition through imports."""
        models_dart_path = str(repo_path / "lib" / "models.dart")
        # Test finding definition of User class name where it's used
        # In lib/models.dart line 27 (constructor): User(this.id, this.name, this.email, this._age);
        definition_location_list = language_server.request_definition(models_dart_path, 26, 2)  # cursor on 'User' in constructor
        # Skip if language server doesn't find definition in this case
        if not definition_location_list:
            pytest.skip("Language server doesn't support definition lookup for this case")
        assert len(definition_location_list) >= 1
        definition_location = definition_location_list[0]
        # Language server might return SDK files instead of local files
        # This is acceptable behavior - the important thing is that it found a definition
        assert "dart" in definition_location["uri"].lower()

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """Test finding symbols in the full symbol tree."""
        symbols = language_server.request_full_symbol_tree()
        assert SymbolUtils.symbol_tree_contains_name(symbols, "Calculator"), "Calculator class not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "add"), "add method not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "subtract"), "subtract function not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "MathHelper"), "MathHelper class not found in symbol tree"
        assert SymbolUtils.symbol_tree_contains_name(symbols, "User"), "User class not found in symbol tree"

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_find_referencing_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test finding references using symbol selection range."""
        file_path = os.path.join("lib", "main.dart")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Handle nested symbol structure - symbols can be nested in lists
        symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
        # Find the 'add' method symbol in Calculator class
        add_symbol = None
        for sym in symbol_list:
            if sym.get("name") == "add":
                add_symbol = sym
                break
            # Check for nested symbols (methods inside classes)
            if "children" in sym and sym.get("name") == "Calculator":
                for child in sym["children"]:
                    if child.get("name") == "add":
                        add_symbol = child
                        break
            if add_symbol:
                break
        assert add_symbol is not None, "Could not find 'add' method symbol in main.dart"
        sel_start = add_symbol["selectionRange"]["start"]
        refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
        # Check that we found references - at least one should be in main.dart
        assert any(
            "main.dart" in ref.get("relativePath", "") or "main.dart" in ref.get("uri", "") for ref in refs
        ), "main.dart should reference add method (tried all positions in selectionRange)"

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_containing_symbol_method(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol for a method."""
        file_path = os.path.join("lib", "main.dart")
        # Line 14 is inside the add method body (around 'final result = a + b;')
        containing_symbol = language_server.request_containing_symbol(file_path, 13, 10, include_body=True)
        # Verify that we found the containing symbol
        if containing_symbol is not None:
            assert containing_symbol["name"] == "add"
            assert containing_symbol["kind"] == SymbolKind.Method
            if "body" in containing_symbol:
                body = containing_symbol["body"].get_text()
                assert "add" in body or "final result" in body

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_containing_symbol_class(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol for a class."""
        file_path = os.path.join("lib", "main.dart")
        # Line 4 is the Calculator class definition line
        containing_symbol = language_server.request_containing_symbol(file_path, 4, 6)
        # Verify that we found the containing symbol
        if containing_symbol is not None:
            assert containing_symbol["name"] == "Calculator"
            assert containing_symbol["kind"] == SymbolKind.Class

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_containing_symbol_nested(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol with nested scopes."""
        file_path = os.path.join("lib", "main.dart")
        # Line 14 is inside the add method inside Calculator class
        containing_symbol = language_server.request_containing_symbol(file_path, 13, 20)
        # Verify that we found the innermost containing symbol (the method)
        if containing_symbol is not None:
            assert containing_symbol["name"] == "add"
            assert containing_symbol["kind"] == SymbolKind.Method

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_defining_symbol_variable(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for a variable usage."""
        file_path = os.path.join("lib", "main.dart")
        # Line 14 contains 'final result = a + b;' - test position on 'result'
        defining_symbol = language_server.request_defining_symbol(file_path, 13, 10)
        # The defining symbol might be the variable itself or the containing method
        # This is acceptable behavior - different language servers handle this differently
        if defining_symbol is not None:
            assert defining_symbol.get("name") in ["result", "add"]
            if defining_symbol.get("name") == "add":
                assert defining_symbol.get("kind") == SymbolKind.Method.value

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_defining_symbol_imported_class(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for an imported class/function."""
        file_path = os.path.join("lib", "main.dart")
        # Line 20 references 'subtract' which was imported from helper.dart
        defining_symbol = language_server.request_defining_symbol(file_path, 19, 18)
        # Verify that we found the defining symbol - this should be the subtract function from helper.dart
        if defining_symbol is not None:
            assert defining_symbol.get("name") == "subtract"
            # Could be Function or Method depending on language server interpretation
            assert defining_symbol.get("kind") in [SymbolKind.Function.value, SymbolKind.Method.value]

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_defining_symbol_class_method(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for a static class method."""
        file_path = os.path.join("lib", "main.dart")
        # Line 50 references MathHelper.power - test position on 'power'
        defining_symbol = language_server.request_defining_symbol(file_path, 49, 30)
        # Verify that we found the defining symbol - should be the power method
        if defining_symbol is not None:
            assert defining_symbol.get("name") == "power"
            assert defining_symbol.get("kind") == SymbolKind.Method.value

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_document_symbols(self, language_server: SolidLanguageServer) -> None:
        """Test getting document symbols from a Dart file."""
        file_path = os.path.join("lib", "main.dart")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Check that we have symbols
        assert len(symbols) > 0
        # Flatten the symbols if they're nested
        symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
        # Look for expected classes and methods
        symbol_names = [s.get("name") for s in symbol_list]
        assert "Calculator" in symbol_names
        # Check for nested symbols (methods inside classes) - optional
        calculator_symbol = next((s for s in symbol_list if s.get("name") == "Calculator"), None)
        if calculator_symbol and "children" in calculator_symbol and calculator_symbol["children"]:
            method_names = [child.get("name") for child in calculator_symbol["children"]]
            # If children are populated, we should find the add method
            assert "add" in method_names
        else:
            # Some language servers may not populate children in document symbols
            # This is acceptable behavior - the important thing is we found the class
            pass

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_request_referencing_symbols_comprehensive(self, language_server: SolidLanguageServer) -> None:
        """Test comprehensive referencing symbols functionality."""
        file_path = os.path.join("lib", "main.dart")
        symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
        # Handle nested symbol structure
        symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
        # Find Calculator class and test its references
        calculator_symbol = None
        for sym in symbol_list:
            if sym.get("name") == "Calculator":
                calculator_symbol = sym
                break
        if calculator_symbol and "selectionRange" in calculator_symbol:
            sel_start = calculator_symbol["selectionRange"]["start"]
            refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
            # Should find references to Calculator (constructor calls, etc.)
            if refs:
                # Verify the structure of referencing symbols
                for ref in refs:
                    assert "uri" in ref or "relativePath" in ref
                    if "range" in ref:
                        assert "start" in ref["range"]
                        assert "end" in ref["range"]

    @pytest.mark.parametrize("language_server", [Language.DART], indirect=True)
    def test_cross_file_symbol_resolution(self, language_server: SolidLanguageServer) -> None:
        """Test symbol resolution across multiple files."""
        helper_file_path = os.path.join("lib", "helper.dart")
        # Test finding references to subtract function from helper.dart in main.dart
        helper_symbols = language_server.request_document_symbols(helper_file_path).get_all_symbols_and_roots()
        symbol_list = helper_symbols[0] if helper_symbols and isinstance(helper_symbols[0], list) else helper_symbols
        subtract_symbol = next((s for s in symbol_list if s.get("name") == "subtract"), None)
        if subtract_symbol and "selectionRange" in subtract_symbol:
            sel_start = subtract_symbol["selectionRange"]["start"]
            refs = language_server.request_references(helper_file_path, sel_start["line"], sel_start["character"])
            # Should find references in main.dart
            main_dart_refs = [ref for ref in refs if "main.dart" in ref.get("uri", "") or "main.dart" in ref.get("relativePath", "")]
            # Note: This may not always work depending on language server capabilities
            # So we don't assert - just verify the structure if we get results
            if main_dart_refs:
                for ref in main_dart_refs:
                    assert "range" in ref or "location" in ref
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/dart/test_dart_basic.py",
"license": "MIT License",
"lines": 296,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/elm/test_elm_basic.py | import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_utils import SymbolUtils
@pytest.mark.elm
class TestElmLanguageServer:
    """Basic symbol-tree and reference lookups against the Elm test repository."""

    @pytest.mark.parametrize("language_server", [Language.ELM], indirect=True)
    def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
        """Every expected top-level Elm function appears in the full symbol tree."""
        tree = language_server.request_full_symbol_tree()
        for fn_name in ("greet", "calculateSum", "formatMessage", "addNumbers"):
            assert SymbolUtils.symbol_tree_contains_name(
                tree, fn_name
            ), f"{fn_name} function not found in symbol tree"

    @pytest.mark.parametrize("language_server", [Language.ELM], indirect=True)
    def test_find_references_within_file(self, language_server: SolidLanguageServer) -> None:
        """References to greet (defined and used in Main.elm) include Main.elm itself."""
        main_path = os.path.join("Main.elm")
        doc_symbols = language_server.request_document_symbols(main_path).get_all_symbols_and_roots()
        greet = next((s for s in doc_symbols[0] if s.get("name") == "greet"), None)
        assert greet is not None, "Could not find 'greet' symbol in Main.elm"
        anchor = greet["selectionRange"]["start"]
        found = language_server.request_references(main_path, anchor["line"], anchor["character"])
        assert any("Main.elm" in r.get("relativePath", "") for r in found), "Main.elm should reference greet function"

    @pytest.mark.parametrize("language_server", [Language.ELM], indirect=True)
    def test_find_references_across_files(self, language_server: SolidLanguageServer) -> None:
        """formatMessage is defined in Utils.elm and referenced from Main.elm."""
        utils_path = os.path.join("Utils.elm")
        doc_symbols = language_server.request_document_symbols(utils_path).get_all_symbols_and_roots()
        format_message = next((s for s in doc_symbols[0] if s.get("name") == "formatMessage"), None)
        assert format_message is not None, "Could not find 'formatMessage' symbol in Utils.elm"
        # Query references starting from the definition site in Utils.elm.
        anchor = format_message["selectionRange"]["start"]
        found = language_server.request_references(utils_path, anchor["line"], anchor["character"])
        assert found, "Expected to find references for formatMessage"
        # At least one reference must land in Main.elm, where formatMessage is used.
        assert any("Main.elm" in r.get("relativePath", "") for r in found), "Expected to find usage of formatMessage in Main.elm"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/elm/test_elm_basic.py",
"license": "MIT License",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/erlang/test_erlang_basic.py | """
Basic integration tests for the Erlang language server functionality.
These tests validate the functionality of the language server APIs
like request_references using the test repository.
"""
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from . import ERLANG_LS_UNAVAILABLE, ERLANG_LS_UNAVAILABLE_REASON
@pytest.mark.erlang
@pytest.mark.skipif(ERLANG_LS_UNAVAILABLE, reason=f"Erlang LS not available: {ERLANG_LS_UNAVAILABLE_REASON}")
class TestErlangLanguageServerBasics:
"""Test basic functionality of the Erlang language server."""
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_language_server_initialization(self, language_server: SolidLanguageServer) -> None:
"""Test that the Erlang language server initializes properly."""
assert language_server is not None
assert language_server.language == Language.ERLANG
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_document_symbols(self, language_server: SolidLanguageServer) -> None:
"""Test document symbols retrieval for Erlang files."""
try:
file_path = "hello.erl"
symbols_tuple = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
assert isinstance(symbols_tuple, tuple)
assert len(symbols_tuple) == 2
all_symbols, root_symbols = symbols_tuple
assert isinstance(all_symbols, list)
assert isinstance(root_symbols, list)
except Exception as e:
if "not fully initialized" in str(e):
pytest.skip("Erlang language server not fully initialized")
else:
raise
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/erlang/test_erlang_basic.py",
"license": "MIT License",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/erlang/test_erlang_ignored_dirs.py | from collections.abc import Generator
from pathlib import Path
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from test.conftest import start_ls_context
from . import ERLANG_LS_UNAVAILABLE, ERLANG_LS_UNAVAILABLE_REASON
# These marks will be applied to all tests in this module
pytestmark = [
pytest.mark.erlang,
pytest.mark.skipif(ERLANG_LS_UNAVAILABLE, reason=f"Erlang LS not available: {ERLANG_LS_UNAVAILABLE_REASON}"),
]
@pytest.fixture(scope="module")
def ls_with_ignored_dirs() -> Generator[SolidLanguageServer, None, None]:
"""Fixture to set up an LS for the erlang test repo with the 'ignored_dir' directory ignored."""
ignored_paths = ["_build", "ignored_dir"]
with start_ls_context(language=Language.ERLANG, ignored_paths=ignored_paths) as ls:
yield ls
@pytest.mark.timeout(60) # Add 60 second timeout
@pytest.mark.xfail(reason="Known timeout issue on Ubuntu CI with Erlang LS server startup", strict=False)
@pytest.mark.parametrize("ls_with_ignored_dirs", [Language.ERLANG], indirect=True)
def test_symbol_tree_ignores_dir(ls_with_ignored_dirs: SolidLanguageServer):
"""Tests that request_full_symbol_tree ignores the configured directory."""
root = ls_with_ignored_dirs.request_full_symbol_tree()[0]
root_children = root["children"]
children_names = {child["name"] for child in root_children}
# Should have src, include, and test directories, but not _build or ignored_dir
expected_dirs = {"src", "include", "test"}
found_expected = expected_dirs.intersection(children_names)
assert len(found_expected) > 0, f"Expected some dirs from {expected_dirs} to be in {children_names}"
assert "_build" not in children_names, f"_build should not be in {children_names}"
assert "ignored_dir" not in children_names, f"ignored_dir should not be in {children_names}"
@pytest.mark.timeout(60) # Add 60 second timeout
@pytest.mark.xfail(reason="Known timeout issue on Ubuntu CI with Erlang LS server startup", strict=False)
@pytest.mark.parametrize("ls_with_ignored_dirs", [Language.ERLANG], indirect=True)
def test_find_references_ignores_dir(ls_with_ignored_dirs: SolidLanguageServer):
"""Tests that find_references ignores the configured directory."""
# Location of user record, which might be referenced in ignored_dir
definition_file = "include/records.hrl"
# Find the user record definition
symbols = ls_with_ignored_dirs.request_document_symbols(definition_file).get_all_symbols_and_roots()
user_symbol = None
for symbol_group in symbols:
user_symbol = next((s for s in symbol_group if "user" in s.get("name", "").lower()), None)
if user_symbol:
break
if not user_symbol or "selectionRange" not in user_symbol:
pytest.skip("User record symbol not found for reference testing")
sel_start = user_symbol["selectionRange"]["start"]
references = ls_with_ignored_dirs.request_references(definition_file, sel_start["line"], sel_start["character"])
# Assert that _build and ignored_dir do not appear in the references
assert not any("_build" in ref["relativePath"] for ref in references), "_build should be ignored"
assert not any("ignored_dir" in ref["relativePath"] for ref in references), "ignored_dir should be ignored"
@pytest.mark.timeout(60) # Add 60 second timeout
@pytest.mark.xfail(reason="Known timeout issue on Ubuntu CI with Erlang LS server startup", strict=False)
@pytest.mark.parametrize("repo_path", [Language.ERLANG], indirect=True)
def test_refs_and_symbols_with_glob_patterns(repo_path: Path) -> None:
"""Tests that refs and symbols with glob patterns are ignored."""
ignored_paths = ["_build*", "ignored_*", "*.tmp"]
with start_ls_context(language=Language.ERLANG, repo_path=str(repo_path), ignored_paths=ignored_paths) as ls:
# Same as in the above tests
root = ls.request_full_symbol_tree()[0]
root_children = root["children"]
children_names = {child["name"] for child in root_children}
# Should have src, include, and test directories, but not _build or ignored_dir
expected_dirs = {"src", "include", "test"}
found_expected = expected_dirs.intersection(children_names)
assert len(found_expected) > 0, f"Expected some dirs from {expected_dirs} to be in {children_names}"
assert "_build" not in children_names, f"_build should not be in {children_names} (glob pattern)"
assert "ignored_dir" not in children_names, f"ignored_dir should not be in {children_names} (glob pattern)"
# Test that the refs and symbols with glob patterns are ignored
definition_file = "include/records.hrl"
# Find the user record definition
symbols = ls.request_document_symbols(definition_file).get_all_symbols_and_roots()
user_symbol = None
for symbol_group in symbols:
user_symbol = next((s for s in symbol_group if "user" in s.get("name", "").lower()), None)
if user_symbol:
break
if user_symbol and "selectionRange" in user_symbol:
sel_start = user_symbol["selectionRange"]["start"]
references = ls.request_references(definition_file, sel_start["line"], sel_start["character"])
# Assert that _build and ignored_dir do not appear in references
assert not any("_build" in ref["relativePath"] for ref in references), "_build should be ignored (glob)"
assert not any("ignored_dir" in ref["relativePath"] for ref in references), "ignored_dir should be ignored (glob)"
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_default_ignored_directories(language_server: SolidLanguageServer):
"""Test that default Erlang directories are ignored."""
# Test that Erlang-specific directories are ignored by default
assert language_server.is_ignored_dirname("_build"), "_build should be ignored"
assert language_server.is_ignored_dirname("ebin"), "ebin should be ignored"
assert language_server.is_ignored_dirname("deps"), "deps should be ignored"
assert language_server.is_ignored_dirname(".rebar3"), ".rebar3 should be ignored"
assert language_server.is_ignored_dirname("_checkouts"), "_checkouts should be ignored"
assert language_server.is_ignored_dirname("node_modules"), "node_modules should be ignored"
# Test that important directories are not ignored
assert not language_server.is_ignored_dirname("src"), "src should not be ignored"
assert not language_server.is_ignored_dirname("include"), "include should not be ignored"
assert not language_server.is_ignored_dirname("test"), "test should not be ignored"
assert not language_server.is_ignored_dirname("priv"), "priv should not be ignored"
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_symbol_tree_excludes_build_dirs(language_server: SolidLanguageServer):
"""Test that symbol tree excludes build and dependency directories."""
symbol_tree = language_server.request_full_symbol_tree()
if symbol_tree:
root = symbol_tree[0]
children_names = {child["name"] for child in root.get("children", [])}
# Build and dependency directories should not appear
ignored_dirs = {"_build", "ebin", "deps", ".rebar3", "_checkouts", "node_modules"}
found_ignored = ignored_dirs.intersection(children_names)
assert len(found_ignored) == 0, f"Found ignored directories in symbol tree: {found_ignored}"
# Important directories should appear
important_dirs = {"src", "include", "test"}
found_important = important_dirs.intersection(children_names)
assert len(found_important) > 0, f"Expected to find important directories: {important_dirs}, got: {children_names}"
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_ignore_compiled_files(language_server: SolidLanguageServer):
"""Test that compiled Erlang files are ignored."""
# Test that beam files are ignored
assert language_server.is_ignored_filename("module.beam"), "BEAM files should be ignored"
assert language_server.is_ignored_filename("app.beam"), "BEAM files should be ignored"
# Test that source files are not ignored
assert not language_server.is_ignored_filename("module.erl"), "Erlang source files should not be ignored"
assert not language_server.is_ignored_filename("records.hrl"), "Header files should not be ignored"
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_rebar_directories_ignored(language_server: SolidLanguageServer):
"""Test that rebar-specific directories are ignored."""
# Test rebar3-specific directories
assert language_server.is_ignored_dirname("_build"), "rebar3 _build should be ignored"
assert language_server.is_ignored_dirname("_checkouts"), "rebar3 _checkouts should be ignored"
assert language_server.is_ignored_dirname(".rebar3"), "rebar3 cache should be ignored"
# Test that rebar.lock and rebar.config are not ignored (they are configuration files)
assert not language_server.is_ignored_filename("rebar.config"), "rebar.config should not be ignored"
assert not language_server.is_ignored_filename("rebar.lock"), "rebar.lock should not be ignored"
@pytest.mark.parametrize("ls_with_ignored_dirs", [Language.ERLANG], indirect=True)
def test_document_symbols_ignores_dirs(ls_with_ignored_dirs: SolidLanguageServer):
"""Test that document symbols from ignored directories are not included."""
# Try to get symbols from a file in ignored directory (should not find it)
try:
ignored_file = "ignored_dir/ignored_module.erl"
symbols = ls_with_ignored_dirs.request_document_symbols(ignored_file).get_all_symbols_and_roots()
# If we get here, the file was found - symbols should be empty or None
if symbols:
assert len(symbols) == 0, "Should not find symbols in ignored directory"
except Exception:
# This is expected - the file should not be accessible
pass
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_erlang_specific_ignore_patterns(language_server: SolidLanguageServer):
"""Test Erlang-specific ignore patterns work correctly."""
erlang_ignored_dirs = ["_build", "ebin", ".rebar3", "_checkouts", "cover"]
# These should be ignored
for dirname in erlang_ignored_dirs:
assert language_server.is_ignored_dirname(dirname), f"{dirname} should be ignored"
# These should not be ignored
erlang_important_dirs = ["src", "include", "test", "priv"]
for dirname in erlang_important_dirs:
assert not language_server.is_ignored_dirname(dirname), f"{dirname} should not be ignored"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/erlang/test_erlang_ignored_dirs.py",
"license": "MIT License",
"lines": 158,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
oraios/serena:test/solidlsp/erlang/test_erlang_symbol_retrieval.py | """
Tests for the Erlang language server symbol-related functionality.
These tests focus on the following methods:
- request_containing_symbol
- request_referencing_symbols
- request_defining_symbol
"""
import os
import pytest
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_types import SymbolKind
from . import ERLANG_LS_UNAVAILABLE, ERLANG_LS_UNAVAILABLE_REASON
# These marks will be applied to all tests in this module
pytestmark = [
pytest.mark.erlang,
pytest.mark.skipif(ERLANG_LS_UNAVAILABLE, reason=f"Erlang LS not available: {ERLANG_LS_UNAVAILABLE_REASON}"),
]
class TestErlangLanguageServerSymbols:
"""Test the Erlang language server's symbol-related functionality."""
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_containing_symbol_function(self, language_server: SolidLanguageServer) -> None:
"""Test request_containing_symbol for a function."""
# Test for a position inside the create_user function
file_path = os.path.join("src", "models.erl")
# Find the create_user function in the file
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
create_user_line = None
for i, line in enumerate(lines):
if "create_user(" in line and "-spec" not in line:
create_user_line = i + 1 # Go inside the function body
break
if create_user_line is None:
pytest.skip("Could not find create_user function")
containing_symbol = language_server.request_containing_symbol(file_path, create_user_line, 10, include_body=True)
# Verify that we found the containing symbol
if containing_symbol:
assert "create_user" in containing_symbol["name"]
assert containing_symbol["kind"] == SymbolKind.Method or containing_symbol["kind"] == SymbolKind.Function
if "body" in containing_symbol:
assert "create_user" in containing_symbol["body"].get_text()
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_containing_symbol_module(self, language_server: SolidLanguageServer) -> None:
"""Test request_containing_symbol for a module."""
# Test for a position inside the models module but outside any function
file_path = os.path.join("src", "models.erl")
# Find the module definition
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
module_line = None
for i, line in enumerate(lines):
if "-module(models)" in line:
module_line = i + 2 # Go inside the module
break
if module_line is None:
pytest.skip("Could not find models module")
containing_symbol = language_server.request_containing_symbol(file_path, module_line, 5)
# Verify that we found the containing symbol
if containing_symbol:
assert "models" in containing_symbol["name"] or "module" in containing_symbol["name"].lower()
assert containing_symbol["kind"] == SymbolKind.Module or containing_symbol["kind"] == SymbolKind.Class
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_containing_symbol_nested(self, language_server: SolidLanguageServer) -> None:
"""Test request_containing_symbol with nested scopes."""
# Test for a position inside a function which is inside a module
file_path = os.path.join("src", "models.erl")
# Find a function inside models module
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
function_body_line = None
for i, line in enumerate(lines):
if "create_user(" in line and "-spec" not in line:
# Go deeper into the function body where there might be case expressions
for j in range(i + 1, min(i + 10, len(lines))):
if lines[j].strip() and not lines[j].strip().startswith("%"):
function_body_line = j
break
break
if function_body_line is None:
pytest.skip("Could not find function body")
containing_symbol = language_server.request_containing_symbol(file_path, function_body_line, 15)
# Verify that we found the innermost containing symbol (the function)
if containing_symbol:
expected_names = ["create_user", "models"]
assert any(name in containing_symbol["name"] for name in expected_names)
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_containing_symbol_none(self, language_server: SolidLanguageServer) -> None:
"""Test request_containing_symbol for a position with no containing symbol."""
# Test for a position outside any function/module (e.g., in comments)
file_path = os.path.join("src", "models.erl")
# Line 1-2 are likely module declaration or comments
containing_symbol = language_server.request_containing_symbol(file_path, 2, 10)
# Should return None or an empty dictionary, or the top-level module
# This is acceptable behavior for module-level positions
assert containing_symbol is None or containing_symbol == {} or "models" in str(containing_symbol)
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_referencing_symbols_record(self, language_server: SolidLanguageServer) -> None:
"""Test request_referencing_symbols for a record."""
# Test referencing symbols for user record
file_path = os.path.join("include", "records.hrl")
symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
user_symbol = None
for symbol_group in symbols:
user_symbol = next((s for s in symbol_group if "user" in s.get("name", "")), None)
if user_symbol:
break
if not user_symbol or "selectionRange" not in user_symbol:
pytest.skip("User record symbol or its selectionRange not found")
sel_start = user_symbol["selectionRange"]["start"]
ref_symbols = [
ref.symbol for ref in language_server.request_referencing_symbols(file_path, sel_start["line"], sel_start["character"])
]
if ref_symbols:
models_references = [
symbol
for symbol in ref_symbols
if "location" in symbol and "uri" in symbol["location"] and "models.erl" in symbol["location"]["uri"]
]
# We expect some references from models.erl
assert len(models_references) >= 0 # At least attempt to find references
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_referencing_symbols_function(self, language_server: SolidLanguageServer) -> None:
"""Test request_referencing_symbols for a function."""
# Test referencing symbols for create_user function
file_path = os.path.join("src", "models.erl")
symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
create_user_symbol = None
for symbol_group in symbols:
create_user_symbol = next((s for s in symbol_group if "create_user" in s.get("name", "")), None)
if create_user_symbol:
break
if not create_user_symbol or "selectionRange" not in create_user_symbol:
pytest.skip("create_user function symbol or its selectionRange not found")
sel_start = create_user_symbol["selectionRange"]["start"]
ref_symbols = [
ref.symbol for ref in language_server.request_referencing_symbols(file_path, sel_start["line"], sel_start["character"])
]
if ref_symbols:
# We might find references from services.erl or test files
service_references = [
symbol
for symbol in ref_symbols
if "location" in symbol
and "uri" in symbol["location"]
and ("services.erl" in symbol["location"]["uri"] or "test" in symbol["location"]["uri"])
]
assert len(service_references) >= 0 # At least attempt to find references
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_referencing_symbols_none(self, language_server: SolidLanguageServer) -> None:
"""Test request_referencing_symbols for a position with no symbol."""
file_path = os.path.join("src", "models.erl")
# Line 3 is likely a blank line or comment
try:
ref_symbols = [ref.symbol for ref in language_server.request_referencing_symbols(file_path, 3, 0)]
# If we get here, make sure we got an empty result
assert ref_symbols == [] or ref_symbols is None
except Exception:
# The method might raise an exception for invalid positions
# which is acceptable behavior
pass
# Tests for request_defining_symbol
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_defining_symbol_function_call(self, language_server: SolidLanguageServer) -> None:
"""Test request_defining_symbol for a function call."""
# Find a place where models:create_user is called in services.erl
file_path = os.path.join("src", "services.erl")
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
models_call_line = None
for i, line in enumerate(lines):
if "models:create_user(" in line:
models_call_line = i
break
if models_call_line is None:
pytest.skip("Could not find models:create_user call")
# Try to find the definition of models:create_user
defining_symbol = language_server.request_defining_symbol(file_path, models_call_line, 20)
if defining_symbol:
assert "create_user" in defining_symbol.get("name", "") or "models" in defining_symbol.get("name", "")
if "location" in defining_symbol and "uri" in defining_symbol["location"]:
assert "models.erl" in defining_symbol["location"]["uri"]
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_defining_symbol_record_usage(self, language_server: SolidLanguageServer) -> None:
"""Test request_defining_symbol for a record usage."""
# Find a place where #user{} record is used in models.erl
file_path = os.path.join("src", "models.erl")
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
record_usage_line = None
for i, line in enumerate(lines):
if "#user{" in line:
record_usage_line = i
break
if record_usage_line is None:
pytest.skip("Could not find #user{} record usage")
defining_symbol = language_server.request_defining_symbol(file_path, record_usage_line, 10)
if defining_symbol:
assert "user" in defining_symbol.get("name", "").lower()
if "location" in defining_symbol and "uri" in defining_symbol["location"]:
assert "records.hrl" in defining_symbol["location"]["uri"]
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_defining_symbol_module_call(self, language_server: SolidLanguageServer) -> None:
"""Test request_defining_symbol for a module function call."""
# Find a place where utils:validate_input is called
file_path = os.path.join("src", "models.erl")
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
utils_call_line = None
for i, line in enumerate(lines):
if "validate_email(" in line:
utils_call_line = i
break
if utils_call_line is None:
pytest.skip("Could not find function call in models.erl")
defining_symbol = language_server.request_defining_symbol(file_path, utils_call_line, 15)
if defining_symbol:
assert "validate" in defining_symbol.get("name", "") or "email" in defining_symbol.get("name", "")
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_defining_symbol_none(self, language_server: SolidLanguageServer) -> None:
"""Test request_defining_symbol for a position with no symbol."""
# Test for a position with no symbol (e.g., whitespace or comment)
file_path = os.path.join("src", "models.erl")
# Line 3 is likely a blank line or comment
defining_symbol = language_server.request_defining_symbol(file_path, 3, 0)
# Should return None or empty
assert defining_symbol is None or defining_symbol == {}
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_symbol_methods_integration(self, language_server: SolidLanguageServer) -> None:
"""Test integration between different symbol methods."""
file_path = os.path.join("src", "models.erl")
# Find create_user function definition
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
create_user_line = None
for i, line in enumerate(lines):
if "create_user(" in line and "-spec" not in line:
create_user_line = i
break
if create_user_line is None:
pytest.skip("Could not find create_user function")
# Test containing symbol
containing = language_server.request_containing_symbol(file_path, create_user_line + 2, 10)
if containing:
# Test that we can find references to this symbol
if "location" in containing and "range" in containing["location"]:
start_pos = containing["location"]["range"]["start"]
refs = [
ref.symbol for ref in language_server.request_referencing_symbols(file_path, start_pos["line"], start_pos["character"])
]
# We should find some references or none (both are valid outcomes)
assert isinstance(refs, list)
@pytest.mark.timeout(60) # Add 60 second timeout
@pytest.mark.xfail(
reason="Known intermittent timeout issue in Erlang LS in CI environments. "
"May pass locally but can timeout on slower CI systems.",
strict=False,
)
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_symbol_tree_structure(self, language_server: SolidLanguageServer) -> None:
"""Test that symbol tree structure is correctly built."""
symbol_tree = language_server.request_full_symbol_tree()
# Should get a tree structure
assert len(symbol_tree) > 0
# Should have our test repository structure
root = symbol_tree[0]
assert "children" in root
# Look for src directory
src_dir = None
for child in root["children"]:
if child["name"] == "src":
src_dir = child
break
if src_dir:
# Check for our Erlang modules
file_names = [child["name"] for child in src_dir.get("children", [])]
expected_modules = ["models", "services", "utils", "app"]
found_modules = [name for name in expected_modules if any(name in fname for fname in file_names)]
assert len(found_modules) > 0, f"Expected to find some modules from {expected_modules}, but got {file_names}"
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_request_dir_overview(self, language_server: SolidLanguageServer) -> None:
"""Test request_dir_overview functionality."""
src_overview = language_server.request_dir_overview("src")
# Should get an overview of the src directory
assert src_overview is not None
overview_keys = list(src_overview.keys()) if hasattr(src_overview, "keys") else []
src_files = [key for key in overview_keys if key.startswith("src/") or "src" in key]
assert len(src_files) > 0, f"Expected to find src/ files in overview keys: {overview_keys}"
# Should contain information about our modules
overview_text = str(src_overview).lower()
expected_terms = ["models", "services", "user", "create_user", "gen_server"]
found_terms = [term for term in expected_terms if term in overview_text]
assert len(found_terms) > 0, f"Expected to find some terms from {expected_terms} in overview"
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_containing_symbol_of_record_field(self, language_server: SolidLanguageServer) -> None:
"""Test containing symbol for record field access."""
file_path = os.path.join("src", "models.erl")
# Find a record field access like User#user.name
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
record_field_line = None
for i, line in enumerate(lines):
if "#user{" in line and ("name" in line or "email" in line or "id" in line):
record_field_line = i
break
if record_field_line is None:
pytest.skip("Could not find record field access")
containing_symbol = language_server.request_containing_symbol(file_path, record_field_line, 10)
if containing_symbol:
# Should be contained within a function
assert "name" in containing_symbol
expected_names = ["create_user", "update_user", "format_user_info"]
assert any(name in containing_symbol["name"] for name in expected_names)
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_containing_symbol_of_spec(self, language_server: SolidLanguageServer) -> None:
"""Test containing symbol for function specs."""
file_path = os.path.join("src", "models.erl")
# Find a -spec directive
content = language_server.retrieve_full_file_content(file_path)
lines = content.split("\n")
spec_line = None
for i, line in enumerate(lines):
if line.strip().startswith("-spec") and "create_user" in line:
spec_line = i
break
if spec_line is None:
pytest.skip("Could not find -spec directive")
containing_symbol = language_server.request_containing_symbol(file_path, spec_line, 5)
if containing_symbol:
# Should be contained within the module or the function it specifies
assert "name" in containing_symbol
expected_names = ["models", "create_user"]
assert any(name in containing_symbol["name"] for name in expected_names)
@pytest.mark.timeout(60) # Add 60 second timeout
@pytest.mark.xfail(
reason="Known intermittent timeout issue in Erlang LS in CI environments. "
"May pass locally but can timeout on slower CI systems, especially macOS. "
"Similar to known Next LS timeout issues.",
strict=False,
)
@pytest.mark.parametrize("language_server", [Language.ERLANG], indirect=True)
def test_referencing_symbols_across_files(self, language_server: SolidLanguageServer) -> None:
"""Test finding references across different files."""
# Test that we can find references to models module functions in services.erl
file_path = os.path.join("src", "models.erl")
symbols = language_server.request_document_symbols(file_path).get_all_symbols_and_roots()
create_user_symbol = None
for symbol_group in symbols:
create_user_symbol = next((s for s in symbol_group if "create_user" in s.get("name", "")), None)
if create_user_symbol:
break
if not create_user_symbol or "selectionRange" not in create_user_symbol:
pytest.skip("create_user function symbol not found")
sel_start = create_user_symbol["selectionRange"]["start"]
ref_symbols = [
ref.symbol for ref in language_server.request_referencing_symbols(file_path, sel_start["line"], sel_start["character"])
]
# Look for cross-file references
cross_file_refs = [
symbol
for symbol in ref_symbols
if "location" in symbol and "uri" in symbol["location"] and not symbol["location"]["uri"].endswith("models.erl")
]
# We might find references in services.erl or test files
if cross_file_refs:
assert len(cross_file_refs) > 0, "Should find some cross-file references"
| {
"repo_id": "oraios/serena",
"file_path": "test/solidlsp/erlang/test_erlang_symbol_retrieval.py",
"license": "MIT License",
"lines": 366,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.