Coverage for src/debputy/lsp/lsp_test_support.py: 100%
29 statements
« prev ^ index » next coverage.py v7.8.2, created at 2026-01-26 19:30 +0000
1import dataclasses
2from typing import List, Optional, FrozenSet, TYPE_CHECKING
4from debputy.lsp.lsp_features import SEMANTIC_TOKENS_LEGEND
5from debputy.util import grouper
7if TYPE_CHECKING:
8 import lsprotocol.types as types
9else:
10 import debputy.lsprotocol.types as types
@dataclasses.dataclass(slots=True, frozen=True)
class ResolvedSemanticToken:
    """A semantic token decoded from the LSP delta-encoded wire format.

    Holds the absolute document position plus the resolved (human-readable)
    token type and modifier names, making test assertions straightforward.
    """

    # Absolute range the token covers in the document.
    range: types.Range
    # The literal text the range covers in the document.
    value: str
    # Token type name, resolved via the semantic tokens legend.
    token_name: str
    # Resolved modifier names; empty when the token has no modifiers.
    modifiers: frozenset[str] = frozenset()
def resolved_semantic_token(
    line_no: int,
    col_start: int,
    value: str,
    token_type: str,
    *,
    token_modifiers: frozenset[str] = frozenset(),
) -> ResolvedSemanticToken:
    """Build a ResolvedSemanticToken for *value* starting at (line_no, col_start).

    The end position is derived from the length of *value*, so the token is
    assumed to be confined to a single line.
    """
    start = types.Position(line_no, col_start)
    end = types.Position(line_no, col_start + len(value))
    return ResolvedSemanticToken(
        types.Range(start, end),
        value,
        token_type,
        token_modifiers,
    )
def resolve_semantic_tokens(
    lines: list[str],
    token_result: types.SemanticTokens,
) -> list[ResolvedSemanticToken]:
    """Decode an LSP SemanticTokens payload into absolute, resolved tokens.

    The wire format packs tokens as flat groups of five integers
    (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), with
    positions encoded relative to the previous token.  This expands each
    group into a ResolvedSemanticToken with absolute coordinates and the
    token text extracted from *lines*.
    """
    data = token_result.data
    assert len(data) % 5 == 0
    token_types = SEMANTIC_TOKENS_LEGEND.token_types
    resolved: list[ResolvedSemanticToken] = []
    line_no = 0
    col = 0
    for line_delta, col_delta, length, type_code, modifier_codes in grouper(
        data, 5, incomplete="strict"
    ):
        if line_delta:
            # Moving to a new line resets the column; column deltas are
            # only relative within the same line.
            col = 0
            line_no += line_delta
        col += col_delta
        assert (
            not modifier_codes
        ), "TODO: Modifiers not supported (no modifiers defined)"
        token_text = lines[line_no][col : col + length]
        resolved.append(
            resolved_semantic_token(
                line_no,
                col,
                token_text,
                token_types[type_code],
            )
        )
    return resolved