Coverage for src/debputy/lsp/languages/lsp_debian_patches_series.py: 71%
167 statements
« prev ^ index » next coverage.py v7.8.2, created at 2025-09-07 09:27 +0000
« prev ^ index » next coverage.py v7.8.2, created at 2025-09-07 09:27 +0000
1import re
2from typing import (
3 Union,
4 Sequence,
5 Optional,
6 Iterable,
7 List,
8 Mapping,
9 TYPE_CHECKING,
10 cast,
11)
13from debputy.filesystem_scan import VirtualPathBase
14from debputy.linting.lint_util import LintState
15from debputy.lsp.debputy_ls import DebputyLanguageServer
16from debputy.lsp.lsp_features import (
17 lint_diagnostics,
18 lsp_standard_handler,
19 lsp_completer,
20 lsp_semantic_tokens_full,
21 SEMANTIC_TOKEN_TYPES_IDS,
22 SecondaryLanguage,
23 LanguageDispatchRule,
24)
25from debputy.lsp.quickfixes import (
26 propose_remove_range_quick_fix,
27 propose_correct_text_quick_fix,
28)
29from debputy.lsp.text_util import (
30 SemanticTokensState,
31)
32from debputy.lsprotocol.types import (
33 CompletionItem,
34 CompletionList,
35 CompletionParams,
36 TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL,
37 SemanticTokensParams,
38 SemanticTokens,
39 SemanticTokenTypes,
40 Position,
41 CompletionItemKind,
42 CompletionItemLabelDetails,
43)
45if TYPE_CHECKING:
46 import lsprotocol.types as types
47else:
48 import debputy.lsprotocol.types as types
50try:
51 from debputy.lsp.vendoring._deb822_repro.locatable import (
52 Position as TEPosition,
53 Range as TERange,
54 START_POSITION,
55 )
57 from pygls.server import LanguageServer
58 from pygls.workspace import TextDocument
59except ImportError:
60 pass
# Dispatch rule routing "debian/patches/series" documents to the handlers
# registered in this module.  The secondary "path-name" lookup matches
# clients that identify the document by file path rather than language ID.
_DISPATCH_RULE = LanguageDispatchRule.new_rule(
    "debian/patches/series",
    None,
    "patches/series",
    [
        SecondaryLanguage("patches/series", secondary_lookup="path-name"),
    ],
)


# Register the standard (boilerplate) LSP handlers for this dispatch rule:
# code actions plus will-save-wait-until.
lsp_standard_handler(_DISPATCH_RULE, types.TEXT_DOCUMENT_CODE_ACTION)
lsp_standard_handler(_DISPATCH_RULE, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)
# A line that consists solely of a "#" comment (optionally surrounded by
# whitespace); group 1 is the comment with trailing whitespace stripped.
_RE_LINE_COMMENT = re.compile(r"^\s*(#(?:.*\S)?)\s*$")
# A series entry: the patch file name, optional options and an optional
# trailing "#" comment.
_RE_PATCH_LINE = re.compile(
    r"""
    ^ \s* (?P<patch_name> \S+ ) \s*
    (?: (?P<options> [^#\s]+ ) \s* )?
    (?: (?P<comment> \# (?:.*\S)? ) \s* )?
""",
    re.VERBOSE,
)
# Leading "./", "../" or "/" style prefixes in front of a patch name
# (unnecessary - or disallowed in the ".." case).
_RE_UNNECESSARY_LEADING_PREFIX = re.compile(r"(?:(?:[.]{1,2})?/+)+")
# Runs of two or more slashes that should collapse into a single one.
_RE_UNNECESSARY_SLASHES = re.compile("//+")
def _all_patch_files(
    debian_patches: VirtualPathBase,
) -> Iterable[VirtualPathBase]:
    """Yield every path below ``debian/patches`` that could be a patch file.

    Skips directories, the ``series`` file itself, editor backup/temporary
    files and auxiliary ``*.series`` files placed directly inside
    ``debian/patches``.
    """
    if not debian_patches.is_dir:
        return
    series_file_paths = ("debian/patches/series", "./debian/patches/series")
    patches_dir_paths = ("debian/patches", "./debian/patches")
    for candidate in debian_patches.all_paths():
        if candidate.is_dir or candidate.path in series_file_paths:
            continue
        basename = candidate.name
        # Editor backups ("foo~") plus hidden/lock files are never patches.
        if basename.endswith("~") or basename.startswith((".#", "#", ".")):
            continue
        # Auxiliary "*.series" files directly below debian/patches are
        # series files in their own right, not patches.
        parent = candidate.parent_dir
        if (
            parent is not None
            and parent.path in patches_dir_paths
            and basename.endswith(".series")
        ):
            continue
        yield candidate
def _listed_patches(
    lines: List[str],
) -> Iterable[str]:
    """Yield the normalized patch names listed in a ``series`` file.

    Comment lines and blank entries are skipped.  Leading "./"-style
    prefixes are removed and runs of slashes are collapsed, so the yielded
    names are comparable with on-disk paths relative to ``debian/patches``.

    :param lines: The lines of the ``debian/patches/series`` document.
    """
    for line in lines:
        m = _RE_PATCH_LINE.match(line)
        if m is None:
            continue
        filename = m.group(1)
        if filename.startswith("#"):
            continue
        # Strip an unnecessary leading "./", "../" or "/" prefix.  Use an
        # anchored `.match()` rather than `.sub(..., count=1)`: `re.sub`
        # is not anchored at the start of the string, so with `count=1` it
        # would strip the first *internal* slash of names such as
        # "subdir/fix.patch" (yielding "subdirfix.patch").  The lint code
        # uses the same anchored approach.
        pm = _RE_UNNECESSARY_LEADING_PREFIX.match(filename)
        if pm is not None:
            filename = filename[pm.end() :]
        filename = _RE_UNNECESSARY_SLASHES.sub("/", filename)
        if not filename:
            continue
        yield filename
@lint_diagnostics(_DISPATCH_RULE)
async def _lint_debian_patches_series(lint_state: LintState) -> None:
    """Emit diagnostics for the ``debian/patches/series`` file.

    Detects:
     * disallowed ("../") or unnecessary ("./", "/") patch name prefixes,
     * unnecessary duplicated slashes inside patch names,
     * patches not using a ".patch"/".diff" extension (pedantic),
     * references to patches that do not exist on disk,
     * duplicate series entries, and
     * patch files present on disk but not listed in the series file.
    """
    source_root = lint_state.source_root
    if source_root is None:
        return None
    dpatches = source_root.lookup("debian/patches/")
    if dpatches is None or not dpatches.is_dir:
        return None
    used_patches = set()
    all_patches = {pf.path for pf in _all_patch_files(dpatches)}
    for line_no, line in enumerate(lint_state.lines):
        m = _RE_PATCH_LINE.match(line)
        if not m:
            continue
        groups = m.groupdict()
        orig_filename = groups["patch_name"]
        filename = orig_filename
        patch_start_col, patch_end_col = m.span("patch_name")
        orig_filename_start_col = patch_start_col
        if filename.startswith("#"):
            continue
        if filename.startswith(("../", "./", "/")):
            sm = _RE_UNNECESSARY_LEADING_PREFIX.match(filename)
            assert sm is not None
            slash_start, slash_end = sm.span(0)
            orig_filename_start_col = slash_end
            prefix = filename[:orig_filename_start_col]
            filename = filename[orig_filename_start_col:]
            slash_range = TERange(
                TEPosition(
                    line_no,
                    patch_start_col + slash_start,
                ),
                TEPosition(
                    line_no,
                    patch_start_col + slash_end,
                ),
            )
            skip_use_check = False
            if ".." in prefix:
                # A ".." component could escape debian/patches entirely.
                diagnostic_title = f'Disallowed prefix "{prefix}"'
                severity = cast("LintSeverity", "error")
                skip_use_check = True
            else:
                diagnostic_title = f'Unnecessary prefix "{prefix}"'
                severity = cast("LintSeverity", "warning")
            lint_state.emit_diagnostic(
                slash_range,
                diagnostic_title,
                severity,
                "debputy",
                quickfixes=[
                    propose_remove_range_quick_fix(
                        proposed_title=f'Remove prefix "{prefix}"'
                    )
                ],
            )
            if skip_use_check:
                continue
        if "//" in filename:
            for usm in _RE_UNNECESSARY_SLASHES.finditer(filename):
                start_col, end_col = usm.span()
                slash_range = TERange(
                    TEPosition(
                        line_no,
                        orig_filename_start_col + start_col,
                    ),
                    TEPosition(
                        line_no,
                        orig_filename_start_col + end_col,
                    ),
                )
                lint_state.emit_diagnostic(
                    slash_range,
                    "Unnecessary slashes",
                    "warning",
                    "debputy",
                    quickfixes=[propose_correct_text_quick_fix("/")],
                )
            filename = _RE_UNNECESSARY_SLASHES.sub("/", filename)

        patch_name_range = TERange(
            TEPosition(
                line_no,
                patch_start_col,
            ),
            TEPosition(
                line_no,
                patch_end_col,
            ),
        )
        if not filename.lower().endswith((".diff", ".patch")):
            lint_state.emit_diagnostic(
                patch_name_range,
                # Bug fix: interpolate the actual patch name; the previous
                # message contained the literal placeholder "(unknown)".
                f'Patch not using ".patch" or ".diff" as extension: "{filename}"',
                "pedantic",
                "debputy",
            )
        # Bug fix: the path was previously built with a literal
        # "(unknown)" suffix, so no listed patch could ever match an
        # on-disk patch (every entry was reported as non-existing and
        # every patch file as unused).
        patch_path = f"{dpatches.path}/{filename}"
        if patch_path not in all_patches:
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Non-existing patch "{filename}"',
                "error",
                "debputy",
            )
        elif patch_path in used_patches:
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Duplicate patch: "{filename}"',
                "error",
                "debputy",
            )
        else:
            used_patches.add(patch_path)

    unused_patches = all_patches - used_patches
    # The whole-file range is identical for every unused patch; compute it
    # once instead of per iteration.
    line_count = len(lint_state.lines)
    file_range = TERange(
        TEPosition(
            0,
            0,
        ),
        TEPosition(
            line_count,
            len(lint_state.lines[-1]) if line_count else 0,
        ),
    )
    for unused_patch in sorted(unused_patches):
        patch_name = unused_patch[len(dpatches.path) + 1 :]
        lint_state.emit_diagnostic(
            file_range,
            f'Unused patch: "{patch_name}"',
            "warning",
            "debputy",
        )
@lsp_completer(_DISPATCH_RULE)
def _debian_patches_series_completions(
    ls: "DebputyLanguageServer",
    params: CompletionParams,
) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
    """Provide completion items for ``debian/patches/series``.

    Offers the patch files below ``debian/patches`` that are not already
    listed in the series file.  No completions are offered on comment
    lines or when the cursor sits inside an end-of-line comment.
    """
    doc = ls.workspace.get_text_document(params.text_document.uri)
    lint_state = ls.lint_state(doc)
    source_root = lint_state.source_root
    if source_root is None:
        return None
    dpatches = source_root.lookup("debian/patches")
    if dpatches is None:
        return None
    lines = doc.lines
    position = doc.position_codec.position_from_client_units(lines, params.position)
    cursor_line = lines[position.line]
    if cursor_line.startswith("#"):
        # Whole-line comment; nothing to complete here.
        return None
    try:
        cursor_line.rindex(" #", 0, position.character)
    except ValueError:
        pass
    else:
        return None  # In an end of line comment
    already_used = set(_listed_patches(lines))
    # `debian/patches + "/"`
    dpatches_dir_len = len(dpatches.path) + 1
    completions = []
    for patch_file in _all_patch_files(dpatches):
        rel_name = patch_file.path[dpatches_dir_len:]
        if rel_name in already_used:
            continue
        completions.append(
            CompletionItem(
                rel_name,
                kind=CompletionItemKind.File,
                insert_text=f"{rel_name}\n",
                label_details=CompletionItemLabelDetails(
                    description=f"debian/patches/{rel_name}",
                ),
            )
        )
    return completions
@lsp_semantic_tokens_full(_DISPATCH_RULE)
async def _debian_patches_semantic_tokens_full(
    ls: "DebputyLanguageServer",
    request: SemanticTokensParams,
) -> Optional[SemanticTokens]:
    """Compute semantic tokens for ``debian/patches/series``.

    Comment lines/segments are marked as comments, patch names as strings
    and the (rarely used) option column as keywords.
    """
    doc = ls.workspace.get_text_document(request.text_document.uri)
    lines = doc.lines
    position_codec = doc.position_codec

    tokens: List[int] = []
    token_code_string = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.String.value]
    token_code_comment = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Comment.value]
    token_code_options = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Keyword.value]
    sem_token_state = SemanticTokensState(
        ls,
        doc,
        lines,
        tokens,
    )

    async for line_no, line in ls.slow_iter(enumerate(lines)):
        if line.isspace():
            continue
        comment_match = _RE_LINE_COMMENT.match(line)
        if comment_match is not None:
            # Whole-line comment: one comment token covering the "#..."
            # part (sans surrounding whitespace).
            comment_start, comment_end = comment_match.span(1)
            comment_pos = position_codec.position_to_client_units(
                sem_token_state.lines,
                Position(
                    line_no,
                    comment_start,
                ),
            )
            sem_token_state.emit_token(
                comment_pos,
                position_codec.client_num_units(line[comment_start:comment_end]),
                token_code_comment,
            )
            continue
        patch_match = _RE_PATCH_LINE.match(line)
        if patch_match is None:
            continue
        match_groups = patch_match.groupdict()
        for group_name, token_code in (
            ("patch_name", token_code_string),
            ("options", token_code_options),
            ("comment", token_code_comment),
        ):
            _emit_group(
                line_no,
                token_code,
                sem_token_state,
                group_name,
                match_groups,
                patch_match,
            )

    return SemanticTokens(tokens)
def _emit_group(
    line_no: int,
    token_code: int,
    sem_token_state: SemanticTokensState,
    group_name: str,
    groups: Mapping[str, str],
    match: re.Match,
) -> None:
    """Emit one semantic token covering *group_name* of *match*.

    Does nothing when the group did not participate in the match (or the
    matched text is empty).

    :param line_no: Zero-based line of the match.
    :param token_code: Semantic token type ID to emit.
    :param sem_token_state: Accumulator state for the token stream.
    :param group_name: Named group of ``_RE_PATCH_LINE`` to highlight.
    :param groups: The match's ``groupdict()``.
    :param match: The underlying regex match (for column positions).
    """
    group_value = groups.get(group_name)
    if not group_value:
        return None
    codec = sem_token_state.doc.position_codec
    start_pos = codec.position_to_client_units(
        sem_token_state.lines,
        Position(
            line_no,
            match.start(group_name),
        ),
    )
    sem_token_state.emit_token(
        start_pos,
        codec.client_num_units(group_value),
        token_code,
    )