Coverage for src/debputy/lsp/languages/lsp_debian_patches_series.py: 71%

168 statements  

coverage.py v7.8.2, created at 2025-10-12 15:06 +0000

import re
from typing import (
    Union,
    Optional,
    List,
    TYPE_CHECKING,
    cast,
)
from collections.abc import Sequence, Iterable, Mapping

from debputy.filesystem_scan import VirtualPathBase
from debputy.linting.lint_util import LintState
from debputy.lsp.debputy_ls import DebputyLanguageServer
from debputy.lsp.lsp_features import (
    lint_diagnostics,
    lsp_standard_handler,
    lsp_completer,
    lsp_semantic_tokens_full,
    SEMANTIC_TOKEN_TYPES_IDS,
    SecondaryLanguage,
    LanguageDispatchRule,
)
from debputy.lsp.quickfixes import (
    propose_remove_range_quick_fix,
    propose_correct_text_quick_fix,
)
from debputy.lsp.text_util import (
    SemanticTokensState,
)
from debputy.lsprotocol.types import (
    CompletionItem,
    CompletionList,
    CompletionParams,
    TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL,
    SemanticTokensParams,
    SemanticTokens,
    SemanticTokenTypes,
    Position,
    CompletionItemKind,
    CompletionItemLabelDetails,
)

if TYPE_CHECKING:
    import lsprotocol.types as types
else:
    import debputy.lsprotocol.types as types

try:
    from debputy.lsp.vendoring._deb822_repro.locatable import (
        Position as TEPosition,
        Range as TERange,
        START_POSITION,
    )

    from pygls.server import LanguageServer
    from pygls.workspace import TextDocument
except ImportError:
    pass

_DISPATCH_RULE = LanguageDispatchRule.new_rule(
    "debian/patches/series",
    None,
    "patches/series",
    [
        SecondaryLanguage("patches/series", secondary_lookup="path-name"),
    ],
)


lsp_standard_handler(_DISPATCH_RULE, types.TEXT_DOCUMENT_CODE_ACTION)
lsp_standard_handler(_DISPATCH_RULE, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)

_RE_LINE_COMMENT = re.compile(r"^\s*(#(?:.*\S)?)\s*$")
_RE_PATCH_LINE = re.compile(
    r"""
    ^ \s* (?P<patch_name> \S+ ) \s*
    (?: (?P<options> [^#\s]+ ) \s* )?
    (?: (?P<comment> \# (?:.*\S)? ) \s* )?
""",
    re.VERBOSE,
)
_RE_UNNECESSARY_LEADING_PREFIX = re.compile(r"(?:(?:[.]{1,2})?/+)+")
_RE_UNNECESSARY_SLASHES = re.compile("//+")
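# Illustrative example (editorial addition, not in the original module): for
# a series line such as
#     fixes/typo.patch -p1  # applied upstream
# _RE_PATCH_LINE captures patch_name="fixes/typo.patch", options="-p1" and
# comment="# applied upstream". A whole-line comment still matches, with
# patch_name beginning in "#", which is why the consumers below skip any
# name that starts with "#".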

def _all_patch_files(
    debian_patches: VirtualPathBase,
) -> Iterable[VirtualPathBase]:
    if not debian_patches.is_dir:  # coverage: branch never taken
        return

    for patch_file in debian_patches.all_paths():
        if patch_file.is_dir or patch_file.path in (
            "debian/patches/series",
            "./debian/patches/series",
        ):
            continue

        if patch_file.name.endswith("~"):  # coverage: branch never taken
            continue
        if patch_file.name.startswith((".#", "#", ".")):  # coverage: branch never taken
            continue
        parent = patch_file.parent_dir
        if (  # coverage: branch never taken
            parent is not None
            and parent.path in ("debian/patches", "./debian/patches")
            and patch_file.name.endswith(".series")
        ):
            continue
        yield patch_file
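# Editorial note: besides the series file itself, this filters out editor
# backups ("*~"), hidden or lock files (".#*", "#*", ".*") and any
# vendor-specific "*.series" file directly under debian/patches, so only
# plausible patch files are yielded.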

def _listed_patches(
    lines: list[str],
) -> Iterable[str]:
    for line in lines:
        m = _RE_PATCH_LINE.match(line)
        if m is None:
            continue
        filename = m.group(1)
        if filename.startswith("#"):
            continue
        filename = _RE_UNNECESSARY_LEADING_PREFIX.sub("", filename, count=1)
        filename = _RE_UNNECESSARY_SLASHES.sub("/", filename)
        if not filename:
            continue
        yield filename
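# Example (editorial): the yielded names are normalised, so a series line
# reading "./fixes//typo.patch" is yielded as "fixes/typo.patch", matching
# the debian/patches-relative paths used by the completion handler below.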

@lint_diagnostics(_DISPATCH_RULE)
async def _lint_debian_patches_series(lint_state: LintState) -> None:
    source_root = lint_state.source_root
    if source_root is None:  # coverage: branch never taken
        return None

    dpatches = source_root.lookup("debian/patches/")
    if dpatches is None or not dpatches.is_dir:  # coverage: branch never taken
        return None

    used_patches = set()
    all_patches = {pf.path for pf in _all_patch_files(dpatches)}

    for line_no, line in enumerate(lint_state.lines):
        m = _RE_PATCH_LINE.match(line)
        if not m:
            continue
        groups = m.groupdict()
        orig_filename = groups["patch_name"]
        filename = orig_filename
        patch_start_col, patch_end_col = m.span("patch_name")
        orig_filename_start_col = patch_start_col
        if filename.startswith("#"):
            continue
        if filename.startswith(("../", "./", "/")):
            sm = _RE_UNNECESSARY_LEADING_PREFIX.match(filename)
            assert sm is not None
            slash_start, slash_end = sm.span(0)
            orig_filename_start_col = slash_end
            prefix = filename[:orig_filename_start_col]
            filename = filename[orig_filename_start_col:]
            slash_range = TERange(
                TEPosition(
                    line_no,
                    patch_start_col + slash_start,
                ),
                TEPosition(
                    line_no,
                    patch_start_col + slash_end,
                ),
            )
            skip_use_check = False
            if ".." in prefix:
                diagnostic_title = f'Disallowed prefix "{prefix}"'
                severity = cast("LintSeverity", "error")
                skip_use_check = True
            else:
                diagnostic_title = f'Unnecessary prefix "{prefix}"'
                severity = cast("LintSeverity", "warning")
            lint_state.emit_diagnostic(
                slash_range,
                diagnostic_title,
                severity,
                "debputy",
                quickfixes=[
                    propose_remove_range_quick_fix(
                        proposed_title=f'Remove prefix "{prefix}"'
                    )
                ],
            )
            if skip_use_check:
                continue
        if "//" in filename:  # coverage: branch never taken
            for usm in _RE_UNNECESSARY_SLASHES.finditer(filename):
                start_col, end_col = usm.span()
                slash_range = TERange(
                    TEPosition(
                        line_no,
                        orig_filename_start_col + start_col,
                    ),
                    TEPosition(
                        line_no,
                        orig_filename_start_col + end_col,
                    ),
                )
                lint_state.emit_diagnostic(
                    slash_range,
                    "Unnecessary slashes",
                    "warning",
                    "debputy",
                    quickfixes=[propose_correct_text_quick_fix("/")],
                )
            filename = _RE_UNNECESSARY_SLASHES.sub("/", filename)

        patch_name_range = TERange(
            TEPosition(
                line_no,
                patch_start_col,
            ),
            TEPosition(
                line_no,
                patch_end_col,
            ),
        )
        if not filename.lower().endswith((".diff", ".patch")):
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Patch not using ".patch" or ".diff" as extension: "{filename}"',
                "pedantic",
                "debputy",
            )
        patch_path = f"{dpatches.path}/{filename}"
        if patch_path not in all_patches:
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Non-existing patch "{filename}"',
                "error",
                "debputy",
            )
        elif patch_path in used_patches:
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Duplicate patch: "{filename}"',
                "error",
                "debputy",
            )
        else:
            used_patches.add(patch_path)

    unused_patches = all_patches - used_patches
    for unused_patch in sorted(unused_patches):
        patch_name = unused_patch[len(dpatches.path) + 1 :]
        line_count = len(lint_state.lines)
        file_range = TERange(
            TEPosition(
                0,
                0,
            ),
            TEPosition(
                line_count,
                len(lint_state.lines[-1]) if line_count else 0,
            ),
        )
        lint_state.emit_diagnostic(
            file_range,
            f'Unused patch: "{patch_name}"',
            "warning",
            "debputy",
        )
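# Worked example (editorial, assuming debian/patches/fixes/typo.patch
# exists): a series file containing
#     ../fixes/typo.patch
#     fixes/typo.patch
#     fixes/typo.patch
# produces a "Disallowed prefix" error for the first line (whose use-check
# is skipped), marks the patch as used via the second line, and reports the
# third line as a duplicate; any on-disk patch never listed is then flagged
# as unused against the whole file.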

@lsp_completer(_DISPATCH_RULE)
def _debian_patches_series_completions(
    ls: "DebputyLanguageServer",
    params: CompletionParams,
) -> CompletionList | Sequence[CompletionItem] | None:
    doc = ls.workspace.get_text_document(params.text_document.uri)
    lint_state = ls.lint_state(doc)
    source_root = lint_state.source_root
    dpatches = source_root.lookup("debian/patches") if source_root is not None else None
    if dpatches is None:
        return None
    lines = doc.lines
    position = doc.position_codec.position_from_client_units(lines, params.position)
    line = lines[position.line]
    if line.startswith("#"):
        return None
    try:
        line.rindex(" #", 0, position.character)
        return None  # Inside an end-of-line comment
    except ValueError:
        pass
    already_used = set(_listed_patches(lines))
    # Length of `debian/patches` plus the joining "/"
    dpatches_dir_len = len(dpatches.path) + 1
    all_patch_files_gen = (
        p.path[dpatches_dir_len:] for p in _all_patch_files(dpatches)
    )
    return [
        CompletionItem(
            p,
            kind=CompletionItemKind.File,
            insert_text=f"{p}\n",
            label_details=CompletionItemLabelDetails(
                description=f"debian/patches/{p}",
            ),
        )
        for p in all_patch_files_gen
        if p not in already_used
    ]
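# Usage note (editorial): only patches present on disk but not yet listed
# are offered, e.g. with debian/patches/{a.patch,b.patch} and "a.patch"
# already in the series, the sole candidate is "b.patch"; insert_text adds
# a trailing newline so the next entry starts on its own line.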

@lsp_semantic_tokens_full(_DISPATCH_RULE)
async def _debian_patches_semantic_tokens_full(
    ls: "DebputyLanguageServer",
    request: SemanticTokensParams,
) -> SemanticTokens | None:
    doc = ls.workspace.get_text_document(request.text_document.uri)
    lines = doc.lines
    position_codec = doc.position_codec

    tokens: list[int] = []
    string_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.String.value]
    comment_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Comment.value]
    options_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Keyword.value]
    sem_token_state = SemanticTokensState(
        ls,
        doc,
        lines,
        tokens,
    )

    async for line_no, line in ls.slow_iter(enumerate(lines)):
        if line.isspace():
            continue
        m = _RE_LINE_COMMENT.match(line)
        if m:
            start_col, end_col = m.span(1)
            start_pos = position_codec.position_to_client_units(
                sem_token_state.lines,
                Position(
                    line_no,
                    start_col,
                ),
            )
            sem_token_state.emit_token(
                start_pos,
                position_codec.client_num_units(line[start_col:end_col]),
                comment_token_code,
            )
            continue
        m = _RE_PATCH_LINE.match(line)
        if not m:  # coverage: branch never taken
            continue
        groups = m.groupdict()
        _emit_group(
            line_no,
            string_token_code,
            sem_token_state,
            "patch_name",
            groups,
            m,
        )
        _emit_group(
            line_no,
            options_token_code,
            sem_token_state,
            "options",
            groups,
            m,
        )
        _emit_group(
            line_no,
            comment_token_code,
            sem_token_state,
            "comment",
            groups,
            m,
        )

    return SemanticTokens(tokens)

def _emit_group(
    line_no: int,
    token_code: int,
    sem_token_state: SemanticTokensState,
    group_name: str,
    groups: Mapping[str, str],
    match: re.Match,
) -> None:
    value = groups.get(group_name)
    if not value:
        return None
    patch_start_col = match.start(group_name)
    position_codec = sem_token_state.doc.position_codec
    patch_start_pos = position_codec.position_to_client_units(
        sem_token_state.lines,
        Position(
            line_no,
            patch_start_col,
        ),
    )
    sem_token_state.emit_token(
        patch_start_pos,
        position_codec.client_num_units(value),
        token_code,
    )
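# Note (editorial): the flat `tokens` list returned by the handler above
# follows the LSP semantic-tokens wire format of five integers per token,
# each relative to the previous one:
#     [delta_line, delta_start_char, length, token_type_id, token_modifiers]
# SemanticTokensState.emit_token is assumed to append in that layout.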