Coverage for src/debputy/lsp/lsp_debian_patches_series.py: 71%

167 statements  

« prev     ^ index     » next       coverage.py v7.6.0, created at 2025-01-27 13:59 +0000

1import re 

2from typing import ( 

3 Union, 

4 Sequence, 

5 Optional, 

6 Iterable, 

7 List, 

8 Mapping, 

9 TYPE_CHECKING, 

10 cast, 

11) 

12 

13from debputy.filesystem_scan import VirtualPathBase 

14from debputy.linting.lint_util import LintState 

15from debputy.lsp.debputy_ls import DebputyLanguageServer 

16from debputy.lsp.lsp_features import ( 

17 lint_diagnostics, 

18 lsp_standard_handler, 

19 lsp_completer, 

20 lsp_semantic_tokens_full, 

21 SEMANTIC_TOKEN_TYPES_IDS, 

22 SecondaryLanguage, 

23 LanguageDispatchRule, 

24) 

25from debputy.lsp.quickfixes import ( 

26 propose_remove_range_quick_fix, 

27 propose_correct_text_quick_fix, 

28) 

29from debputy.lsp.text_util import ( 

30 SemanticTokensState, 

31) 

32from debputy.lsprotocol.types import ( 

33 CompletionItem, 

34 CompletionList, 

35 CompletionParams, 

36 TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL, 

37 SemanticTokensParams, 

38 SemanticTokens, 

39 SemanticTokenTypes, 

40 Position, 

41 CompletionItemKind, 

42 CompletionItemLabelDetails, 

43) 

44 

45if TYPE_CHECKING: 

46 import lsprotocol.types as types 

47else: 

48 import debputy.lsprotocol.types as types 

49 

50try: 

51 from debputy.lsp.vendoring._deb822_repro.locatable import ( 

52 Position as TEPosition, 

53 Range as TERange, 

54 START_POSITION, 

55 ) 

56 

57 from pygls.server import LanguageServer 

58 from pygls.workspace import TextDocument 

59except ImportError: 

60 pass 

61 

62 

# Dispatch rule binding this module's handlers to the
# "debian/patches/series" language ID, with a filename-based fallback
# ("patches/series") for clients that only report the file name.
_DISPATCH_RULE = LanguageDispatchRule.new_rule(
    "debian/patches/series",
    "patches/series",
    [
        SecondaryLanguage("patches/series", filename_based_lookup=True),
    ],
)


# Register the shared (boilerplate) LSP handlers for code actions and
# "will save (wait until)" requests for this file type.
lsp_standard_handler(_DISPATCH_RULE, types.TEXT_DOCUMENT_CODE_ACTION)
lsp_standard_handler(_DISPATCH_RULE, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)

74 

# A line consisting only of a comment; group 1 captures the comment with
# trailing whitespace stripped.
_RE_LINE_COMMENT = re.compile(r"^\s*(#(?:.*\S)?)\s*$")
# One series entry: a patch name, optionally followed by options and an
# end-of-line comment.
# NOTE(review): "#" is itself a non-space character, so <patch_name> also
# matches comment lines — callers must check for a leading "#" themselves.
_RE_PATCH_LINE = re.compile(
    r"""
    ^ \s* (?P<patch_name> \S+ ) \s*
    (?: (?P<options> [^#\s]+ ) \s* )?
    (?: (?P<comment> \# (?:.*\S)? ) \s* )?
""",
    re.VERBOSE,
)
# Redundant leading "./", "../" or "/" prefixes in front of a patch name.
_RE_UNNECESSARY_LEADING_PREFIX = re.compile(r"(?:(?:[.]{1,2})?/+)+")
# Runs of two or more slashes (normalize to a single "/").
_RE_UNNECESSARY_SLASHES = re.compile("//+")

86 

87 

def _all_patch_files(
    debian_patches: VirtualPathBase,
) -> Iterable[VirtualPathBase]:
    """Yield every path under debian/patches that could be a patch file.

    Directories, the series file itself, editor backup/lock files
    ("*~", ".#*", "#*") and "*.series" files placed directly in the
    patches directory are all skipped.
    """
    if not debian_patches.is_dir:
        return

    def _is_candidate(entry: VirtualPathBase) -> bool:
        # Only files can be patches; the series file is never a patch.
        if entry.is_dir:
            return False
        if entry.path in ("debian/patches/series", "./debian/patches/series"):
            return False
        name = entry.name
        # Editor backup and lock files.
        if name.endswith("~") or name.startswith((".#", "#")):
            return False
        containing_dir = entry.parent_dir
        in_patches_root = containing_dir is not None and containing_dir.path in (
            "debian/patches",
            "./debian/patches",
        )
        # Alternative vendor series files directly in debian/patches.
        return not (in_patches_root and name.endswith(".series"))

    yield from filter(_is_candidate, debian_patches.all_paths())

113 

114 

def _listed_patches(
    lines: List[str],
) -> Iterable[str]:
    """Yield the normalized patch names referenced by the series lines.

    Comment and blank lines are ignored.  Each yielded name has any
    redundant leading "./"/"/" prefix removed (once) and doubled slashes
    collapsed.
    """
    for raw_line in lines:
        match = _RE_PATCH_LINE.match(raw_line)
        if match is None:
            continue
        name = match.group("patch_name")
        if name.startswith("#"):
            # Whole line is a comment.
            continue
        name = _RE_UNNECESSARY_LEADING_PREFIX.sub("", name, count=1)
        name = _RE_UNNECESSARY_SLASHES.sub("/", name)
        if name:
            yield name

130 

131 

@lint_diagnostics(_DISPATCH_RULE)
def _lint_debian_patches_series(lint_state: LintState) -> None:
    """Emit lint diagnostics for a debian/patches/series file.

    For each listed patch the following is checked:
      * disallowed ("../") or unnecessary ("./", "/") leading prefixes,
      * unnecessary doubled slashes inside the name,
      * a ".patch"/".diff" file extension (pedantic),
      * that the referenced patch file exists,
      * that the patch is not listed twice.

    Finally, any patch file on disk that the series file never references
    is reported as unused (as a whole-file diagnostic).
    """
    source_root = lint_state.source_root
    if source_root is None:
        return None

    dpatches = source_root.lookup("debian/patches/")
    if dpatches is None or not dpatches.is_dir:
        return None

    used_patches = set()
    all_patches = {pf.path for pf in _all_patch_files(dpatches)}

    for line_no, line in enumerate(lint_state.lines):
        m = _RE_PATCH_LINE.match(line)
        if not m:
            continue
        groups = m.groupdict()
        orig_filename = groups["patch_name"]
        filename = orig_filename
        patch_start_col, patch_end_col = m.span("patch_name")
        orig_filename_start_col = patch_start_col
        if filename.startswith("#"):
            # The whole line is a comment.
            continue
        if filename.startswith(("../", "./", "/")):
            sm = _RE_UNNECESSARY_LEADING_PREFIX.match(filename)
            assert sm is not None
            slash_start, slash_end = sm.span(0)
            orig_filename_start_col = slash_end
            prefix = filename[:orig_filename_start_col]
            filename = filename[orig_filename_start_col:]
            slash_range = TERange(
                TEPosition(
                    line_no,
                    patch_start_col + slash_start,
                ),
                TEPosition(
                    line_no,
                    patch_start_col + slash_end,
                ),
            )
            skip_use_check = False
            if ".." in prefix:
                # "../" escapes the patches directory entirely: hard error
                # and no point in checking usage of the resulting name.
                diagnostic_title = f'Disallowed prefix "{prefix}"'
                severity = cast("LintSeverity", "error")
                skip_use_check = True
            else:
                diagnostic_title = f'Unnecessary prefix "{prefix}"'
                severity = cast("LintSeverity", "warning")
            lint_state.emit_diagnostic(
                slash_range,
                diagnostic_title,
                severity,
                "debputy",
                quickfixes=[
                    propose_remove_range_quick_fix(
                        proposed_title=f'Remove prefix "{prefix}"'
                    )
                ],
            )
            if skip_use_check:
                continue
        if "//" in filename:
            for usm in _RE_UNNECESSARY_SLASHES.finditer(filename):
                # BUG FIX: variable was previously misspelled `end_cold`.
                start_col, end_col = usm.span()
                slash_range = TERange(
                    TEPosition(
                        line_no,
                        orig_filename_start_col + start_col,
                    ),
                    TEPosition(
                        line_no,
                        orig_filename_start_col + end_col,
                    ),
                )
                lint_state.emit_diagnostic(
                    slash_range,
                    "Unnecessary slashes",
                    "warning",
                    "debputy",
                    quickfixes=[propose_correct_text_quick_fix("/")],
                )
            # Normalize so the existence check below uses the real path.
            filename = _RE_UNNECESSARY_SLASHES.sub("/", filename)

        patch_name_range = TERange(
            TEPosition(
                line_no,
                patch_start_col,
            ),
            TEPosition(
                line_no,
                patch_end_col,
            ),
        )
        if not filename.lower().endswith((".diff", ".patch")):
            # BUG FIX: the message previously contained the literal text
            # "(unknown)" instead of the patch name.
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Patch not using ".patch" or ".diff" as extension: "{orig_filename}"',
                "pedantic",
                "debputy",
            )
        # BUG FIX: previously f"{dpatches.path}/(unknown)", which never
        # matched `all_patches`, so every entry was flagged non-existing
        # and the duplicate/unused tracking was broken.
        patch_path = f"{dpatches.path}/{filename}"
        if patch_path not in all_patches:
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Non-existing patch "{orig_filename}"',
                "error",
                "debputy",
            )
        elif patch_path in used_patches:
            lint_state.emit_diagnostic(
                patch_name_range,
                f'Duplicate patch: "{orig_filename}"',
                "error",
                "debputy",
            )
        else:
            used_patches.add(patch_path)

    unused_patches = all_patches - used_patches
    # The whole-file range is loop-invariant; compute it once.
    line_count = len(lint_state.lines)
    file_range = TERange(
        TEPosition(
            0,
            0,
        ),
        TEPosition(
            line_count,
            len(lint_state.lines[-1]) if line_count else 0,
        ),
    )
    for unused_patch in sorted(unused_patches):
        # Strip "debian/patches" plus the separating "/".
        patch_name = unused_patch[len(dpatches.path) + 1 :]
        lint_state.emit_diagnostic(
            file_range,
            f'Unused patch: "{patch_name}"',
            "warning",
            "debputy",
        )

271 

272 

@lsp_completer(_DISPATCH_RULE)
def _debian_patches_series_completions(
    ls: "DebputyLanguageServer",
    params: CompletionParams,
) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
    """Offer the patch files not yet listed in the series as completions.

    Returns None when there is no debian/patches directory or when the
    cursor is positioned inside a comment.
    """
    document = ls.workspace.get_text_document(params.text_document.uri)
    lint_state = ls.lint_state(document)
    source_root = lint_state.source_root
    if source_root is None:
        return None
    dpatches = source_root.lookup("debian/patches")
    if dpatches is None:
        return None
    doc_lines = document.lines
    cursor = document.position_codec.position_from_client_units(
        doc_lines, params.position
    )
    current_line = doc_lines[cursor.line]
    if current_line.startswith("#"):
        # Comment-only line; nothing to complete.
        return None
    try:
        current_line.rindex(" #", 0, cursor.character)
    except ValueError:
        pass
    else:
        # The cursor sits inside an end-of-line comment.
        return None
    referenced = set(_listed_patches(doc_lines))
    # Strip `dpatches.path` plus the separating "/".
    strip_len = len(dpatches.path) + 1
    completions: List[CompletionItem] = []
    for patch_file in _all_patch_files(dpatches):
        rel_name = patch_file.path[strip_len:]
        if rel_name in referenced:
            continue
        completions.append(
            CompletionItem(
                rel_name,
                kind=CompletionItemKind.File,
                insert_text=f"{rel_name}\n",
                label_details=CompletionItemLabelDetails(
                    description=f"debian/patches/{rel_name}",
                ),
            )
        )
    return completions

312 

313 

@lsp_semantic_tokens_full(_DISPATCH_RULE)
def _debian_patches_semantic_tokens_full(
    ls: "DebputyLanguageServer",
    request: SemanticTokensParams,
) -> Optional[SemanticTokens]:
    """Produce semantic tokens for a debian/patches/series document.

    Patch names are highlighted as strings, options as keywords, and
    comments (both whole-line and end-of-line) as comments.
    """
    doc = ls.workspace.get_text_document(request.text_document.uri)
    lines = doc.lines
    position_codec = doc.position_codec

    tokens: List[int] = []
    string_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.String.value]
    comment_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Comment.value]
    options_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Keyword.value]
    sem_token_state = SemanticTokensState(
        ls,
        doc,
        lines,
        tokens,
    )

    # Each named group of _RE_PATCH_LINE paired with its token type.
    group_token_codes = (
        ("patch_name", string_token_code),
        ("options", options_token_code),
        ("comment", comment_token_code),
    )

    for line_no, line in enumerate(lines):
        if line.isspace():
            continue
        comment_match = _RE_LINE_COMMENT.match(line)
        if comment_match is not None:
            start_col, end_col = comment_match.span(1)
            comment_start = position_codec.position_to_client_units(
                sem_token_state.lines,
                Position(
                    line_no,
                    start_col,
                ),
            )
            sem_token_state.emit_token(
                comment_start,
                position_codec.client_num_units(line[start_col:end_col]),
                comment_token_code,
            )
            continue
        patch_match = _RE_PATCH_LINE.match(line)
        if patch_match is None:
            continue
        groups = patch_match.groupdict()
        for group_name, token_code in group_token_codes:
            _emit_group(
                line_no,
                token_code,
                sem_token_state,
                group_name,
                groups,
                patch_match,
            )

    return SemanticTokens(tokens)

383 

384 

def _emit_group(
    line_no: int,
    token_code: int,
    sem_token_state: SemanticTokensState,
    group_name: str,
    groups: Mapping[str, str],
    match: re.Match,
) -> None:
    """Emit one semantic token covering the named regex group, if present."""
    text = groups.get(group_name)
    if not text:
        # Group absent (or empty): nothing to highlight.
        return None
    start_col = match.start(group_name)
    position_codec = sem_token_state.doc.position_codec
    client_start = position_codec.position_to_client_units(
        sem_token_state.lines,
        Position(
            line_no,
            start_col,
        ),
    )
    sem_token_state.emit_token(
        client_start,
        position_codec.client_num_units(text),
        token_code,
    )