Coverage for src/debputy/deb_packaging_support.py: 24%

831 statements  

« prev     ^ index     » next       coverage.py v7.8.2, created at 2025-10-12 15:06 +0000

1import collections 

2import contextlib 

3import dataclasses 

4import datetime 

5import functools 

6import hashlib 

7import itertools 

8import operator 

9import os 

10import re 

11import shutil 

12import subprocess 

13import tempfile 

14import textwrap 

15from contextlib import ExitStack, suppress 

16from tempfile import mkstemp 

17from typing import ( 

18 List, 

19 Optional, 

20 Set, 

21 Dict, 

22 Tuple, 

23 Literal, 

24 TypeVar, 

25 FrozenSet, 

26 cast, 

27 Any, 

28 Union, 

29 AbstractSet, 

30 TYPE_CHECKING, 

31) 

32from collections.abc import Iterable, Sequence, Iterator, Mapping 

33 

34import debian.deb822 

35from debian.changelog import Changelog 

36from debian.deb822 import Deb822 

37from debputy._deb_options_profiles import DebBuildOptionsAndProfiles 

38from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable 

39from debputy.elf_util import find_all_elf_files, ELF_MAGIC 

40from debputy.exceptions import DebputyDpkgGensymbolsError, PureVirtualPathError 

41from debputy.filesystem_scan import FSPath, FSControlRootDir, VirtualPathBase 

42from debputy.maintscript_snippet import ( 

43 ALL_CONTROL_SCRIPTS, 

44 MaintscriptSnippetContainer, 

45 STD_CONTROL_SCRIPTS, 

46) 

47from debputy.packager_provided_files import PackagerProvidedFile 

48from debputy.packages import BinaryPackage, SourcePackage 

49from debputy.packaging.alternatives import process_alternatives 

50from debputy.packaging.debconf_templates import process_debconf_templates 

51from debputy.packaging.makeshlibs import ( 

52 compute_shlibs, 

53 ShlibsContent, 

54 generate_shlib_dirs, 

55 resolve_reserved_provided_file, 

56) 

57from debputy.plugin.api.feature_set import PluginProvidedFeatureSet 

58from debputy.plugin.api.impl import ServiceRegistryImpl 

59from debputy.plugin.api.impl_types import ( 

60 MetadataOrMaintscriptDetector, 

61 PackageDataTable, 

62 ServiceManagerDetails, 

63) 

64from debputy.plugin.api.spec import ( 

65 FlushableSubstvars, 

66 VirtualPath, 

67 PackageProcessingContext, 

68 ServiceDefinition, 

69) 

70from debputy.plugins.debputy.binary_package_rules import ServiceRule 

71from debputy.util import ( 

72 _error, 

73 ensure_dir, 

74 assume_not_none, 

75 resolve_perl_config, 

76 perlxs_api_dependency, 

77 detect_fakeroot, 

78 grouper, 

79 _info, 

80 xargs, 

81 escape_shell, 

82 generated_content_dir, 

83 print_command, 

84 _warn, 

85) 

86 

87if TYPE_CHECKING: 

88 from debputy.highlevel_manifest import ( 

89 HighLevelManifest, 

90 PackageTransformationDefinition, 

91 BinaryPackageData, 

92 ) 

93 

94 

# Covariant TypeVar so helpers can accept any VirtualPath subtype and
# preserve the concrete element type of the `children` lists they receive.
VP = TypeVar("VP", bound=VirtualPath, covariant=True)

# Matches library package names with a "t64" suffix (optionally "-nss").
# NOTE(review): presumably related to the 64-bit time_t transition; the
# constants are not used in this part of the file - confirm against the
# rest of the module.
_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
# Substvar name used for Provides entries for the t64 packages above.
_T64_PROVIDES = "t64:Provides"

99 

100 

def generate_md5sums_file(
    control_output_dir: VirtualPathBase,
    fs_root: VirtualPath,
) -> None:
    """Write the md5sums control file for a binary package.

    Every regular file in *fs_root* is checksummed except those listed in the
    package's `conffiles` control file (dpkg tracks conffiles separately).
    When nothing is left to checksum, no md5sums file is created at all.
    """
    skip_paths = set()
    conffiles = control_output_dir.get("conffiles")
    if conffiles and conffiles.is_file:
        with conffiles.open() as conffd:
            for raw_line in conffd:
                if raw_line.startswith("/"):
                    # conffile entries are absolute; fs paths are "./"-relative
                    skip_paths.add("." + raw_line.rstrip("\n"))
    # Sort in the same order as dh_md5sums, which is not quite the same as
    # dpkg/`all_paths()`.  Compare `.../doc/...` vs `.../doc-base/...` if you
    # want to see the difference between the two approaches.
    members = sorted(
        (m for m in fs_root.all_paths() if m.is_file and m.path not in skip_paths),
        key=lambda m: m.path,
    )
    if not members:
        return
    with control_output_dir.open_child("md5sums", "w") as out:
        for member in members:
            rel_path = member.path
            assert rel_path.startswith("./")
            rel_path = rel_path[2:]
            digest = hashlib.md5()
            with member.open(byte_io=True) as binfd:
                while block := binfd.read(8192):
                    digest.update(block)
            out.write(f"{digest.hexdigest()} {rel_path}\n")

136 

137 

def install_or_generate_conffiles(
    ctrl_root: FSPath | FSControlRootDir,
    fs_root: VirtualPath,
    reserved_packager_provided_files: dict[str, list[PackagerProvidedFile]],
) -> None:
    """Install a packager-provided conffiles file and auto-register /etc files.

    A non-empty packager-provided `conffiles` file is installed verbatim into
    the control root.  Additionally, every regular file below /etc is
    registered as a conffile.
    """
    provided = resolve_reserved_provided_file(
        "conffiles",
        reserved_packager_provided_files,
    )
    if provided and provided.is_file and provided.size > 0:
        ctrl_root.insert_file_from_fs_path(
            "conffiles",
            provided.fs_path,
            mode=0o644,
            reference_path=provided,
        )
    etc_dir = fs_root.lookup("etc")
    if etc_dir:
        _add_conffiles(
            ctrl_root,
            (entry for entry in etc_dir.all_paths() if entry.is_file),
        )

161 

162 

# Bit flags describing what kind of perl payload was detected in a package
# (see handle_perl_code, which ORs these together).
PERL_DEP_PROGRAM = 1  # perl script (executable or *.pl with a perl shebang)
PERL_DEP_INDEP_PM_MODULE = 2  # *.pm module under the arch-independent vendorlib
PERL_DEP_XS_MODULE = 4  # compiled XS extension (*.so under a vendor dir)
PERL_DEP_ARCH_PM_MODULE = 8  # *.pm module under the arch-dependent vendorarch
# Mask of flags that are NOT compatible with a "perl:any" dependency; only
# programs and arch-independent modules may use the ":any" qualifier.
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)

168 

169 

@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    """Return the installed version of *package* according to `dpkg -s`."""
    status_output = subprocess.check_output(["dpkg", "-s", package]).decode("utf-8")
    version = None
    for field_line in status_output.splitlines():
        if field_line.startswith("Version: "):
            version = field_line[8:].strip()
            break
    # A package that dpkg knows about always has a Version field.
    assert version is not None
    return version

184 

185 

def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
) -> None:
    """Detect perl content in the package and populate `perl:Depends`.

    Prunes empty vendor perl directories, classifies the perl payload
    (scripts, pure-perl modules, XS modules) via the PERL_DEP_* flags, and
    derives the resulting dependency on perl (with ":any" and a version
    constraint where applicable).
    """
    perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in (perl_config_data.vendorarch, perl_config_data.vendorlib):
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    for d, pm_mode in [
        (perl_config_data.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (perl_config_data.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            if path.name.endswith(".so"):
                # Compiled XS extension.
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                # Pure-perl module; flag depends on which vendor dir it is in.
                detected_dep_requirements |= pm_mode

    # Look for perl programs anywhere else in the package (skipping the doc tree).
    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            children.clear()
            continue
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        # Only programs / arch-independent modules detected, so ":any" is safe.
        dependency += ":any"

    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        # XS modules require at least the currently installed perl version.
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())

249 

250 

def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None:
    """Abort with an error when the package ships content below /usr/local."""
    usr_local = fs_root.lookup("./usr/local")
    if not usr_local or not any(usr_local.iterdir):
        return
    # There are two key issues:
    #  1) Getting the generated maintscript carried on to the final maintscript
    #  2) Making sure that manifest created directories do not trigger the "unused error".
    _error(
        f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})."
    )

260 

261 

262def _find_and_analyze_systemd_service_files( 

263 fs_root: VirtualPath, 

264 systemd_service_dir: Literal["system", "user"], 

265) -> Iterable[VirtualPath]: 

266 service_dirs = [ 

267 f"./usr/lib/systemd/{systemd_service_dir}", 

268 f"./lib/systemd/{systemd_service_dir}", 

269 ] 

270 aliases: dict[str, list[str]] = collections.defaultdict(list) 

271 seen = set() 

272 all_files = [] 

273 

274 for d in service_dirs: 

275 system_dir = fs_root.lookup(d) 

276 if not system_dir: 

277 continue 

278 for child in system_dir.iterdir: 

279 if child.is_symlink: 

280 dest = os.path.basename(child.readlink()) 

281 aliases[dest].append(child.name) 

282 elif child.is_file and child.name not in seen: 

283 seen.add(child.name) 

284 all_files.append(child) 

285 

286 return all_files 

287 

288 

def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    """Abort with an error if the package ships any systemd user unit file."""
    user_units = _find_and_analyze_systemd_service_files(fs_root, "user")
    for service_file in user_units:
        _error(
            f'Sorry, systemd user services files are not supported at the moment (saw "{service_file.path}"'
            f" in {dctrl.name})"
        )

298 

299 

# Changelog-trimming policy used by _prune_dch_file: entries signed off
# before this date may be dropped...
# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
# ...but at least this many non-binNMU entries are always kept.
_DCH_MIN_NUM_OF_ENTRIES = 4

303 

304 

def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: set[str] | None,
    *,
    trim: bool = True,
) -> tuple[bool, set[str] | None]:
    """Prune old entries from an installed changelog or NEWS file.

    For changelogs (``is_changelog=True``): binNMU entries are always kept,
    and (when *trim* is true) regular entries are kept until both the
    minimum-entry count is satisfied and the entry predates the cut-off date.
    For NEWS files: only entries whose version appears in *keep_versions* are
    kept; if nothing remains, the file is deleted (see #1021607).

    Returns ``(was_shortened, kept_versions)`` where *kept_versions* is the
    set of versions retained in a shortened changelog (``None`` otherwise).
    """
    # TODO: Process `d/changelog` once
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error(f"The Debian changelog was missing date in sign off line")
            entry_date = datetime.datetime.strptime(
                block_date, "%a, %d %b %Y %H:%M:%S %z"
            ).date()
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                # Old enough to prune and we already kept the minimum number.
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        # Nothing changed; leave the file untouched.
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    # Rewrite the (possibly shortened) file in place.
    with (
        path.replace_fs_path_content() as fs_path,
        open(fs_path, "w", encoding="utf-8") as fd,
    ):
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(
                f"""\
                # Older entries have been removed from this changelog.
                # To read the complete changelog use `apt changelog {package.name}`.
                """
            )
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        # binNMU entries go into a separate arch-qualified changelog file
        # next to the main one.
        with (
            parent_dir.add_file(
                f"{path.name}.{package.resolved_architecture}"
            ) as binnmu_changelog,
            open(
                binnmu_changelog.fs_path,
                "w",
                encoding="utf-8",
            ) as binnmu_fd,
        ):
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}

402 

403 

def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    """Rename and trim the installed Debian changelog and NEWS files.

    For native packages the changelog is installed as plain `changelog`
    (rather than `changelog.Debian`).  Unless the `notrimdch` build option is
    set, old changelog entries are pruned via `_prune_dch_file` and the
    NEWS.Debian file is reduced to the versions kept in the changelog.
    """
    doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not doc_dir:
        return
    changelog = doc_dir.get("changelog.Debian")
    if changelog and is_native:
        changelog.name = "changelog"
    elif is_native:
        # Native packages may have installed it under the final name already.
        changelog = doc_dir.get("changelog")

    # Idiom fix: previously spelled `False if ... else True`.
    trim = "notrimdch" not in build_env.deb_build_options

    kept_entries = None
    pruned_changelog = False
    if changelog and changelog.has_fs_path:
        pruned_changelog, kept_entries = _prune_dch_file(
            dctrl, changelog, True, None, trim=trim
        )

    if not trim:
        return

    news_file = doc_dir.get("NEWS.Debian")
    if news_file and news_file.has_fs_path and pruned_changelog:
        # Keep NEWS in sync with the versions that survived the changelog prune.
        _prune_dch_file(dctrl, news_file, False, kept_entries)

434 

435 

436_UPSTREAM_CHANGELOG_SOURCE_DIRS = [ 

437 ".", 

438 "doc", 

439 "docs", 

440] 

441_UPSTREAM_CHANGELOG_NAMES = { 

442 # The value is a priority to match the debhelper order. 

443 # - The suffix weights heavier than the basename (because that is what debhelper did) 

444 # 

445 # We list the name/suffix in order of priority in the code. That makes it easier to 

446 # see the priority directly, but it gives the "lowest" value to the most important items 

447 f"{n}{s}": (sw, nw) 

448 for (nw, n), (sw, s) in itertools.product( 

449 enumerate(["changelog", "changes", "history"], start=1), 

450 enumerate(["", ".txt", ".md", ".rst"], start=1), 

451 ) 

452} 

453_NONE_TUPLE = (None, (0, 0)) 

454 

455 

456def _detect_upstream_changelog(names: Iterable[str]) -> str | None: 

457 matches = [] 

458 for name in names: 

459 match_priority = _UPSTREAM_CHANGELOG_NAMES.get(name.lower()) 

460 if match_priority is not None: 

461 matches.append((name, match_priority)) 

462 return min(matches, default=_NONE_TUPLE, key=operator.itemgetter(1))[0] 

463 

464 

def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: FSPath,
    source_fs_root: VirtualPath,
) -> None:
    """Install the upstream changelog as /usr/share/doc/<pkg>/changelog.

    A changelog already present in the package's doc dir takes priority;
    otherwise well-known locations in the source tree are searched.
    """
    doc_dir = f"./usr/share/doc/{dctrl_bin.name}"
    bdir = fs_root.lookup(doc_dir)
    if bdir and not bdir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if bdir:
        if bdir.get("changelog") or bdir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        installed_candidate = _detect_upstream_changelog(
            entry.name
            for entry in bdir.iterdir
            if entry.is_file and entry.has_fs_path and entry.size > 0
        )
        if installed_candidate:
            entry = bdir.lookup(installed_candidate)
            assert entry is not None  # Mostly as a typing hint
            entry.name = "changelog"
            return

    # Fall back to the source tree.
    for dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        src_dir = source_fs_root.lookup(dirname)
        if not src_dir or not src_dir.is_dir:
            continue
        candidate = _detect_upstream_changelog(
            entry.name
            for entry in src_dir.iterdir
            if entry.is_file and entry.has_fs_path and entry.size > 0
        )
        if not candidate:
            continue
        if bdir is None:
            bdir = fs_root.mkdirs(doc_dir)
        bdir.insert_file_from_fs_path(
            "changelog",
            src_dir[candidate].fs_path,
        )
        break

507 

508 

@dataclasses.dataclass(slots=True)
class _ElfInfo:
    """Book-keeping record for one ELF file being stripped/relocated."""

    # The path inside the package's virtual fs tree.
    path: VirtualPath
    # Writable on-disk path (obtained via `replace_fs_path_content()`).
    fs_path: str
    # Whether file(1) reported the binary as stripped; None until resolved.
    is_stripped: bool | None = None
    # GNU build-id (hex) as reported by file(1), if any.
    build_id: str | None = None
    # The extracted .debug file in the dbgsym tree, once created.
    dbgsym: FSPath | None = None

516 

517 

def _elf_static_lib_walk_filter(
    fs_path: VirtualPath,
    children: list[VP],
) -> bool:
    """Walk filter selecting which paths are candidates for stripping.

    Returns False (pruning the subtree) for `.build-id` directories directly
    below a `debug` directory.  Also removes `.so` files in debug trees and
    `.go` files in guile trees from *children* so they are never stripped.
    """
    if (
        fs_path.name == ".build-id"
        and assume_not_none(fs_path.parent_dir).name == "debug"
    ):
        children.clear()
        return False
    # Deal with some special cases, where certain files are not supposed to be stripped in a given directory
    # NOTE(review): `fs_path.name.endswith("debug/")` looks unreachable if
    # `name` is a plain basename (it would never contain "/") - confirm
    # whether `endswith("debug")` was intended.
    if "debug/" in fs_path.path or fs_path.name.endswith("debug/"):
        # FIXME: We need a way to opt out of this per #468333/#1016122
        for so_file in (f for f in list(children) if f.name.endswith(".so")):
            children.remove(so_file)
    if "/guile/" in fs_path.path or fs_path.name == "guile":
        for go_file in (f for f in list(children) if f.name.endswith(".go")):
            children.remove(go_file)
    return True

537 

538 

@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[dict[str, _ElfInfo]]:
    """Context manager yielding {writable fs path: _ElfInfo} for all ELF files.

    Each ELF file is detached for in-place modification via
    `replace_fs_path_content()`; the replacements are finalized when the
    context exits.  Build-ids and strip status are resolved before yielding.
    """
    all_elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not all_elf_files:
        yield {}
        return
    with ExitStack() as cm_stack:
        # Lazily pair each virtual path with its writable (detached) fs path.
        resolved = (
            (p, cm_stack.enter_context(p.replace_fs_path_content()))
            for p in all_elf_files
        )
        elf_info = {
            fs_path: _ElfInfo(
                # NOTE(review): re-looked up after detaching - presumably to
                # record the node in its post-detach state; confirm.
                path=assume_not_none(fs_root.lookup(detached_path.path)),
                fs_path=fs_path,
            )
            for detached_path, fs_path in resolved
        }
        _resolve_build_ids(elf_info)
        yield elf_info

562 

563 

def _find_all_static_libs(
    fs_root: FSPath,
) -> Iterator[FSPath]:
    """Yield every static library (ar archive with binary content) in *fs_root*."""
    for path, children in fs_root.walk():
        # Matching the logic of dh_strip for now.
        if not _elf_static_lib_walk_filter(path, children):
            continue
        if not path.is_file:
            continue
        basename = path.name
        if basename.startswith("lib") and basename.endswith("_g.a"):
            # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
            # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
            # symbols")
            continue
        if not path.has_fs_path:
            continue
        with path.open(byte_io=True) as fd:
            if fd.read(8) not in (b"!<arch>\n", b"!<thin>\n"):
                continue
            # Maybe we should see if the first file looks like an index file.
            # Three random .a samples suggests the index file is named "/"
            # Not sure if we should skip past it and then do the ELF check or just assume
            # that "index => static lib".
            blob = fd.read(1024 * 1024)
            if b"\0" not in blob and ELF_MAGIC not in blob:
                continue
        yield path

592 

593 

@contextlib.contextmanager
def _all_static_libs(fs_root: FSPath) -> Iterator[list[str]]:
    """Context manager yielding writable fs paths for every static library.

    The paths are detached via `replace_fs_path_content()` so callers may
    rewrite them in place; the replacements are finalized on context exit.
    """
    static_libs = list(_find_all_static_libs(fs_root))
    if not static_libs:
        yield []
        return
    with ExitStack() as stack:
        yield [
            stack.enter_context(lib.replace_fs_path_content())
            for lib in static_libs
        ]

605 

606 

607_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)") 

608 

609 

def _resolve_build_ids(elf_info: dict[str, _ElfInfo]) -> None:
    """Populate `is_stripped` and `build_id` on every entry via file(1)."""
    base_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        # NOTE(review): presumably fakeroot interferes with file(1)'s
        # sandboxing - confirm.
        base_cmd.append("--no-sandbox")

    for cmd in xargs(base_cmd, (entry.fs_path for entry in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        raw_output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        records = raw_output.rstrip(b"\0").split(b"\0")

        # file -00 emits alternating "<path>\0<verdict>\0" records.
        for path_bytes, verdict in grouper(records, 2, incomplete="strict"):
            entry = elf_info[path_bytes.decode("utf-8")]
            entry.is_stripped = b"not stripped" not in verdict
            match = _FILE_BUILD_ID_RE.search(verdict)
            if match is not None:
                entry.build_id = match.group(1).decode("utf-8")

629 

630 

def _make_debug_file(
    objcopy: str, fs_path: str, build_id: str, dbgsym_fs_root: FSPath
) -> FSPath:
    """Extract debug symbols from *fs_path* into the dbgsym package tree.

    The .debug file is placed at
    `usr/lib/debug/.build-id/<id[:2]>/<id[2:]>.debug`; an existing entry for
    the same build-id is reused.  Aborts with an error if objcopy fails.
    """
    dbgsym_dirname = f"./usr/lib/debug/.build-id/{build_id[0:2]}/"
    dbgsym_basename = f"{build_id[2:]}.debug"
    dbgsym_dir = dbgsym_fs_root.mkdirs(dbgsym_dirname)
    if dbgsym_basename in dbgsym_dir:
        # Another ELF file with the same build-id already produced this file.
        return dbgsym_dir[dbgsym_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with dbgsym_dir.add_file(
        dbgsym_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as dbgsym:
        try:
            subprocess.check_call(
                [
                    objcopy,
                    "--only-keep-debug",
                    "--compress-debug-sections",
                    fs_path,
                    dbgsym.fs_path,
                ]
            )
        except subprocess.CalledProcessError:
            # Reconstruct the command for the error message.
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, dbgsym.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return dbgsym

668 

669 

670def _strip_binary(strip: str, options: list[str], paths: Iterable[str]) -> None: 

671 # We assume the paths are obtained via `p.replace_fs_path_content()`, 

672 # which is the case at the time of written and should remain so forever. 

673 it = iter(paths) 

674 first = next(it, None) 

675 if first is None: 

676 return 

677 static_cmd = [strip] 

678 static_cmd.extend(options) 

679 

680 for cmd in xargs(static_cmd, itertools.chain((first,), (f for f in it))): 

681 _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}") 

682 try: 

683 subprocess.check_call( 

684 cmd, 

685 stdin=subprocess.DEVNULL, 

686 restore_signals=True, 

687 ) 

688 except subprocess.CalledProcessError: 

689 _error( 

690 f"Attempting to remove ELF debug info failed. Please review the error from {strip} above" 

691 f" understand what went wrong." 

692 ) 

693 

694 

def _attach_debug(objcopy: str, elf_binary: VirtualPath, dbgsym: FSPath) -> None:
    """Attach a .gnu_debuglink section pointing at *dbgsym* to *elf_binary*."""
    with dbgsym.replace_fs_path_content() as debug_fs_path:
        link_cmd = [
            objcopy,
            "--add-gnu-debuglink",
            debug_fs_path,
            elf_binary.fs_path,
        ]
        print_command(*link_cmd)
        try:
            subprocess.check_call(link_cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above understand what went wrong."
            )

707 

708 

709@functools.lru_cache 

710def _has_tool(tool: str) -> bool: 

711 return shutil.which(tool) is not None 

712 

713 

def _run_dwz(
    dctrl: BinaryPackage,
    dbgsym_fs_root: FSPath,
    unstripped_elf_info: list[_ElfInfo],
) -> None:
    """Deduplicate DWARF data across the package's ELF files with dwz.

    Skipped for udebs, when there is nothing to process, or when dwz is not
    installed.  With more than one ELF file, shared DWARF is written to a
    multi-arch "multifile" that is shipped in the dbgsym package.
    """
    if not unstripped_elf_info or dctrl.is_udeb or not _has_tool("dwz"):
        return
    dwz_cmd = ["dwz"]
    dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
    dwz_ma_basename = f"{dctrl.name}.debug"
    multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
    build_time_multifile = None
    if len(unstripped_elf_info) > 1:
        # Multifile mode: dwz writes shared DWARF to a temp file (-m) while
        # the ELF files reference its installed location (-M).
        fs_content_dir = generated_content_dir()
        fd, build_time_multifile = mkstemp(suffix=dwz_ma_basename, dir=fs_content_dir)
        os.close(fd)
        dwz_cmd.append(f"-m{build_time_multifile}")
        dwz_cmd.append(f"-M/{multifile}")

    # TODO: configuration for disabling multi-file and tweaking memory limits

    dwz_cmd.extend(e.fs_path for e in unstripped_elf_info)

    _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
    try:
        subprocess.check_call(dwz_cmd)
    except subprocess.CalledProcessError:
        _error(
            "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
            " to understand what went wrong."
        )
    # Only ship the multifile when dwz actually put something into it.
    if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
        dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
        dwz_dir.insert_file_from_fs_path(
            dwz_ma_basename,
            build_time_multifile,
            mode=0o644,
            require_copy_on_write=False,
            follow_symlinks=False,
        )

754 

755 

def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: FSPath,
    dbgsym_fs_root: VirtualPath,
    *,
    run_dwz: bool = False,
) -> list[str]:
    """Strip the package's binaries and move debug info into the dbgsym tree.

    Static libraries are debug-stripped in place.  For ELF files: optionally
    run dwz, extract .debug files into *dbgsym_fs_root*, strip the originals
    (with different flag sets for executables vs. libraries, mirroring
    debhelper), and attach .gnu_debuglink sections.

    Returns the sorted, de-duplicated list of build-ids placed into the
    dbgsym package (empty when there were no ELF files).
    """
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        if run_dwz:
            _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        # Extract the debug data before the originals are stripped below.
        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When run strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

        # Set for uniqueness
        all_debug_info = sorted(
            {assume_not_none(i.build_id) for i in unstripped_elf_info}
        )

        dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
        dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
        return all_debug_info

836 

837 

def run_package_processors(
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
) -> None:
    """Run every applicable plugin-provided package processor on *fs_root*."""
    binary_package = package_metadata_context.binary_package
    feature_set = manifest.plugin_provided_feature_set
    for processor in feature_set.package_processors_in_order():
        if processor.applies_to(binary_package):
            processor.run_package_processor(fs_root, None, package_metadata_context)

849 

850 

def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: "HighLevelManifest",
) -> None:
    """Compute shlibs/symbols state shared across all binary packages.

    Builds a combined shlibs database (seeded from debian/shlibs.local when
    present), runs the shlibs computation for every arch-dependent package
    being built, and finally records the generated shlibs.local plus the
    materialized shlib directories on each package's control-file creator for
    later use by dpkg-shlibdeps.  dpkg-gensymbols errors are collected and
    reported together before aborting.
    """
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: list[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        # shlibs handling only applies to arch-dependent packages being acted on.
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        fs_root = binary_package_data.fs_root
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = (
            binary_package_data.package_metadata_context.related_udeb_package
        )

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                binary_package,
                binary_package_data.control_output_dir.fs_path,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            # Collect and continue so all problems are reported in one run.
            errors.append(e.message)
        else:
            if soname_info_list:
                # Lazily create the materialization dir on first use.
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        # Write the combined database where dpkg-shlibdeps can find it.
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "w", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )

930 

931 

932def _relevant_service_definitions( 

933 service_rule: ServiceRule, 

934 service_managers: list[str] | frozenset[str], 

935 by_service_manager_key: Mapping[ 

936 tuple[str, str, str, str], tuple[ServiceManagerDetails, ServiceDefinition[Any]] 

937 ], 

938 aliases: Mapping[str, Sequence[tuple[str, str, str, str]]], 

939) -> Iterable[tuple[tuple[str, str, str, str], ServiceDefinition[Any]]]: 

940 as_keys = (key for key in aliases[service_rule.service]) 

941 

942 pending_queue = { 

943 key 

944 for key in as_keys 

945 if key in by_service_manager_key 

946 and service_rule.applies_to_service_manager(key[-1]) 

947 } 

948 relevant_names: dict[tuple[str, str, str, str], ServiceDefinition[Any]] = {} 

949 seen_keys = set() 

950 

951 if not pending_queue: 

952 service_manager_names = ", ".join(sorted(service_managers)) 

953 _error( 

954 f"No none of the service managers ({service_manager_names}) detected a service named" 

955 f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope})," 

956 f" but the manifest definition at {service_rule.definition_source} requested that." 

957 ) 

958 

959 while pending_queue: 

960 next_key = pending_queue.pop() 

961 seen_keys.add(next_key) 

962 _, definition = by_service_manager_key[next_key] 

963 yield next_key, definition 

964 for name in definition.names: 

965 for target_key in aliases[name]: 

966 if ( 

967 target_key not in seen_keys 

968 and service_rule.applies_to_service_manager(target_key[-1]) 

969 ): 

970 pending_queue.add(target_key) 

971 

972 return relevant_names.items() 

973 

974 

def handle_service_management(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:
    """Detect services in the package and apply the manifest's service rules.

    Runs every plugin-provided service detector over the package contents,
    applies the manifest's per-package service rules to the detected
    definitions, and finally hands the (possibly rewritten) definitions to
    each service manager's integrator so it can register the relevant
    control snippets.

    :param binary_package_data: Data for the binary package being processed.
    :param manifest: The high level manifest (source of the service rules).
    :param package_metadata_context: Context passed to detectors/integrators.
    :param fs_root: Root of the package's file system tree to scan.
    :param feature_set: Plugin feature set providing the service managers.
    """

    # Maps (name, type-of-service, scope, service-manager) to the
    # (service-manager details, service definition) pair that produced it.
    by_service_manager_key = {}
    # Maps each name/alias of a detected service to all keys carrying it.
    aliases_by_name = collections.defaultdict(list)

    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    # Up-front validation: reject rules referencing unknown service managers.
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )

    # Let each service manager's detector scan the package contents and
    # register the services it finds.
    for service_manager_details in feature_set.service_managers.values():
        service_registry: ServiceRegistryImpl = ServiceRegistryImpl(
            service_manager_details
        )
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )

        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue

        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )

            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)

    # Apply each rule to every definition it matches, replacing the stored
    # definition with the rewritten one in-place.
    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            sm = service_key[-1]
            seen_service_managers.add(sm)
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )
        # A rule that explicitly named its service managers must have matched
        # a service in every one of them; otherwise the manifest is likely
        # wrong.
        # NOTE(review): the `!=` and `-` below assume `service_managers` is a
        # set-like type when explicitly given (a plain list would make the
        # comparison always unequal and the difference fail) — confirm.
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition match the relevant services."
            )

    # Regroup the final (post-rule) definitions per service manager so each
    # integrator is invoked once with all of its definitions.
    per_service_manager = {}

    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)

    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )

1090 

1091 

def setup_control_files(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: list[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    """Populate the DEBIAN/ (control) directory of the binary package.

    Runs the metadata/maintscript detectors and materializes maintainer
    scripts, triggers, conffiles and finally the control file itself (plus
    the dbgsym control file, when relevant).

    :param binary_package_data: Data for the binary package being processed.
    :param manifest: The high level manifest.
    :param dbgsym_fs_root: Root of the dbgsym package contents (skipped
      entirely for udebs).
    :param dbgsym_ids: Build-ids collected for the dbgsym package.
    :param package_metadata_context: Context passed to the detectors.
    :param allow_ctrl_file_management: When False (integration modes where
      `dh_installdeb` has already run), debputy must not generate maintscript
      snippets or triggers itself; instead it copies the control files that
      debhelper already staged in debian/<pkg>/DEBIAN and errors out on any
      definition that would require snippet/trigger generation.
    """
    binary_package = package_metadata_context.binary_package
    control_output_dir = binary_package_data.control_output_dir
    control_output_fs_path = control_output_dir.fs_path
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)

    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars

    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())

    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        snippets = ["postinst"]

    if allow_ctrl_file_management:
        # Full debputy-managed mode: run all generators/detectors and write
        # the maintainer scripts ourselves.
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_fs_path,
        )

        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )

        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )

        for script in snippets:
            _generate_snippet(
                control_output_fs_path,
                script,
                package_state.maintscript_snippets,
            )

    else:
        # dh_installdeb has already run: reject anything that would require
        # debputy to emit snippets or triggers of its own.
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )

        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )

        # The dpkg-shlibdeps detector still runs in this mode (it only
        # computes substvars, not control files).
        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]

        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)

        # Take over the control files that debhelper staged (except the ones
        # debputy regenerates itself: control and md5sums).
        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            existing_control_files = []

        if existing_control_files:
            # `cp -a` preserves permissions/timestamps of the staged files.
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_fs_path)
            print_command(*cmd)
            subprocess.check_call(cmd)

    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return

    if generated_triggers:
        assert allow_ctrl_file_management
        dest_file = os.path.join(control_output_fs_path, "triggers")
        # Append: the package may also ship its own triggers file.
        with open(dest_file, "a", encoding="utf-8") as fd:
            fd.writelines(
                textwrap.dedent(
                    f"""\
                    # Added by {t.provider_source_id} from {t.provider.plugin_name}
                    {t.dpkg_trigger_type} {t.dpkg_trigger_target}
                    """
                )
                for t in generated_triggers
            )
            os.chmod(fd.fileno(), 0o644)

    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            control_output_dir,
            fs_root,
            package_state.reserved_packager_provided_files,
        )

    _generate_control_files(
        binary_package_data,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )

1265 

1266 

def _generate_snippet(
    control_output_dir: str,
    script: str,
    maintscript_snippets: dict[str, MaintscriptSnippetContainer],
) -> None:
    """Write the maintainer script for `script` if any snippets exist for it.

    Combines the plain and the "service"-ordered snippets (reversed for the
    removal scripts so teardown runs in the opposite order of setup) and
    emits an executable `#!/bin/sh` script into `control_output_dir`.
    Nothing is written when no snippet produced content.
    """
    container = maintscript_snippets.get(script)
    if container is None:
        return
    is_removal_script = script in ("prerm", "postrm")
    parts = [
        container.generate_snippet(reverse=is_removal_script),
        container.generate_snippet(snippet_order="service", reverse=is_removal_script),
    ]
    if is_removal_script:
        # Removal scripts undo things in the opposite order.
        parts.reverse()
    body = "".join(f"{part}\n" for part in parts if part)
    if not body:
        return
    script_path = os.path.join(control_output_dir, script)
    with open(script_path, "w") as fd:
        fd.write("#!/bin/sh\nset -e\n\n")
        fd.write(body)
        os.chmod(fd.fileno(), 0o755)

1290 

1291 

def _add_conffiles(
    ctrl_root: VirtualPathBase,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    """Append the absolute path of every match to the DEBIAN/conffiles file.

    Peeks at the iterable first so the conffiles file is only opened (and
    thereby created) when there is at least one entry to record.
    """
    matches = iter(conffile_matches)
    head = next(matches, None)
    if head is None:
        # No conffiles; avoid creating an empty conffiles control file.
        return
    with ctrl_root.open_child("conffiles", "at") as fd:
        for match in itertools.chain([head], matches):
            absolute_path = match.absolute
            assert match.is_file
            fd.write(f"{absolute_path}\n")

1306 

1307 

def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    """Ensure the baseline misc:* substvars exist (possibly as empty strings).

    Presumably this avoids "unknown substitution variable" noise from
    dpkg-gencontrol when no detector set them — TODO confirm.
    """
    for name in ("misc:Depends", "misc:Pre-Depends"):
        if name in substvars:
            continue
        substvars[name] = ""

1312 

1313 

def compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size"""
    total_kb = 0
    seen_hard_links: set[tuple[int, int]] = set()
    for entry in fs_root.all_paths():
        if not (entry.is_symlink or entry.is_file):
            # Directories (and other non-file entries) count as a flat 1 KiB.
            total_kb += 1
            continue
        try:
            st = entry.stat()
            if st.st_nlink > 1:
                link_key = (st.st_dev, st.st_ino)
                if link_key in seen_hard_links:
                    # Additional names of the same inode are not counted twice.
                    continue
                seen_hard_links.add(link_key)
            size = st.st_size
        except PureVirtualPathError:
            # We just assume it is not a hard link when the path is purely virtual
            size = entry.size
        # Round each entry up to whole KiB, mirroring dpkg-gencontrol.
        total_kb += (size + 1023) // 1024
    return total_kb

1336 

1337 

def _generate_dbgsym_control_file_if_relevant(
    binary_package: BinaryPackage,
    dbgsym_fs_root: VirtualPath,
    dbgsym_control_dir: FSControlRootDir,
    dbgsym_ids: str,
    multi_arch: str | None,
    dctrl: str,
    extra_common_params: Sequence[str],
) -> None:
    """Generate the DEBIAN/control file for the -dbgsym companion package.

    Invokes `dpkg-gencontrol` with field overrides so the parent package's
    metadata is rewritten for the debug-symbol package (name, Section,
    dependency on the exact parent version, Build-Ids, etc.).

    :param binary_package: The parent binary package.
    :param dbgsym_fs_root: Root of the dbgsym data contents (for
      Installed-Size).
    :param dbgsym_control_dir: Control root dir of the dbgsym package.
    :param dbgsym_ids: Space-separated build-ids for the Build-Ids field.
    :param multi_arch: The parent's Multi-Arch value; only "same" is
      propagated to the dbgsym package.
    :param dctrl: Path of the debian/control (or generated replacement) file.
    :param extra_common_params: dpkg-gencontrol parameters shared with the
      parent package's invocation.
    """
    section = binary_package.archive_section
    component = ""
    extra_params = []
    # Keep the dbgsym package in the same archive component as the parent
    # (e.g. "contrib/"); "main" has no prefix.
    if section is not None and "/" in section and not section.startswith("main/"):
        component = section.split("/", 1)[1] + "/"
    if multi_arch != "same":
        extra_params.append("-UMulti-Arch")
    else:
        extra_params.append(f"-DMulti-Arch={multi_arch}")
    extra_params.append("-UReplaces")
    extra_params.append("-UBreaks")
    dbgsym_control_fs_path = dbgsym_control_dir.fs_path
    ensure_dir(dbgsym_control_fs_path)
    # Pass it via cmd-line to make it more visible that we are providing the
    # value. It also prevents the dbgsym package from picking up this value.
    total_size = compute_installed_size(dbgsym_fs_root) + compute_installed_size(
        dbgsym_control_dir
    )
    extra_params.append(f"-VInstalled-Size={total_size}")
    extra_params.extend(extra_common_params)

    package = binary_package.name
    package_selector = (
        binary_package.name
        if dctrl == "debian/control"
        else f"{binary_package.name}-dbgsym"
    )
    dpkg_cmd = [
        "dpkg-gencontrol",
        f"-p{package_selector}",
        # FIXME: Support d/<pkg>.changelog at some point.
        "-ldebian/changelog",
        "-T/dev/null",
        f"-c{dctrl}",
        f"-O{dbgsym_control_fs_path}/control",
        # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
        "-P/non-existent",
        f"-DPackage={package}-dbgsym",
        "-DDepends=" + package + " (= ${binary:Version})",
        f"-DDescription=debug symbols for {package}",
        f"-DSection={component}debug",
        f"-DBuild-Ids={dbgsym_ids}",
        "-UPre-Depends",
        "-URecommends",
        "-USuggests",
        "-UEnhances",
        "-UProvides",
        "-UEssential",
        "-UConflicts",
        "-DPriority=optional",
        "-UHomepage",
        "-UImportant",
        "-UBuilt-Using",
        "-UStatic-Built-Using",
        "-DAuto-Built-Package=debug-symbols",
        "-UProtected",
        *extra_params,
    ]
    print_command(*dpkg_cmd)
    try:
        subprocess.check_call(dpkg_cmd)
    except subprocess.CalledProcessError:
        # Fixed: adjacent literals previously rendered a double space
        # ("from  dpkg-gencontrol") in this message.
        _error(
            f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output"
            " from dpkg-gencontrol above to understand what went wrong."
        )
    os.chmod(os.path.join(dbgsym_control_fs_path, "control"), 0o644)

1414 

1415 

1416def _all_parent_directories_of(directories: Iterable[str]) -> set[str]: 

1417 result = {"."} 

1418 for path in directories: 

1419 current = os.path.dirname(path) 

1420 while current and current not in result: 

1421 result.add(current) 

1422 current = os.path.dirname(current) 

1423 return result 

1424 

1425 

def _compute_multi_arch_for_arch_all_doc(
    binary_package: BinaryPackage,
    fs_root: FSPath,
) -> str | None:
    """Return "foreign" for an arch:all -doc package with only doc content.

    Only packages whose name ends in -doc/-docs are considered: there are
    tricks involving a `Multi-Arch: no` depending on a `Multi-Arch: same`
    package to emulate `Multi-Arch: allowed`, and such a `Multi-Arch: no`
    package can legitimately be content-free.  That case seems unrealistic
    for -doc/-docs packages, hence the name-based restriction.
    """
    pkg_name = binary_package.name
    if not pkg_name.endswith(("-doc", "-docs")):
        return None
    no_descend_paths = {
        "./usr/share/doc",
    }
    allowed_files = {f"./usr/share/lintian/overrides/{pkg_name}"}
    has_other_content = _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_paths,
        acceptable_files=allowed_files,
    )
    return None if has_other_content else "foreign"

1447 

1448 

def _any_unacceptable_paths(
    fs_root: FSPath,
    *,
    acceptable_no_descend_paths: list[str] | AbstractSet[str] = frozenset(),
    acceptable_files: list[str] | AbstractSet[str] = frozenset(),
) -> bool:
    """Check whether the tree contains anything beyond the allowed paths.

    A path is acceptable when it is (a) one of the no-descend prefixes
    (its whole subtree is accepted without inspection), (b) an explicitly
    allowed file, or (c) a directory that merely leads to one of the former.
    The first path matching none of these makes the result True.
    """
    allowed_parent_dirs = _all_parent_directories_of(
        itertools.chain(acceptable_no_descend_paths, acceptable_files)
    )
    for fs_path, children in fs_root.walk():
        current = fs_path.path
        if current in acceptable_no_descend_paths:
            # Accept the whole subtree and do not descend into it.
            children.clear()
        elif current in allowed_parent_dirs or current in acceptable_files:
            pass
        else:
            return True
    return False

1467 

1468 

def auto_compute_multi_arch(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    fs_root: FSPath,
) -> str | None:
    """Derive a Multi-Arch value from the package contents, when safe.

    Returns None when no value can be derived: the package ships maintainer
    scripts (which may not be multi-arch aware), or it contains paths that
    are not known to be architecture-qualified.
    """
    resolved_arch = binary_package.resolved_architecture
    # Any maintainer script disables the auto-detection outright.
    for script in ALL_CONTROL_SCRIPTS:
        candidate = control_output_dir.get(script)
        if candidate is not None and candidate.is_file:
            return None

    if resolved_arch == "all":
        return _compute_multi_arch_for_arch_all_doc(binary_package, fs_root)

    resolved_multiarch = binary_package.deb_multiarch
    assert resolved_arch != "all"
    no_descend_paths = {
        f"./usr/lib/{resolved_multiarch}",
        f"./usr/include/{resolved_multiarch}",
    }
    doc_basenames = (
        "copyright",
        "changelog.gz",
        "changelog.Debian.gz",
        f"changelog.Debian.{resolved_arch}.gz",
        "NEWS.Debian",
        "NEWS.Debian.gz",
        "README.Debian",
        "README.Debian.gz",
    )
    allowed_files = {
        f"./usr/share/doc/{binary_package.name}/{basename}"
        for basename in doc_basenames
    }

    # Note that the lintian-overrides file is deliberately omitted from the allow-list. We would have to know that the
    # override does not use architecture segments. With pure debputy, this is guaranteed (debputy
    # does not allow lintian-overrides with architecture segment). However, with a mixed debhelper + debputy,
    # `dh_lintian` allows it with compat 13 or older.

    if _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_paths,
        acceptable_files=allowed_files,
    ):
        return None

    return "same"

1518 

1519 

@functools.lru_cache
def _has_t64_enabled() -> bool:
    """Check whether dpkg-buildflags reports the time64 ABI feature enabled.

    Returns False when dpkg-buildflags is missing or exits non-zero.  The
    result is cached for the lifetime of the process.
    """
    query_cmd = ["dpkg-buildflags", "--query-features", "abi"]
    try:
        raw_output = subprocess.check_output(query_cmd)
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False

    return any(
        stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes"
        for stanza in Deb822.iter_paragraphs(raw_output.decode())
    )

1533 

1534 

def _t64_migration_substvar(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    substvars: FlushableSubstvars,
) -> None:
    """Maintain the 64-bit time_t (t64) transition Provides substvar.

    For packages involved in the t64 library rename (name matches the t64
    pattern or an explicit X-Time64-Compat field is present) that ship a
    shared library (evidenced by a symbols or shlibs control file): on ABIs
    where the rename is a no-op, add a Provides on the pre-rename (compat)
    name at the current binary version; on converted 32-bit ABIs merely
    define the substvar (empty) so references to it stay valid.
    """
    name = binary_package.name
    compat_name = binary_package.fields.get("X-Time64-Compat")
    if compat_name is None and not _T64_REGEX.match(name):
        # Not part of the t64 transition at all.
        return

    # Only relevant when the package ships a shared library.
    if not any(
        p.is_file
        for n in ["symbols", "shlibs"]
        if (p := control_output_dir.get(n)) is not None
    ):
        return

    if compat_name is None:
        compat_name = name.replace("t64", "", 1)
        if compat_name == name:
            raise AssertionError(
                f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
                " As a work around, you can explicitly provide a X-Time64-Compat header in debian/control"
                " where you specify the desired compat name."
            )

    arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")

    if arch_bits != "32" or not _has_t64_enabled():
        # The rename is a no-op here; Provide the compat name so existing
        # dependencies on it remain satisfiable.
        # Bug fix: the braces must be doubled so the substvar reference
        # "${binary:Version}" survives literally; the previous single-braced
        # form was an f-string replacement field referencing an undefined
        # name and raised NameError at runtime.
        substvars.add_dependency(
            _T64_PROVIDES,
            f"{compat_name} (= ${{binary:Version}})",
        )
    elif _T64_PROVIDES not in substvars:
        substvars[_T64_PROVIDES] = ""

1570 

1571 

@functools.lru_cache
def dpkg_field_list_pkg_dep() -> Sequence[str]:
    """Return the package relationship field names known to dpkg.

    Queries perl's Dpkg::Control::Fields module; aborts via `_error` when
    that is not possible.  The result is cached for the process lifetime.
    """
    perl_cmd = [
        "perl",
        "-MDpkg::Control::Fields",
        "-e",
        r'print "$_\n" for field_list_pkg_dep',
    ]
    try:
        output = subprocess.check_output(perl_cmd)
    except (FileNotFoundError, subprocess.CalledProcessError):
        _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
    return output.decode("utf-8").splitlines(keepends=False)

1586 

1587 

# Fields that debputy accepts relationship substvars for, but which
# dpkg-gencontrol does not recognize natively; `_handle_auto_substvars`
# emits these with an `XB-` prefix instead.
_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG = {
    "Commands",
}

1591 

1592 

@functools.lru_cache
def all_auto_substvars() -> Sequence[str]:
    """All fields whose substvars debputy can auto-merge.

    Combines the dpkg-known relationship fields with the extra fields that
    debputy supports but dpkg does not.  Cached; returns an immutable tuple.
    """
    fields = list(dpkg_field_list_pkg_dep())
    fields.extend(_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG)
    return tuple(fields)

1598 

1599 

def _handle_auto_substvars(
    source: SourcePackage,
    dctrl_file: BinaryPackage,
    substvars: FlushableSubstvars,
    has_dbgsym: bool,
) -> str | None:
    """Merge relationship substvars into control fields where dpkg cannot.

    Scans the package's substvars for namespaced (":"-containing) variables
    targeting known relationship fields and folds references to them into
    the corresponding fields of a rewritten control stanza.  Returns the
    path of a generated replacement debian/control file to feed to
    dpkg-gencontrol, or None when the original debian/control can be used
    as-is.

    :param source: The source package (its stanza is copied verbatim).
    :param dctrl_file: The binary package whose stanza is being rewritten.
    :param substvars: The substitution variables collected for the package.
    :param has_dbgsym: Whether a dbgsym companion package will be built; if
      so, a minimal dbgsym stanza is appended to the generated file.
    """
    auto_substvars_fields = all_auto_substvars()
    # Lowercase lookup so field matching is case-insensitive.
    auto_substvars_fields_lc = {x.lower(): x for x in auto_substvars_fields}
    # Per (lowercased) field: the set of "${substvar}" references to append.
    substvar_fields = collections.defaultdict(set)
    needs_dbgsym_stanza = False
    for substvar_name, substvar in substvars.as_substvar.items():
        if ":" not in substvar_name:
            # Only namespaced substvars participate in the auto-merge.
            continue
        if substvar.assignment_operator in ("$=", "!="):
            # Will create incorrect results if there is a dbgsym and we do nothing
            needs_dbgsym_stanza = True

        if substvar.assignment_operator == "$=":
            # Automatically handled; no need for manual merging.
            continue
        _, field = substvar_name.rsplit(":", 1)
        field_lc = field.lower()
        if field_lc not in auto_substvars_fields_lc:
            continue
        substvar_fields[field_lc].add("${" + substvar_name + "}")

    if not has_dbgsym:
        needs_dbgsym_stanza = False

    if not substvar_fields and not needs_dbgsym_stanza:
        # Nothing to merge; dpkg-gencontrol can consume debian/control directly.
        return None

    replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)

    for field_name in auto_substvars_fields:
        field_name_lc = field_name.lower()
        addendum = substvar_fields.get(field_name_lc)
        if addendum is None:
            # No merging required
            continue
        # Sorted for deterministic output.
        substvars_part = ", ".join(sorted(addendum))
        existing_value = replacement_stanza.get(field_name)

        if existing_value is None or existing_value.isspace():
            final_value = substvars_part
        else:
            # Strip a trailing comma before appending to avoid ", ,".
            existing_value = existing_value.rstrip().rstrip(",")
            final_value = f"{existing_value}, {substvars_part}"
        replacement_stanza[field_name] = final_value
        canonical_field_name = auto_substvars_fields_lc.get(field_name_lc)
        # If `dpkg` does not know the field, we need to inject `XB-` in front
        # of it.
        if (
            canonical_field_name
            and canonical_field_name in _SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG
        ):
            replacement_stanza[f"XB-{canonical_field_name}"] = replacement_stanza[
                field_name
            ]
            del replacement_stanza[field_name]

    # Keep Description last, matching the conventional stanza layout.
    with suppress(KeyError):
        replacement_stanza.order_last("Description")

    tmpdir = generated_content_dir(package=dctrl_file)
    with tempfile.NamedTemporaryFile(
        mode="wb",
        dir=tmpdir,
        suffix="__DEBIAN_control",
        delete=False,
    ) as fd:
        try:
            # Preserves formatting when the source stanza supports dump().
            cast("Any", source.fields).dump(fd)
        except AttributeError:
            debian.deb822.Deb822(source.fields).dump(fd)
        fd.write(b"\n")
        replacement_stanza.dump(fd)

        if has_dbgsym:
            # Minimal stanza to avoid substvars warnings. Most fields are still set
            # via -D.
            dbgsym_stanza = Deb822()
            dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
            dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
            dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
            fd.write(b"\n")
            dbgsym_stanza.dump(fd)

    return fd.name

1689 

1690 

1691def _generate_control_files( 

1692 binary_package_data: "BinaryPackageData", 

1693 package_state: "PackageTransformationDefinition", 

1694 control_output_dir: FSControlRootDir, 

1695 fs_root: FSPath, 

1696 substvars: FlushableSubstvars, 

1697 dbgsym_root_fs: VirtualPath | None, 

1698 dbgsym_build_ids: list[str] | None, 

1699) -> None: 

1700 binary_package = binary_package_data.binary_package 

1701 source_package = binary_package_data.source_package 

1702 package_name = binary_package.name 

1703 extra_common_params = [] 

1704 extra_params_specific = [] 

1705 _ensure_base_substvars_defined(substvars) 

1706 if "Installed-Size" not in substvars: 

1707 # Pass it via cmd-line to make it more visible that we are providing the 

1708 # value. It also prevents the dbgsym package from picking up this value. 

1709 total_size = compute_installed_size(fs_root) + compute_installed_size( 

1710 control_output_dir 

1711 ) 

1712 extra_params_specific.append(f"-VInstalled-Size={total_size}") 

1713 

1714 ma_value = binary_package.fields.get("Multi-Arch") 

1715 if not binary_package.is_udeb and ma_value is None: 

1716 ma_value = auto_compute_multi_arch(binary_package, control_output_dir, fs_root) 

1717 if ma_value is not None: 

1718 _info( 

1719 f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based' 

1720 ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field' 

1721 ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the' 

1722 ' relevant part of "debian/control" to disable this feature.' 

1723 ) 

1724 # We want this to apply to the `-dbgsym` package as well to avoid 

1725 # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable` 

1726 extra_common_params.append(f"-DMulti-Arch={ma_value}") 

1727 elif ma_value == "no": 

1728 extra_common_params.append("-UMulti-Arch") 

1729 

1730 dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else "" 

1731 if package_state.binary_version is not None: 

1732 extra_common_params.append(f"-v{package_state.binary_version}") 

1733 

1734 _t64_migration_substvar(binary_package, control_output_dir, substvars) 

1735 

1736 with substvars.flush() as flushed_substvars: 

1737 has_dbgsym = dbgsym_root_fs is not None and any( 

1738 f for f in dbgsym_root_fs.all_paths() if f.is_file 

1739 ) 

1740 dctrl_file = _handle_auto_substvars( 

1741 source_package, 

1742 binary_package, 

1743 substvars, 

1744 has_dbgsym, 

1745 ) 

1746 if dctrl_file is None: 

1747 dctrl_file = "debian/control" 

1748 

1749 if has_dbgsym: 

1750 assert dbgsym_root_fs is not None # mypy hint 

1751 dbgsym_ctrl_dir = binary_package_data.dbgsym_info.dbgsym_ctrl_dir 

1752 _generate_dbgsym_control_file_if_relevant( 

1753 binary_package, 

1754 dbgsym_root_fs, 

1755 dbgsym_ctrl_dir, 

1756 dbgsym_ids, 

1757 ma_value, 

1758 dctrl_file, 

1759 extra_common_params, 

1760 ) 

1761 generate_md5sums_file( 

1762 dbgsym_ctrl_dir, 

1763 dbgsym_root_fs, 

1764 ) 

1765 elif dbgsym_ids: 

1766 extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}") 

1767 

1768 ctrl_file = os.path.join(control_output_dir.fs_path, "control") 

1769 dpkg_cmd = [ 

1770 "dpkg-gencontrol", 

1771 f"-p{package_name}", 

1772 # FIXME: Support d/<pkg>.changelog at some point. 

1773 "-ldebian/changelog", 

1774 f"-c{dctrl_file}", 

1775 f"-T{flushed_substvars}", 

1776 f"-O{ctrl_file}", 

1777 # Use a placeholder for -P to ensure failure if we forgot to override a path parameter 

1778 "-P/non-existent", 

1779 *extra_common_params, 

1780 *extra_params_specific, 

1781 ] 

1782 print_command(*dpkg_cmd) 

1783 try: 

1784 subprocess.check_call(dpkg_cmd) 

1785 except subprocess.CalledProcessError: 

1786 _error( 

1787 f"Attempting to generate DEBIAN/control file for {package_name} failed. Please review the output from " 

1788 " dpkg-gencontrol above to understand what went wrong." 

1789 ) 

1790 os.chmod(ctrl_file, 0o644) 

1791 

1792 if not binary_package.is_udeb: 

1793 generate_md5sums_file(control_output_dir, fs_root)