Coverage for src/debputy/deb_packaging_support.py: 24%

830 statements  

« prev     ^ index     » next       coverage.py v7.8.2, created at 2025-09-07 09:27 +0000

1import collections 

2import contextlib 

3import dataclasses 

4import datetime 

5import functools 

6import hashlib 

7import itertools 

8import operator 

9import os 

10import re 

11import shutil 

12import subprocess 

13import tempfile 

14import textwrap 

15from contextlib import ExitStack, suppress 

16from tempfile import mkstemp 

17from typing import ( 

18 Iterable, 

19 List, 

20 Optional, 

21 Set, 

22 Dict, 

23 Sequence, 

24 Tuple, 

25 Iterator, 

26 Literal, 

27 TypeVar, 

28 FrozenSet, 

29 cast, 

30 Any, 

31 Union, 

32 Mapping, 

33 AbstractSet, 

34 TYPE_CHECKING, 

35) 

36 

37import debian.deb822 

38from debian.changelog import Changelog 

39from debian.deb822 import Deb822 

40from debputy._deb_options_profiles import DebBuildOptionsAndProfiles 

41from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable 

42from debputy.elf_util import find_all_elf_files, ELF_MAGIC 

43from debputy.exceptions import DebputyDpkgGensymbolsError, PureVirtualPathError 

44from debputy.filesystem_scan import FSPath, FSControlRootDir, VirtualPathBase 

45from debputy.maintscript_snippet import ( 

46 ALL_CONTROL_SCRIPTS, 

47 MaintscriptSnippetContainer, 

48 STD_CONTROL_SCRIPTS, 

49) 

50from debputy.packager_provided_files import PackagerProvidedFile 

51from debputy.packages import BinaryPackage, SourcePackage 

52from debputy.packaging.alternatives import process_alternatives 

53from debputy.packaging.debconf_templates import process_debconf_templates 

54from debputy.packaging.makeshlibs import ( 

55 compute_shlibs, 

56 ShlibsContent, 

57 generate_shlib_dirs, 

58 resolve_reserved_provided_file, 

59) 

60from debputy.plugin.api.feature_set import PluginProvidedFeatureSet 

61from debputy.plugin.api.impl import ServiceRegistryImpl 

62from debputy.plugin.api.impl_types import ( 

63 MetadataOrMaintscriptDetector, 

64 PackageDataTable, 

65 ServiceManagerDetails, 

66) 

67from debputy.plugin.api.spec import ( 

68 FlushableSubstvars, 

69 VirtualPath, 

70 PackageProcessingContext, 

71 ServiceDefinition, 

72) 

73from debputy.plugins.debputy.binary_package_rules import ServiceRule 

74from debputy.util import ( 

75 _error, 

76 ensure_dir, 

77 assume_not_none, 

78 resolve_perl_config, 

79 perlxs_api_dependency, 

80 detect_fakeroot, 

81 grouper, 

82 _info, 

83 xargs, 

84 escape_shell, 

85 generated_content_dir, 

86 print_command, 

87 _warn, 

88) 

89 

90if TYPE_CHECKING: 

91 from debputy.highlevel_manifest import ( 

92 HighLevelManifest, 

93 PackageTransformationDefinition, 

94 BinaryPackageData, 

95 ) 

96 

97 

# Type variable for virtual-path helpers; covariant so callables accepting a
# VirtualPath subtype still satisfy the bound.
VP = TypeVar("VP", bound=VirtualPath, covariant=True)

# Matches library package names from the 64-bit time_t transition:
# "lib…t64" with an optional "-nss" suffix.
_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
# Substvar name used to carry Provides entries for t64 packages.
_T64_PROVIDES = "t64:Provides"

102 

103 

104def generate_md5sums_file( 

105 control_output_dir: VirtualPathBase, 

106 fs_root: VirtualPath, 

107) -> None: 

108 conffiles = control_output_dir.get("conffiles") 

109 exclude = set() 

110 if conffiles and conffiles.is_file: 

111 with conffiles.open() as fd: 

112 for line in fd: 

113 if not line.startswith("/"): 

114 continue 

115 exclude.add("." + line.rstrip("\n")) 

116 files_to_checksum = sorted( 

117 ( 

118 path 

119 for path in fs_root.all_paths() 

120 if path.is_file and path.path not in exclude 

121 ), 

122 # Sort in the same order as dh_md5sums, which is not quite the same as dpkg/`all_paths()` 

123 # Compare `.../doc/...` vs `.../doc-base/...` if you want to see the difference between 

124 # the two approaches. 

125 key=lambda p: p.path, 

126 ) 

127 if not files_to_checksum: 

128 return 

129 with control_output_dir.open_child("md5sums", "w") as md5fd: 

130 for member in files_to_checksum: 

131 path = member.path 

132 assert path.startswith("./") 

133 path = path[2:] 

134 with member.open(byte_io=True) as f: 

135 file_hash = hashlib.md5() 

136 while chunk := f.read(8192): 

137 file_hash.update(chunk) 

138 md5fd.write(f"{file_hash.hexdigest()} {path}\n") 

139 

140 

def install_or_generate_conffiles(
    ctrl_root: Union[FSPath, FSControlRootDir],
    fs_root: VirtualPath,
    reserved_packager_provided_files: Dict[str, List[PackagerProvidedFile]],
) -> None:
    """Install the packager-provided ``conffiles`` (when present and
    non-empty) and append auto-detected conffiles for files under /etc."""
    provided = resolve_reserved_provided_file(
        "conffiles",
        reserved_packager_provided_files,
    )
    if provided and provided.is_file and provided.size > 0:
        ctrl_root.insert_file_from_fs_path(
            "conffiles",
            provided.fs_path,
            mode=0o644,
            reference_path=provided,
        )
    etc_dir = fs_root.lookup("etc")
    if etc_dir:
        # Everything shipped under /etc is a conffile by policy.
        _add_conffiles(ctrl_root, (p for p in etc_dir.all_paths() if p.is_file))

164 

165 

# Bit flags describing which kinds of perl consumers were detected in the
# package; combined below into the ${perl:Depends} substvar.
PERL_DEP_PROGRAM = 1  # scripts run via a perl interpreter
PERL_DEP_INDEP_PM_MODULE = 2  # *.pm modules under vendorlib (arch-indep)
PERL_DEP_XS_MODULE = 4  # compiled XS modules (*.so under vendorarch)
PERL_DEP_ARCH_PM_MODULE = 8  # *.pm modules under vendorarch (arch-dep)
# Any detected flag outside PROGRAM/INDEP_PM makes a "perl:any"
# (multi-arch) dependency unsuitable.
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)

171 

172 

@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    """Return the installed version of *package* according to ``dpkg -s``.

    Raises AssertionError when the status output has no Version field
    (i.e., the package is not properly installed).
    """
    prefix = "Version: "
    lines = (
        subprocess.check_output(["dpkg", "-s", package])
        .decode("utf-8")
        .splitlines(keepends=False)
    )
    for line in lines:
        if line.startswith(prefix):
            # Slice by the prefix length; the original `line[8:]` was one
            # short and only worked because of the trailing strip().
            return line[len(prefix) :].strip()
    raise AssertionError(f"dpkg -s {package} did not provide a Version field")

187 

188 

def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
) -> None:
    """Detect perl usage in the package and populate ${perl:Depends}.

    Scans the perl vendor directories for modules and the rest of the tree
    for perl programs, then adds a dependency on perl (with ":any" and/or a
    version constraint where applicable) to *substvars*.
    """
    perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in (perl_config_data.vendorarch, perl_config_data.vendorlib):
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    #
    # Classify modules: *.so implies a compiled XS module; *.pm counts as
    # arch-indep or arch-dep depending on which vendor dir it lives in.
    for d, pm_mode in [
        (perl_config_data.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (perl_config_data.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            if path.name.endswith(".so"):
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                detected_dep_requirements |= pm_mode

    # Find perl programs: executables or *.pl files whose interpreter is perl.
    # The documentation tree is skipped entirely.
    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            children.clear()
            continue
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    # Only programs and arch-indep modules can rely on "perl:any".
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        dependency += ":any"

    # XS modules are tied to the perl ABI; require at least the build-time version.
    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())

252 

253 

254def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None: 

255 path = fs_root.lookup("./usr/local") 

256 if path and any(path.iterdir): 

257 # There are two key issues: 

258 # 1) Getting the generated maintscript carried on to the final maintscript 

259 # 2) Making sure that manifest created directories do not trigger the "unused error". 

260 _error( 

261 f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})." 

262 ) 

263 

264 

def _find_and_analyze_systemd_service_files(
    fs_root: VirtualPath,
    systemd_service_dir: Literal["system", "user"],
) -> Iterable[VirtualPath]:
    """Collect systemd unit files below the usual service directories.

    Returns the regular files, de-duplicated by name with the
    /usr/lib/systemd entry winning over the /lib/systemd one.

    NOTE(review): `aliases` (symlink name -> target basename mapping) is
    populated but never returned or read in this function -- presumably
    intended for a caller/analysis step; confirm before relying on it.
    """
    service_dirs = [
        f"./usr/lib/systemd/{systemd_service_dir}",
        f"./lib/systemd/{systemd_service_dir}",
    ]
    aliases: Dict[str, List[str]] = collections.defaultdict(list)
    seen = set()
    all_files = []

    for d in service_dirs:
        system_dir = fs_root.lookup(d)
        if not system_dir:
            continue
        for child in system_dir.iterdir:
            if child.is_symlink:
                # A symlinked unit acts as an alias of its target (by basename).
                dest = os.path.basename(child.readlink())
                aliases[dest].append(child.name)
            elif child.is_file and child.name not in seen:
                seen.add(child.name)
                all_files.append(child)

    return all_files

290 

291 

def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    """Abort the build if the package ships systemd *user* service files.

    User-scope units are not supported; the first one found is fatal.
    """
    user_units = _find_and_analyze_systemd_service_files(fs_root, "user")
    for service_file in user_units:
        _error(
            f'Sorry, systemd user services files are not supported at the moment (saw "{service_file.path}"'
            f" in {dctrl.name})"
        )

301 

302 

# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
# Minimum number of non-binNMU changelog entries kept even when trimming.
_DCH_MIN_NUM_OF_ENTRIES = 4

306 

307 

def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: Optional[Set[str]],
    *,
    trim: bool = True,
) -> Tuple[bool, Optional[Set[str]]]:
    """Trim an installed Debian changelog or NEWS file in place.

    For changelogs (``is_changelog=True``): once at least
    ``_DCH_MIN_NUM_OF_ENTRIES`` "important" entries are kept, entries older
    than ``_DCH_PRUNE_CUT_OFF_DATE`` are dropped (when *trim* is set).
    binNMU entries are always preserved and written out into a separate
    ``<name>.<arch>`` companion file.

    For NEWS files: only entries whose version appears in *keep_versions*
    are retained; the file is deleted outright when nothing remains.

    Returns ``(changed, kept_versions)``, where *kept_versions* is the set
    of versions retained in a pruned changelog (None otherwise).
    """
    # TODO: Process `d/changelog` once
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error(f"The Debian changelog was missing date in sign off line")
            entry_date = datetime.datetime.strptime(
                block_date, "%a, %d %b %Y %H:%M:%S %z"
            ).date()
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        # Nothing changed; leave the file untouched.
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    with (
        path.replace_fs_path_content() as fs_path,
        open(fs_path, "wt", encoding="utf-8") as fd,
    ):
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(
                f"""\
                # Older entries have been removed from this changelog.
                # To read the complete changelog use `apt changelog {package.name}`.
                """
            )
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        # The binNMU entries go into an architecture-qualified companion file.
        with (
            parent_dir.add_file(
                f"{path.name}.{package.resolved_architecture}"
            ) as binnmu_changelog,
            open(
                binnmu_changelog.fs_path,
                "wt",
                encoding="utf-8",
            ) as binnmu_fd,
        ):
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}

405 

406 

def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    """Normalize and trim the installed Debian changelog and NEWS files.

    For native packages, an installed "changelog.Debian" is renamed to
    "changelog".  Unless DEB_BUILD_OPTIONS contains "notrimdch", old
    changelog entries are pruned and NEWS.Debian is restricted to the
    versions kept in the changelog.
    """
    doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not doc_dir:
        return
    changelog = doc_dir.get("changelog.Debian")
    if changelog and is_native:
        # Native packages use "changelog" (without the ".Debian" suffix).
        changelog.name = "changelog"
    elif is_native:
        changelog = doc_dir.get("changelog")

    trim = False if "notrimdch" in build_env.deb_build_options else True

    kept_entries = None
    pruned_changelog = False
    if changelog and changelog.has_fs_path:
        pruned_changelog, kept_entries = _prune_dch_file(
            dctrl, changelog, True, None, trim=trim
        )

    if not trim:
        # Trimming disabled: do not touch the NEWS file either.
        return

    news_file = doc_dir.get("NEWS.Debian")
    if news_file and news_file.has_fs_path and pruned_changelog:
        # Keep NEWS in sync with the versions retained in the changelog.
        _prune_dch_file(dctrl, news_file, False, kept_entries)

437 

438 

439_UPSTREAM_CHANGELOG_SOURCE_DIRS = [ 

440 ".", 

441 "doc", 

442 "docs", 

443] 

444_UPSTREAM_CHANGELOG_NAMES = { 

445 # The value is a priority to match the debhelper order. 

446 # - The suffix weights heavier than the basename (because that is what debhelper did) 

447 # 

448 # We list the name/suffix in order of priority in the code. That makes it easier to 

449 # see the priority directly, but it gives the "lowest" value to the most important items 

450 f"{n}{s}": (sw, nw) 

451 for (nw, n), (sw, s) in itertools.product( 

452 enumerate(["changelog", "changes", "history"], start=1), 

453 enumerate(["", ".txt", ".md", ".rst"], start=1), 

454 ) 

455} 

456_NONE_TUPLE = (None, (0, 0)) 

457 

458 

459def _detect_upstream_changelog(names: Iterable[str]) -> Optional[str]: 

460 matches = [] 

461 for name in names: 

462 match_priority = _UPSTREAM_CHANGELOG_NAMES.get(name.lower()) 

463 if match_priority is not None: 

464 matches.append((name, match_priority)) 

465 return min(matches, default=_NONE_TUPLE, key=operator.itemgetter(1))[0] 

466 

467 

def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: FSPath,
    source_fs_root: VirtualPath,
) -> None:
    """Ensure ``/usr/share/doc/<pkg>/changelog`` carries the upstream changelog.

    Prefers a candidate already shipped in the package's doc dir (renaming
    it to "changelog"); otherwise copies the best candidate found in the
    source tree (".", "doc" or "docs").
    """
    doc_dir_path = f"./usr/share/doc/{dctrl_bin.name}"
    doc_dir = fs_root.lookup(doc_dir_path)
    if doc_dir and not doc_dir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if doc_dir:
        if doc_dir.get("changelog") or doc_dir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        shipped_candidate = _detect_upstream_changelog(
            entry.name
            for entry in doc_dir.iterdir
            if entry.is_file and entry.has_fs_path and entry.size > 0
        )
        if shipped_candidate:
            shipped = doc_dir.lookup(shipped_candidate)
            assert shipped is not None  # Mostly as a typing hint
            shipped.name = "changelog"
            return
    for src_dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        src_dir = source_fs_root.lookup(src_dirname)
        if not src_dir or not src_dir.is_dir:
            continue
        candidate = _detect_upstream_changelog(
            entry.name
            for entry in src_dir.iterdir
            if entry.is_file and entry.has_fs_path and entry.size > 0
        )
        if candidate:
            if doc_dir is None:
                doc_dir = fs_root.mkdirs(doc_dir_path)
            doc_dir.insert_file_from_fs_path(
                "changelog",
                src_dir[candidate].fs_path,
            )
            break

510 

511 

@dataclasses.dataclass(slots=True)
class _ElfInfo:
    # Book-keeping record for one ELF file while it is stripped and its
    # debug info is extracted into the dbgsym package.
    path: VirtualPath  # the ELF file inside the package fs tree
    fs_path: str  # materialized (writable) path on the build filesystem
    is_stripped: Optional[bool] = None  # result of the file(1) probe; None until probed
    build_id: Optional[str] = None  # GNU build-id (hex) if file(1) reported one
    dbgsym: Optional[FSPath] = None  # generated .debug file in the dbgsym tree

519 

520 

521def _elf_static_lib_walk_filter( 

522 fs_path: VirtualPath, 

523 children: List[VP], 

524) -> bool: 

525 if ( 

526 fs_path.name == ".build-id" 

527 and assume_not_none(fs_path.parent_dir).name == "debug" 

528 ): 

529 children.clear() 

530 return False 

531 # Deal with some special cases, where certain files are not supposed to be stripped in a given directory 

532 if "debug/" in fs_path.path or fs_path.name.endswith("debug/"): 

533 # FIXME: We need a way to opt out of this per #468333/#1016122 

534 for so_file in (f for f in list(children) if f.name.endswith(".so")): 

535 children.remove(so_file) 

536 if "/guile/" in fs_path.path or fs_path.name == "guile": 

537 for go_file in (f for f in list(children) if f.name.endswith(".go")): 

538 children.remove(go_file) 

539 return True 

540 

541 

@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[Dict[str, _ElfInfo]]:
    """Context manager yielding an ``_ElfInfo`` record per ELF file.

    Yields a mapping of materialized fs path -> _ElfInfo with
    ``is_stripped`` and ``build_id`` pre-resolved via file(1).  The
    replace_fs_path_content() contexts stay open for the caller's
    duration, so the fs paths are writable in place.
    """
    all_elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not all_elf_files:
        yield {}
        return
    with ExitStack() as cm_stack:
        # Materialize every ELF file for in-place modification.
        resolved = (
            (p, cm_stack.enter_context(p.replace_fs_path_content()))
            for p in all_elf_files
        )
        elf_info = {
            fs_path: _ElfInfo(
                # Look the entry up by path again after materialization
                # (presumably to get the live node) -- TODO confirm.
                path=assume_not_none(fs_root.lookup(detached_path.path)),
                fs_path=fs_path,
            )
            for detached_path, fs_path in resolved
        }
        _resolve_build_ids(elf_info)
        yield elf_info

565 

566 

def _find_all_static_libs(
    fs_root: FSPath,
) -> Iterator[FSPath]:
    """Yield static libraries (ar archives with binary/ELF content).

    Uses the same pruning rules as the ELF scan, skips the historically
    ignored ``lib*_g.a`` archives, and sniffs the content to confirm the
    archive actually holds binary data.
    """
    for path, children in fs_root.walk():
        # Matching the logic of dh_strip for now.
        if not _elf_static_lib_walk_filter(path, children):
            continue
        if not path.is_file:
            continue
        if path.name.startswith("lib") and path.name.endswith("_g.a"):
            # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
            # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
            # symbols")
            continue
        if not path.has_fs_path:
            continue
        with path.open(byte_io=True) as fd:
            # Both regular and thin ar archives are accepted.
            magic = fd.read(8)
            if magic not in (b"!<arch>\n", b"!<thin>\n"):
                continue
            # Maybe we should see if the first file looks like an index file.
            # Three random .a samples suggests the index file is named "/"
            # Not sure if we should skip past it and then do the ELF check or just assume
            # that "index => static lib".
            data = fd.read(1024 * 1024)
            if b"\0" not in data and ELF_MAGIC not in data:
                continue
        yield path

595 

596 

@contextlib.contextmanager
def _all_static_libs(fs_root: FSPath) -> Iterator[List[str]]:
    """Context manager yielding writable fs paths for all static libraries.

    Yields an empty list when there are none; otherwise the
    replace_fs_path_content() contexts remain open while the caller works.
    """
    static_libs = list(_find_all_static_libs(fs_root))
    if not static_libs:
        yield []
        return
    with ExitStack() as stack:
        writable_paths: List[str] = [
            stack.enter_context(lib.replace_fs_path_content()) for lib in static_libs
        ]
        yield writable_paths

608 

609 

# Extracts the hex build-id from a file(1) verdict, e.g. "BuildID[sha1]=ab12…".
_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)")

611 

612 

def _resolve_build_ids(elf_info: Dict[str, _ElfInfo]) -> None:
    """Probe every ELF file with file(1), filling in is_stripped/build_id.

    Runs ``file -00 -N`` in xargs-style batches; the NUL-delimited output
    alternates between a file path and file's verdict for it.
    """
    static_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        # Presumably fakeroot interferes with file's sandboxing -- confirm.
        static_cmd.append("--no-sandbox")

    for cmd in xargs(static_cmd, (i.fs_path for i in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        lines = output.rstrip(b"\0").split(b"\0")

        # Pairs of (path, verdict); incomplete="strict" catches odd output.
        for fs_path_b, verdict in grouper(lines, 2, incomplete="strict"):
            fs_path = fs_path_b.decode("utf-8")
            info = elf_info[fs_path]
            info.is_stripped = b"not stripped" not in verdict
            m = _FILE_BUILD_ID_RE.search(verdict)
            if m:
                info.build_id = m.group(1).decode("utf-8")

632 

633 

def _make_debug_file(
    objcopy: str, fs_path: str, build_id: str, dbgsym_fs_root: FSPath
) -> FSPath:
    """Extract the debug sections of *fs_path* into the dbgsym tree.

    The .debug file is placed at usr/lib/debug/.build-id/<xx>/<rest>.debug;
    an existing entry for the same build-id is reused.  Aborts the build
    with a user-facing error if objcopy fails.
    """
    dbgsym_dirname = f"./usr/lib/debug/.build-id/{build_id[0:2]}/"
    dbgsym_basename = f"{build_id[2:]}.debug"
    dbgsym_dir = dbgsym_fs_root.mkdirs(dbgsym_dirname)
    if dbgsym_basename in dbgsym_dir:
        # Binaries sharing a build-id share the .debug file.
        return dbgsym_dir[dbgsym_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with dbgsym_dir.add_file(
        dbgsym_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as dbgsym:
        try:
            subprocess.check_call(
                [
                    objcopy,
                    "--only-keep-debug",
                    "--compress-debug-sections",
                    fs_path,
                    dbgsym.fs_path,
                ]
            )
        except subprocess.CalledProcessError:
            # Reconstruct the command for the error message only.
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, dbgsym.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return dbgsym

671 

672 

673def _strip_binary(strip: str, options: List[str], paths: Iterable[str]) -> None: 

674 # We assume the paths are obtained via `p.replace_fs_path_content()`, 

675 # which is the case at the time of written and should remain so forever. 

676 it = iter(paths) 

677 first = next(it, None) 

678 if first is None: 

679 return 

680 static_cmd = [strip] 

681 static_cmd.extend(options) 

682 

683 for cmd in xargs(static_cmd, itertools.chain((first,), (f for f in it))): 

684 _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}") 

685 try: 

686 subprocess.check_call( 

687 cmd, 

688 stdin=subprocess.DEVNULL, 

689 restore_signals=True, 

690 ) 

691 except subprocess.CalledProcessError: 

692 _error( 

693 f"Attempting to remove ELF debug info failed. Please review the error from {strip} above" 

694 f" understand what went wrong." 

695 ) 

696 

697 

def _attach_debug(objcopy: str, elf_binary: VirtualPath, dbgsym: FSPath) -> None:
    """Attach a ``.gnu_debuglink`` to *elf_binary* pointing at *dbgsym*.

    Aborts the build with a user-facing error if objcopy fails.
    """
    dbgsym_fs_path: str
    with dbgsym.replace_fs_path_content() as dbgsym_fs_path:
        cmd = [objcopy, "--add-gnu-debuglink", dbgsym_fs_path, elf_binary.fs_path]
        print_command(*cmd)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            # Message fix: the original read "above understand" (missing "to").
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above to understand what went wrong."
            )

710 

711 

712@functools.lru_cache() 

713def _has_tool(tool: str) -> bool: 

714 return shutil.which(tool) is not None 

715 

716 

def _run_dwz(
    dctrl: BinaryPackage,
    dbgsym_fs_root: FSPath,
    unstripped_elf_info: List[_ElfInfo],
) -> None:
    """Deduplicate DWARF data across the package's unstripped ELF files.

    No-op for udebs, when dwz is unavailable, or when there is nothing to
    process.  With more than one ELF file, shared DWARF is moved into a
    multifile under usr/lib/debug/.dwz/ that is shipped in the dbgsym tree.
    """
    if not unstripped_elf_info or dctrl.is_udeb or not _has_tool("dwz"):
        return
    dwz_cmd = ["dwz"]
    dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
    dwz_ma_basename = f"{dctrl.name}.debug"
    multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
    build_time_multifile = None
    if len(unstripped_elf_info) > 1:
        fs_content_dir = generated_content_dir()
        # dwz writes the multifile here (-m); -M records the installed path.
        fd, build_time_multifile = mkstemp(suffix=dwz_ma_basename, dir=fs_content_dir)
        os.close(fd)
        dwz_cmd.append(f"-m{build_time_multifile}")
        dwz_cmd.append(f"-M/{multifile}")

    # TODO: configuration for disabling multi-file and tweaking memory limits

    dwz_cmd.extend(e.fs_path for e in unstripped_elf_info)

    _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
    try:
        subprocess.check_call(dwz_cmd)
    except subprocess.CalledProcessError:
        _error(
            "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
            " to understand what went wrong."
        )
    # Only ship the multifile when dwz actually put something into it.
    if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
        dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
        dwz_dir.insert_file_from_fs_path(
            dwz_ma_basename,
            build_time_multifile,
            mode=0o644,
            require_copy_on_write=False,
            follow_symlinks=False,
        )

757 

758 

def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: FSPath,
    dbgsym_fs_root: VirtualPath,
    *,
    run_dwz: bool = False,
) -> List[str]:
    """Strip the package's binaries and move debug info into the dbgsym tree.

    Static libraries are debug-stripped in place.  For ELF files, the debug
    sections of every unstripped binary are extracted into *dbgsym_fs_root*
    (optionally deduplicated via dwz first), all ELF files are stripped,
    and the debug link is attached.  Returns the sorted, de-duplicated list
    of build-ids that went into the dbgsym tree.
    """
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        if run_dwz:
            _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        # Extract the .debug files before stripping destroys the debug sections.
        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When run strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

        # Set for uniqueness
        all_debug_info = sorted(
            {assume_not_none(i.build_id) for i in unstripped_elf_info}
        )

    # The dbgsym package's doc dir is a symlink to the main package's doc dir.
    dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
    dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
    return all_debug_info

839 

840 

841def run_package_processors( 

842 manifest: "HighLevelManifest", 

843 package_metadata_context: PackageProcessingContext, 

844 fs_root: VirtualPath, 

845) -> None: 

846 pppps = manifest.plugin_provided_feature_set.package_processors_in_order() 

847 binary_package = package_metadata_context.binary_package 

848 for pppp in pppps: 

849 if not pppp.applies_to(binary_package): 

850 continue 

851 pppp.run_package_processor(fs_root, None, package_metadata_context) 

852 

853 

def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: "HighLevelManifest",
) -> None:
    """Compute shlibs across all arch-specific packages for dpkg-shlibdeps.

    Merges debian/shlibs.local with the shlibs generated per package,
    materializes a combined shlibs.local file, and records the
    (shlibs file, shlib dirs) pair on every package's control-file creator.
    dpkg-gensymbols errors are collected and reported together at the end.
    """
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: List[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        # Shlibs only apply to arch-specific packages that are being built.
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        fs_root = binary_package_data.fs_root
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = (
            binary_package_data.package_metadata_context.related_udeb_package
        )

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                binary_package,
                binary_package_data.control_output_dir.fs_path,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            # Defer: report all gensymbols failures together below.
            errors.append(e.message)
        else:
            if soname_info_list:
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        # Materialize the merged shlibs so dpkg-shlibdeps can consume it.
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "wt", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )

934 

def _relevant_service_definitions(
    service_rule: ServiceRule,
    service_managers: Union[List[str], FrozenSet[str]],
    by_service_manager_key: Mapping[
        Tuple[str, str, str, str], Tuple[ServiceManagerDetails, ServiceDefinition[Any]]
    ],
    aliases: Mapping[str, Sequence[Tuple[str, str, str, str]]],
) -> Iterable[Tuple[Tuple[str, str, str, str], ServiceDefinition[Any]]]:
    """Yield every (key, definition) pair the service rule applies to.

    Starts from the aliases of the rule's service name and transitively
    follows the names of each matched definition, restricted to service
    managers the rule applies to.  Aborts the build when nothing matched.
    """
    pending_queue = {
        key
        for key in aliases[service_rule.service]
        if key in by_service_manager_key
        and service_rule.applies_to_service_manager(key[-1])
    }
    seen_keys = set()

    if not pending_queue:
        service_manager_names = ", ".join(sorted(service_managers))
        # Message fix: the original started with the garbled "No none of".
        _error(
            f"None of the service managers ({service_manager_names}) detected a service named"
            f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope}),"
            f" but the manifest definition at {service_rule.definition_source} requested that."
        )

    while pending_queue:
        next_key = pending_queue.pop()
        seen_keys.add(next_key)
        _, definition = by_service_manager_key[next_key]
        yield next_key, definition
        # Follow the definition's other names to pick up aliased units.
        for name in definition.names:
            for target_key in aliases[name]:
                if (
                    target_key not in seen_keys
                    and service_rule.applies_to_service_manager(target_key[-1])
                ):
                    pending_queue.add(target_key)
    # (The original's unused `relevant_names` dict and the meaningless
    # `return relevant_names.items()` -- a generator's return value is
    # discarded -- have been removed.)

976 

977 

def handle_service_management(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:
    """Detect services in the package and apply the manifest's service rules.

    Runs every plugin-provided service manager's detector over ``fs_root``,
    applies the manifest's requested service rules to the detected
    definitions, and finally hands each service manager its final set of
    definitions for integration (maintscript snippet generation).
    """

    # (name, type-of-service, scope, service-manager) -> (manager details, definition)
    by_service_manager_key = {}
    # service name or alias -> list of keys into `by_service_manager_key`
    aliases_by_name = collections.defaultdict(list)

    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    # Reject rules that reference unknown service managers up front.
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )

    # Run each service manager's detector and index its detected services.
    for service_manager_details in feature_set.service_managers.values():
        service_registry: ServiceRegistryImpl = ServiceRegistryImpl(
            service_manager_details
        )
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )

        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue

        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )

            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)

    # Apply each manifest rule to every matching definition, replacing the
    # stored definition with the rule-adjusted one.
    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            sm = service_key[-1]
            seen_service_managers.add(sm)
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )
        # A rule that explicitly listed service managers must match a service
        # in every one of them; otherwise the manifest is likely wrong.
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition match the relevant services."
            )

    # Regroup the final (rule-adjusted) definitions per service manager.
    per_service_manager = {}

    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)

    # Let each service manager integrate its definitions (it gets a ctrl
    # handle whose snippets default to the "service" ordering).
    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )

1093 

1094 

def setup_control_files(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: List[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    """Populate the package's DEBIAN/ (control) directory.

    Runs maintscript/metadata detectors, service management integration,
    conffiles handling and finally control file generation (dpkg-gencontrol)
    for both the package and - when relevant - its -dbgsym package.

    :param allow_ctrl_file_management: When False (the integration mode where
      dh_installdeb has already produced the control files), debputy must not
      generate maintscripts, triggers or conffiles itself.  Instead, the
      existing files from debian/<pkg>/DEBIAN are copied over and any attempt
      to add snippets or triggers is a hard error.
    """
    binary_package = package_metadata_context.binary_package
    control_output_dir = binary_package_data.control_output_dir
    control_output_fs_path = control_output_dir.fs_path
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)

    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars

    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())

    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        snippets = ["postinst"]

    if allow_ctrl_file_management:
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_fs_path,
        )

        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )

        # Run all applicable plugin-provided metadata/maintscript detectors.
        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )

        # Materialize the collected snippets into maintainer scripts.
        for script in snippets:
            _generate_snippet(
                control_output_fs_path,
                script,
                package_state.maintscript_snippets,
            )

    else:
        # dh_installdeb already handled control scripts; any debputy-level
        # service rule, snippet or trigger would be silently lost, so fail
        # loudly instead.
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )

        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )

        # Only the dpkg-shlibdeps detector still runs in this mode (it fills
        # in substvars rather than control files).
        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]

        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)

        # Reuse the control files dh_installdeb staged (except "control" and
        # "md5sums", which debputy regenerates itself below).
        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            existing_control_files = []

        if existing_control_files:
            # `cp -a` preserves permissions (maintscripts must stay 0755).
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_fs_path)
            print_command(*cmd)
            subprocess.check_call(cmd)

    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return

    if generated_triggers:
        assert allow_ctrl_file_management
        dest_file = os.path.join(control_output_fs_path, "triggers")
        with open(dest_file, "at", encoding="utf-8") as fd:
            fd.writelines(
                textwrap.dedent(
                    f"""\
                    # Added by {t.provider_source_id} from {t.provider.plugin_name}
                    {t.dpkg_trigger_type} {t.dpkg_trigger_target}
                    """
                )
                for t in generated_triggers
            )
            os.chmod(fd.fileno(), 0o644)

    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            control_output_dir,
            fs_root,
            package_state.reserved_packager_provided_files,
        )

    _generate_control_files(
        binary_package_data,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )

1268 

1269 

def _generate_snippet(
    control_output_dir: str,
    script: str,
    maintscript_snippets: Dict[str, MaintscriptSnippetContainer],
) -> None:
    """Write the maintainer script for `script` if any snippets were registered.

    Produces a `#!/bin/sh` script in `control_output_dir` combining the
    regular snippets with the "service"-ordered ones.  For tear-down scripts
    (prerm/postrm), the groups are emitted in reverse order.
    """
    container = maintscript_snippets.get(script)
    if container is None:
        return
    is_teardown = script in ("prerm", "postrm")
    parts = [
        container.generate_snippet(reverse=is_teardown),
        container.generate_snippet(snippet_order="service", reverse=is_teardown),
    ]
    if is_teardown:
        parts.reverse()
    body = "".join(f"{part}\n" for part in parts if part)
    if not body:
        # All snippet groups were empty; do not emit an empty script.
        return
    script_path = os.path.join(control_output_dir, script)
    with open(script_path, "wt") as fd:
        fd.write("#!/bin/sh\nset -e\n\n")
        fd.write(body)
        # Maintainer scripts must be executable.
        os.chmod(fd.fileno(), 0o755)

1293 

1294 

def _add_conffiles(
    ctrl_root: VirtualPathBase,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    """Append the given paths to the package's DEBIAN/conffiles file.

    Does nothing (and in particular does not open/create the control file)
    when there are no matches at all.
    """
    match_iter = iter(conffile_matches)
    head = next(match_iter, None)
    if head is None:
        # Avoid creating an empty "conffiles" control file.
        return
    with ctrl_root.open_child("conffiles", "at") as fd:
        for match in itertools.chain([head], match_iter):
            abs_path = match.absolute
            assert match.is_file
            fd.write(f"{abs_path}\n")

1309 

1310 

def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    """Ensure the standard `misc:*` substvars exist (as empty strings)."""
    for name in ("misc:Depends", "misc:Pre-Depends"):
        if name not in substvars:
            substvars[name] = ""

1315 

1316 

def compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size"""
    total_kb = 0
    seen_hard_links = set()
    for path in fs_root.all_paths():
        if not (path.is_symlink or path.is_file):
            # Directories (and anything else) count as a single KiB block.
            total_kb += 1
            continue
        try:
            st = path.stat()
            if st.st_nlink > 1:
                # Count each hard-linked inode only once.
                hl_key = (st.st_dev, st.st_ino)
                if hl_key in seen_hard_links:
                    continue
                seen_hard_links.add(hl_key)
            size = st.st_size
        except PureVirtualPathError:
            # We just assume it is not a hard link when the path is purely virtual
            size = path.size
        # Round up to whole KiB blocks.
        total_kb += (size + 1023) // 1024
    return total_kb

1339 

1340 

def _generate_dbgsym_control_file_if_relevant(
    binary_package: BinaryPackage,
    dbgsym_fs_root: VirtualPath,
    dbgsym_control_dir: FSControlRootDir,
    dbgsym_ids: str,
    multi_arch: Optional[str],
    dctrl: str,
    extra_common_params: Sequence[str],
) -> None:
    """Run dpkg-gencontrol to produce the -dbgsym package's DEBIAN/control.

    Most fields are forced via -D/-U overrides, so the dbgsym package does not
    inherit relationship fields from the stanza dpkg-gencontrol reads.

    :param binary_package: The package the debug symbols belong to.
    :param dbgsym_fs_root: Data root of the dbgsym package (for Installed-Size).
    :param dbgsym_control_dir: Control root of the dbgsym package; the control
      file is written into its fs path.
    :param dbgsym_ids: Space-separated build-ids for the Build-Ids field.
    :param multi_arch: The parent package's Multi-Arch value; only "same" is
      propagated to the dbgsym package.
    :param dctrl: The control file (debian/control or a generated replacement)
      to feed to dpkg-gencontrol.
    :param extra_common_params: Extra dpkg-gencontrol parameters shared with
      the parent package's invocation.
    """
    section = binary_package.archive_section
    component = ""
    extra_params = []
    # Keep the dbgsym package in the same archive component as the parent
    # (e.g. "contrib/") when the parent is outside "main".
    if section is not None and "/" in section and not section.startswith("main/"):
        component = section.split("/", 1)[1] + "/"
    if multi_arch != "same":
        extra_params.append("-UMulti-Arch")
    else:
        extra_params.append(f"-DMulti-Arch={multi_arch}")
    extra_params.append("-UReplaces")
    extra_params.append("-UBreaks")
    dbgsym_control_fs_path = dbgsym_control_dir.fs_path
    ensure_dir(dbgsym_control_fs_path)
    # Pass it via cmd-line to make it more visible that we are providing the
    # value. It also prevents the dbgsym package from picking up this value.
    total_size = compute_installed_size(dbgsym_fs_root) + compute_installed_size(
        dbgsym_control_dir
    )
    extra_params.append(f"-VInstalled-Size={total_size}")
    extra_params.extend(extra_common_params)

    package = binary_package.name
    # With the real debian/control there is no dbgsym stanza, so select the
    # parent's stanza; a generated control file has a "<pkg>-dbgsym" stanza.
    package_selector = (
        binary_package.name
        if dctrl == "debian/control"
        else f"{binary_package.name}-dbgsym"
    )
    dpkg_cmd = [
        "dpkg-gencontrol",
        f"-p{package_selector}",
        # FIXME: Support d/<pkg>.changelog at some point.
        "-ldebian/changelog",
        "-T/dev/null",
        f"-c{dctrl}",
        f"-O{dbgsym_control_fs_path}/control",
        # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
        "-P/non-existent",
        f"-DPackage={package}-dbgsym",
        "-DDepends=" + package + " (= ${binary:Version})",
        f"-DDescription=debug symbols for {package}",
        f"-DSection={component}debug",
        f"-DBuild-Ids={dbgsym_ids}",
        "-UPre-Depends",
        "-URecommends",
        "-USuggests",
        "-UEnhances",
        "-UProvides",
        "-UEssential",
        "-UConflicts",
        "-DPriority=optional",
        "-UHomepage",
        "-UImportant",
        "-UBuilt-Using",
        "-UStatic-Built-Using",
        "-DAuto-Built-Package=debug-symbols",
        "-UProtected",
        *extra_params,
    ]
    print_command(*dpkg_cmd)
    try:
        subprocess.check_call(dpkg_cmd)
    except subprocess.CalledProcessError:
        _error(
            f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output from "
            " dpkg-gencontrol above to understand what went wrong."
        )
    os.chmod(os.path.join(dbgsym_control_fs_path, "control"), 0o644)

1417 

1418 

1419def _all_parent_directories_of(directories: Iterable[str]) -> Set[str]: 

1420 result = {"."} 

1421 for path in directories: 

1422 current = os.path.dirname(path) 

1423 while current and current not in result: 

1424 result.add(current) 

1425 current = os.path.dirname(current) 

1426 return result 

1427 

1428 

def _compute_multi_arch_for_arch_all_doc(
    binary_package: BinaryPackage,
    fs_root: FSPath,
) -> Optional[str]:
    """Return "foreign" for pure documentation packages; otherwise None.

    We limit by package name, since there are tricks involving a
    `Multi-Arch: no` depending on a `Multi-Arch: same` to emulate
    `Multi-Arch: allowed`. Said `Multi-Arch: no` can have no contents.
    That case seems unrealistic for -doc/-docs packages and accordingly
    the limitation here.
    """
    if not binary_package.name.endswith(("-doc", "-docs")):
        return None
    no_descend_paths = {
        "./usr/share/doc",
    }
    allowed_files = {f"./usr/share/lintian/overrides/{binary_package.name}"}
    has_other_content = _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_paths,
        acceptable_files=allowed_files,
    )
    return None if has_other_content else "foreign"

1450 

1451 

def _any_unacceptable_paths(
    fs_root: FSPath,
    *,
    acceptable_no_descend_paths: Union[List[str], AbstractSet[str]] = frozenset(),
    acceptable_files: Union[List[str], AbstractSet[str]] = frozenset(),
) -> bool:
    """Return True if the tree contains anything beyond the allow-listed paths.

    :param acceptable_no_descend_paths: Directories whose entire subtree is
      acceptable (the walk does not descend into them).
    :param acceptable_files: Individual files that are acceptable.
    """
    allowed_intermediate_dirs = _all_parent_directories_of(
        itertools.chain(acceptable_no_descend_paths, acceptable_files)
    )
    for fs_path, children in fs_root.walk():
        current = fs_path.path
        if current in acceptable_no_descend_paths:
            # Whole subtree is allow-listed; skip its contents entirely.
            children.clear()
        elif (
            current not in allowed_intermediate_dirs
            and current not in acceptable_files
        ):
            return True
    return False

1470 

1471 

def auto_compute_multi_arch(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    fs_root: FSPath,
) -> Optional[str]:
    """Guess a Multi-Arch value ("same"/"foreign") or None when unsure.

    Never auto-sets a value when the package ships any maintainer script.
    For arch:all packages, delegates to the -doc heuristic; for
    arch-specific packages, "same" is returned only when the contents are
    limited to multi-arch-safe locations.
    """
    resolved_arch = binary_package.resolved_architecture
    for script in ALL_CONTROL_SCRIPTS:
        candidate = control_output_dir.get(script)
        if candidate is not None and candidate.is_file:
            return None

    if resolved_arch == "all":
        return _compute_multi_arch_for_arch_all_doc(binary_package, fs_root)

    multiarch_tuple = binary_package.deb_multiarch
    assert resolved_arch != "all"
    no_descend_paths = {
        f"./usr/lib/{multiarch_tuple}",
        f"./usr/include/{multiarch_tuple}",
    }
    allowed_files = {
        f"./usr/share/doc/{binary_package.name}/{basename}"
        for basename in (
            "copyright",
            "changelog.gz",
            "changelog.Debian.gz",
            f"changelog.Debian.{resolved_arch}.gz",
            "NEWS.Debian",
            "NEWS.Debian.gz",
            "README.Debian",
            "README.Debian.gz",
        )
    }

    # Note that the lintian-overrides file is deliberately omitted from the allow-list. We would have to know that the
    # override does not use architecture segments. With pure debputy, this is guaranteed (debputy
    # does not allow lintian-overrides with architecture segment). However, with a mixed debhelper + debputy,
    # `dh_lintian` allows it with compat 13 or older.

    if _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_paths,
        acceptable_files=allowed_files,
    ):
        return None

    return "same"

1521 

1522 

@functools.lru_cache()
def _has_t64_enabled() -> bool:
    """Check via dpkg-buildflags whether the time64 ABI feature is enabled.

    Returns False when dpkg-buildflags is unavailable or fails; the result is
    cached for the lifetime of the process.
    """
    try:
        output = subprocess.check_output(
            ["dpkg-buildflags", "--query-features", "abi"]
        ).decode()
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False

    return any(
        stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes"
        for stanza in Deb822.iter_paragraphs(output)
    )

1536 

1537 

def _t64_migration_substvar(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    substvars: FlushableSubstvars,
) -> None:
    """Set up the time64 transition Provides substvar where relevant.

    Shared library packages renamed for the 64-bit time_t transition should
    "Provides" their pre-transition name (at the current binary version) on
    architectures/builds that did not change ABI; otherwise the substvar is
    defined as empty so references to it still resolve.

    :raises AssertionError: if no t64 compat name can be derived from the
      package name and no X-Time64-Compat override is present.
    """
    name = binary_package.name
    compat_name = binary_package.fields.get("X-Time64-Compat")
    if compat_name is None and not _T64_REGEX.match(name):
        # Not a t64-renamed package and no explicit override; nothing to do.
        return

    # Only packages shipping a symbols/shlibs control file participate.
    if not any(
        p.is_file
        for n in ["symbols", "shlibs"]
        if (p := control_output_dir.get(n)) is not None
    ):
        return

    if compat_name is None:
        compat_name = name.replace("t64", "", 1)
        if compat_name == name:
            raise AssertionError(
                f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
                " As a work around, you can explicitly provide a X-Time64-Compat header in debian/control"
                " where you specify the desired compat name."
            )

    arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")

    if arch_bits != "32" or not _has_t64_enabled():
        # Fix: the "${binary:Version}" placeholder must be emitted literally
        # for dpkg to expand it later.  The previous f-string interpolated an
        # (undefined) `binary` expression instead of escaping the braces,
        # which raised NameError at runtime.  The escaped form below matches
        # the "-DDepends" usage in the dbgsym control file generation.
        substvars.add_dependency(
            _T64_PROVIDES,
            f"{compat_name} (= ${{binary:Version}})",
        )
    elif _T64_PROVIDES not in substvars:
        substvars[_T64_PROVIDES] = ""

1573 

1574 

@functools.lru_cache()
def dpkg_field_list_pkg_dep() -> Sequence[str]:
    """Return the package relationship field names as known by dpkg.

    Asks perl's Dpkg::Control::Fields module, so the answer always matches
    the installed dpkg toolchain.  Cached for the lifetime of the process.
    """
    cmd = [
        "perl",
        "-MDpkg::Control::Fields",
        "-e",
        r'print "$_\n" for field_list_pkg_dep',
    ]
    try:
        output = subprocess.check_output(cmd)
    except (FileNotFoundError, subprocess.CalledProcessError):
        _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
    return output.decode("utf-8").splitlines(keepends=False)

1589 

1590 

# Fields that debputy merges substvars into, but which `dpkg-gencontrol` does
# not recognize itself; they are emitted with an `XB-` prefix instead (see
# `_handle_auto_substvars`).
_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG = {
    "Commands",
}

1594 

1595 

@functools.lru_cache()
def all_auto_substvars() -> Sequence[str]:
    """Return every field name eligible for automatic substvar merging.

    This is dpkg's own package relationship fields plus the extra fields
    that debputy supports but dpkg does not know about.
    """
    # `list(...)` instead of the previous identity comprehension copy.
    result = list(dpkg_field_list_pkg_dep())
    result.extend(_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG)
    return tuple(result)

1601 

1602 

def _handle_auto_substvars(
    source: SourcePackage,
    dctrl_file: BinaryPackage,
    substvars: FlushableSubstvars,
    has_dbgsym: bool,
) -> Optional[str]:
    """Merge relationship substvars into a generated control file when needed.

    Builds a replacement control file whose binary stanza has ``${...}``
    references appended to each relationship field that has matching
    substvars (so dpkg-gencontrol will expand them), plus a minimal stanza
    for the dbgsym package when one will be built.

    :return: The path of the generated control file, or None when no merging
      is required and the real debian/control can be used as-is.
    """
    auto_substvars_fields = all_auto_substvars()
    auto_substvars_fields_lc = {x.lower(): x for x in auto_substvars_fields}
    # field name (lowercased) -> set of "${substvar}" references to append
    substvar_fields = collections.defaultdict(set)
    needs_dbgsym_stanza = False
    for substvar_name, substvar in substvars.as_substvar.items():
        if ":" not in substvar_name:
            continue
        if substvar.assignment_operator in ("$=", "!="):
            # Will create incorrect results if there is a dbgsym and we do nothing
            needs_dbgsym_stanza = True

        if substvar.assignment_operator == "$=":
            # Automatically handled; no need for manual merging.
            continue
        _, field = substvar_name.rsplit(":", 1)
        field_lc = field.lower()
        if field_lc not in auto_substvars_fields_lc:
            continue
        substvar_fields[field_lc].add("${" + substvar_name + "}")

    if not has_dbgsym:
        needs_dbgsym_stanza = False

    if not substvar_fields and not needs_dbgsym_stanza:
        # Nothing to merge; caller can use debian/control directly.
        return None

    # Work on a copy of the binary package's stanza.
    replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)

    for field_name in auto_substvars_fields:
        field_name_lc = field_name.lower()
        addendum = substvar_fields.get(field_name_lc)
        if addendum is None:
            # No merging required
            continue
        substvars_part = ", ".join(addendum)
        existing_value = replacement_stanza.get(field_name)

        if existing_value is None or existing_value.isspace():
            final_value = substvars_part
        else:
            # Append after any existing relations (dropping a trailing comma).
            existing_value = existing_value.rstrip().rstrip(",")
            final_value = f"{existing_value}, {substvars_part}"
        replacement_stanza[field_name] = final_value
        canonical_field_name = auto_substvars_fields_lc.get(field_name_lc)
        # If `dpkg` does not know the field, we need to inject `XB-` in front
        # of it.
        if (
            canonical_field_name
            and canonical_field_name in _SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG
        ):
            replacement_stanza[f"XB-{canonical_field_name}"] = replacement_stanza[
                field_name
            ]
            del replacement_stanza[field_name]

    with suppress(KeyError):
        replacement_stanza.order_last("Description")

    tmpdir = generated_content_dir(package=dctrl_file)
    # delete=False: the file name is returned and consumed later by the
    # dpkg-gencontrol invocation.
    with tempfile.NamedTemporaryFile(
        mode="wb",
        dir=tmpdir,
        suffix="__DEBIAN_control",
        delete=False,
    ) as fd:
        try:
            cast("Any", source.fields).dump(fd)
        except AttributeError:
            debian.deb822.Deb822(source.fields).dump(fd)
        fd.write(b"\n")
        replacement_stanza.dump(fd)

        if has_dbgsym:
            # Minimal stanza to avoid substvars warnings. Most fields are still set
            # via -D.
            dbgsym_stanza = Deb822()
            dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
            dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
            dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
            fd.write(b"\n")
            dbgsym_stanza.dump(fd)

    return fd.name

1692 

1693 

def _generate_control_files(
    binary_package_data: "BinaryPackageData",
    package_state: "PackageTransformationDefinition",
    control_output_dir: FSControlRootDir,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
    dbgsym_root_fs: Optional[VirtualPath],
    dbgsym_build_ids: Optional[List[str]],
) -> None:
    """Generate DEBIAN/control (and md5sums) for the package via dpkg-gencontrol.

    Also triggers the -dbgsym package's control file generation when the
    dbgsym fs root contains any files.  Pass None for ``dbgsym_root_fs`` /
    ``dbgsym_build_ids`` to skip all dbgsym handling (e.g. for udebs).
    """
    binary_package = binary_package_data.binary_package
    source_package = binary_package_data.source_package
    package_name = binary_package.name
    extra_common_params = []
    extra_params_specific = []
    _ensure_base_substvars_defined(substvars)
    if "Installed-Size" not in substvars:
        # Pass it via cmd-line to make it more visible that we are providing the
        # value. It also prevents the dbgsym package from picking up this value.
        total_size = compute_installed_size(fs_root) + compute_installed_size(
            control_output_dir
        )
        extra_params_specific.append(f"-VInstalled-Size={total_size}")

    ma_value = binary_package.fields.get("Multi-Arch")
    if not binary_package.is_udeb and ma_value is None:
        ma_value = auto_compute_multi_arch(binary_package, control_output_dir, fs_root)
        if ma_value is not None:
            _info(
                f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based'
                ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field'
                ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the'
                ' relevant part of "debian/control" to disable this feature.'
            )
            # We want this to apply to the `-dbgsym` package as well to avoid
            # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable`
            extra_common_params.append(f"-DMulti-Arch={ma_value}")
    elif ma_value == "no":
        # Explicit "Multi-Arch: no" is the default; drop the field entirely.
        extra_common_params.append("-UMulti-Arch")

    dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else ""
    if package_state.binary_version is not None:
        # Non-default binary version (dpkg-gencontrol's -v option).
        extra_common_params.append(f"-v{package_state.binary_version}")

    _t64_migration_substvar(binary_package, control_output_dir, substvars)

    with substvars.flush() as flushed_substvars:
        # A dbgsym package is only built when it would actually contain files.
        has_dbgsym = dbgsym_root_fs is not None and any(
            f for f in dbgsym_root_fs.all_paths() if f.is_file
        )
        dctrl_file = _handle_auto_substvars(
            source_package,
            binary_package,
            substvars,
            has_dbgsym,
        )
        if dctrl_file is None:
            # No substvar merging required; use the real control file.
            dctrl_file = "debian/control"

        if has_dbgsym:
            assert dbgsym_root_fs is not None  # mypy hint
            dbgsym_ctrl_dir = binary_package_data.dbgsym_info.dbgsym_ctrl_dir
            _generate_dbgsym_control_file_if_relevant(
                binary_package,
                dbgsym_root_fs,
                dbgsym_ctrl_dir,
                dbgsym_ids,
                ma_value,
                dctrl_file,
                extra_common_params,
            )
            generate_md5sums_file(
                dbgsym_ctrl_dir,
                dbgsym_root_fs,
            )
        elif dbgsym_ids:
            # No dbgsym package; attach the Build-Ids to the main package.
            extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}")

        ctrl_file = os.path.join(control_output_dir.fs_path, "control")
        dpkg_cmd = [
            "dpkg-gencontrol",
            f"-p{package_name}",
            # FIXME: Support d/<pkg>.changelog at some point.
            "-ldebian/changelog",
            f"-c{dctrl_file}",
            f"-T{flushed_substvars}",
            f"-O{ctrl_file}",
            # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
            "-P/non-existent",
            *extra_common_params,
            *extra_params_specific,
        ]
        print_command(*dpkg_cmd)
        try:
            subprocess.check_call(dpkg_cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to generate DEBIAN/control file for {package_name} failed. Please review the output from "
                " dpkg-gencontrol above to understand what went wrong."
            )
        os.chmod(ctrl_file, 0o644)

        if not binary_package.is_udeb:
            generate_md5sums_file(control_output_dir, fs_root)