Coverage for src/debputy/deb_packaging_support.py: 23%

815 statements  

« prev     ^ index     » next       coverage.py v7.6.0, created at 2025-01-27 13:59 +0000

1import collections 

2import contextlib 

3import dataclasses 

4import datetime 

5import functools 

6import hashlib 

7import itertools 

8import operator 

9import os 

10import re 

11import shutil 

12import subprocess 

13import tempfile 

14import textwrap 

15from contextlib import ExitStack 

16from tempfile import mkstemp 

17from typing import ( 

18 Iterable, 

19 List, 

20 Optional, 

21 Set, 

22 Dict, 

23 Sequence, 

24 Tuple, 

25 Iterator, 

26 Literal, 

27 TypeVar, 

28 FrozenSet, 

29 cast, 

30 Any, 

31 Union, 

32 Mapping, 

33 AbstractSet, 

34 TYPE_CHECKING, 

35) 

36 

37import debian.deb822 

38from debian.changelog import Changelog 

39from debian.deb822 import Deb822 

40 

41from debputy._deb_options_profiles import DebBuildOptionsAndProfiles 

42from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable 

43from debputy.dh.debhelper_emulation import ( 

44 dhe_install_pkg_file_as_ctrl_file_if_present, 

45) 

46from debputy.elf_util import find_all_elf_files, ELF_MAGIC 

47from debputy.exceptions import DebputyDpkgGensymbolsError, PureVirtualPathError 

48from debputy.filesystem_scan import FSPath, FSControlRootDir, VirtualPathBase 

49from debputy.maintscript_snippet import ( 

50 ALL_CONTROL_SCRIPTS, 

51 MaintscriptSnippetContainer, 

52 STD_CONTROL_SCRIPTS, 

53) 

54from debputy.packages import BinaryPackage, SourcePackage 

55from debputy.packaging.alternatives import process_alternatives 

56from debputy.packaging.debconf_templates import process_debconf_templates 

57from debputy.packaging.makeshlibs import ( 

58 compute_shlibs, 

59 ShlibsContent, 

60 generate_shlib_dirs, 

61) 

62from debputy.plugin.api.feature_set import PluginProvidedFeatureSet 

63from debputy.plugin.api.impl import ServiceRegistryImpl 

64from debputy.plugin.api.impl_types import ( 

65 MetadataOrMaintscriptDetector, 

66 PackageDataTable, 

67 ServiceManagerDetails, 

68) 

69from debputy.plugin.api.spec import ( 

70 FlushableSubstvars, 

71 VirtualPath, 

72 PackageProcessingContext, 

73 ServiceDefinition, 

74) 

75from debputy.plugin.debputy.binary_package_rules import ServiceRule 

76from debputy.util import ( 

77 _error, 

78 ensure_dir, 

79 assume_not_none, 

80 resolve_perl_config, 

81 perlxs_api_dependency, 

82 detect_fakeroot, 

83 grouper, 

84 _info, 

85 xargs, 

86 escape_shell, 

87 generated_content_dir, 

88 print_command, 

89 _warn, 

90) 

91 

92if TYPE_CHECKING: 

93 from debputy.highlevel_manifest import ( 

94 HighLevelManifest, 

95 PackageTransformationDefinition, 

96 BinaryPackageData, 

97 ) 

98 

99 

# Covariant TypeVar for VirtualPath-like values; used e.g. by the
# `children: List[VP]` parameter of `_elf_static_lib_walk_filter`.
VP = TypeVar("VP", bound=VirtualPath, covariant=True)

# Matches library package names from the 64-bit time_t ("t64") transition,
# e.g. "libfoot64" or "libfoot64-nss".
# NOTE(review): used by t64 handling elsewhere in this file — confirm against callers.
_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
# Substvar name associated with the t64 transition Provides.
_T64_PROVIDES = "t64:Provides"

104 

105 

def generate_md5sums_file(
    control_output_dir: VirtualPathBase,
    fs_root: VirtualPath,
) -> None:
    """Generate the md5sums control file for the package contents.

    Files listed in the (already generated) `conffiles` control file are
    excluded, matching dpkg's conffile handling.  If there is nothing to
    checksum, no md5sums file is written at all.
    """
    excluded = set()
    conffiles = control_output_dir.get("conffiles")
    if conffiles and conffiles.is_file:
        with conffiles.open() as fd:
            for line in fd:
                if line.startswith("/"):
                    # Conffile entries are absolute; the fs tree uses "./" prefixes.
                    excluded.add("." + line.rstrip("\n"))
    candidates = [
        member
        for member in fs_root.all_paths()
        if member.is_file and member.path not in excluded
    ]
    # Sort in the same order as dh_md5sums, which is not quite the same as dpkg/`all_paths()`
    # Compare `.../doc/...` vs `.../doc-base/...` if you want to see the difference between
    # the two approaches.
    candidates.sort(key=lambda member: member.path)
    if not candidates:
        return
    with control_output_dir.open_child("md5sums", "w") as md5fd:
        for member in candidates:
            member_path = member.path
            assert member_path.startswith("./")
            rel_path = member_path[2:]
            digest = hashlib.md5()
            with member.open(byte_io=True) as f:
                while chunk := f.read(8192):
                    digest.update(chunk)
            md5fd.write(f"{digest.hexdigest()} {rel_path}\n")

141 

142 

def install_or_generate_conffiles(
    binary_package: BinaryPackage,
    ctrl_root: str,
    fs_root: VirtualPath,
    debian_dir: VirtualPath,
) -> None:
    """Produce the `conffiles` control file.

    Installs `debian/<pkg>.conffiles` if the packager provided one, then
    auto-appends every regular file below `/etc`, and finally normalizes
    the file's permissions to 0644.
    """
    conffiles_dest = os.path.join(ctrl_root, "conffiles")
    dhe_install_pkg_file_as_ctrl_file_if_present(
        debian_dir,
        binary_package,
        "conffiles",
        ctrl_root,
        0o0644,
    )
    etc_dir = fs_root.lookup("etc")
    if etc_dir:
        etc_files = (entry for entry in etc_dir.all_paths() if entry.is_file)
        _add_conffiles(conffiles_dest, etc_files)
    # The file may have been created by either step above (or not at all).
    if os.path.isfile(conffiles_dest):
        os.chmod(conffiles_dest, 0o0644)

162 

163 

# Bit flags describing what kind of Perl content was detected in a package
# (combined into `detected_dep_requirements` in `handle_perl_code`).
PERL_DEP_PROGRAM = 1  # Perl scripts (shebang or *.pl)
PERL_DEP_INDEP_PM_MODULE = 2  # Arch-indep *.pm modules (vendorlib)
PERL_DEP_XS_MODULE = 4  # Compiled XS modules (*.so under vendorarch)
PERL_DEP_ARCH_PM_MODULE = 8  # Arch-dependent *.pm modules (vendorarch)
# Any of these bits being set means the dependency must NOT be "perl:any"
# (multi-arch "any" is only safe for programs and arch-indep modules).
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)

169 

170 

@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    """Return the installed version of *package* according to `dpkg -s`."""
    status_output = (
        subprocess.check_output(["dpkg", "-s", package])
        .decode("utf-8")
        .splitlines(keepends=False)
    )
    version = next(
        (line[8:].strip() for line in status_output if line.startswith("Version: ")),
        None,
    )
    # A `dpkg -s` for an installed package always has a Version field.
    assert version is not None
    return version

185 

186 

def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
) -> None:
    """Detect Perl content in the package and populate ${perl:Depends}.

    Scans the vendorlib/vendorarch directories for modules and the whole
    tree for Perl programs, then emits the appropriate `perl:Depends`
    substvar entries (mirroring debhelper's dh_perl).
    """
    perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
    # Bitmask of PERL_DEP_* flags accumulated below.
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in (perl_config_data.vendorarch, perl_config_data.vendorlib):
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    for d, pm_mode in [
        (perl_config_data.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (perl_config_data.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            if path.name.endswith(".so"):
                # Compiled XS extension.
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                # Pure-Perl module; flag depends on which inc dir it lives in.
                detected_dep_requirements |= pm_mode

    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            # Examples and docs should not influence the dependency.
            children.clear()
            continue
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    # ":any" is only valid when nothing arch-specific was detected.
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        dependency += ":any"

    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        # XS modules are tied to the exact perl version they were built against.
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())

250 

251 

def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None:
    """Abort the build if the package ships anything under /usr/local."""
    usr_local = fs_root.lookup("./usr/local")
    if not usr_local:
        return
    if any(usr_local.iterdir):
        # There are two key issues:
        #  1) Getting the generated maintscript carried on to the final maintscript
        #  2) Making sure that manifest created directories do not trigger the "unused error".
        _error(
            f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})."
        )

261 

262 

def _find_and_analyze_systemd_service_files(
    fs_root: VirtualPath,
    systemd_service_dir: Literal["system", "user"],
) -> Iterable[VirtualPath]:
    """Return the systemd unit files installed in the package.

    Looks in both `/usr/lib/systemd/<dir>` and `/lib/systemd/<dir>`,
    de-duplicating by basename with the `/usr/lib` copy winning (it is
    scanned first).  Symlinks are skipped: they are unit aliases rather
    than unit files.

    Fix: the original also built an `aliases` mapping (basename of the
    symlink target -> alias names) that was never read by anything; that
    dead code — and the `readlink()`/`basename()` calls feeding it — has
    been removed.  Behavior of the returned value is unchanged.
    """
    service_dirs = [
        f"./usr/lib/systemd/{systemd_service_dir}",
        f"./lib/systemd/{systemd_service_dir}",
    ]
    seen: Set[str] = set()
    all_files: List[VirtualPath] = []

    for service_dir in service_dirs:
        dir_path = fs_root.lookup(service_dir)
        if not dir_path:
            continue
        for child in dir_path.iterdir:
            if child.is_symlink:
                # Aliases of other units; not unit files themselves.
                continue
            if child.is_file and child.name not in seen:
                seen.add(child.name)
                all_files.append(child)

    return all_files

288 

289 

def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    """Abort the build when the package ships systemd *user* units (unsupported)."""
    user_units = _find_and_analyze_systemd_service_files(fs_root, "user")
    for service_file in user_units:
        _error(
            f'Sorry, systemd user services files are not supported at the moment (saw "{service_file.path}"'
            f" in {dctrl.name})"
        )

299 

300 

# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
# Keep at least this many (non-binNMU) changelog entries even when they are
# older than the cut-off date above.
_DCH_MIN_NUM_OF_ENTRIES = 4

304 

305 

def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: Optional[Set[str]],
    *,
    trim: bool = True,
) -> Tuple[bool, Optional[Set[str]]]:
    """Prune an installed Debian changelog or NEWS file in place.

    For changelogs (``is_changelog=True``), entries older than
    `_DCH_PRUNE_CUT_OFF_DATE` are dropped once `_DCH_MIN_NUM_OF_ENTRIES`
    have been kept; binNMU entries are split out into a separate
    ``<name>.<arch>`` file.  For NEWS files, only entries whose version is
    in *keep_versions* are kept, and the file is deleted entirely if that
    removes everything (see #1021607).

    Returns ``(was_shortened, kept_version_set)``; the version set is only
    provided when the file was shortened (and None for deleted NEWS files).
    """
    # TODO: Process `d/changelog` once
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error(f"The Debian changelog was missing date in sign off line")
            entry_date = datetime.datetime.strptime(
                block_date, "%a, %d %b %Y %H:%M:%S %z"
            ).date()
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                # Entries are newest-first, so everything from here on is older.
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        # Nothing to rewrite; leave the file untouched.
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    with (
        path.replace_fs_path_content() as fs_path,
        open(fs_path, "wt", encoding="utf-8") as fd,
    ):
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(
                f"""\
                \
                # Older entries have been removed from this changelog.
                # To read the complete changelog use `apt changelog {package.name}`.
                """
            )
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        # binNMU entries go into a per-architecture side file next to the original.
        with (
            parent_dir.add_file(
                f"{path.name}.{package.resolved_architecture}"
            ) as binnmu_changelog,
            open(
                binnmu_changelog.fs_path,
                "wt",
                encoding="utf-8",
            ) as binnmu_fd,
        ):
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}

404 

405 

def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    """Normalize and trim the installed changelog, then prune NEWS.Debian to match."""
    doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not doc_dir:
        return
    changelog = doc_dir.get("changelog.Debian")
    if is_native:
        if changelog:
            # Native packages use "changelog" rather than "changelog.Debian".
            changelog.name = "changelog"
        else:
            changelog = doc_dir.get("changelog")

    # DEB_BUILD_OPTIONS=notrimdch disables trimming of old entries.
    trim = "notrimdch" not in build_env.deb_build_options

    kept_entries = None
    pruned_changelog = False
    if changelog and changelog.has_fs_path:
        pruned_changelog, kept_entries = _prune_dch_file(
            dctrl, changelog, True, None, trim=trim
        )

    if not trim:
        return

    news_file = doc_dir.get("NEWS.Debian")
    if news_file and news_file.has_fs_path and pruned_changelog:
        # Keep NEWS in sync with the versions that survived in the changelog.
        _prune_dch_file(dctrl, news_file, False, kept_entries)

437 

# Directories (relative to the source root) searched for an upstream changelog.
_UPSTREAM_CHANGELOG_SOURCE_DIRS = [
    ".",
    "doc",
    "docs",
]
_UPSTREAM_CHANGELOG_NAMES = {
    # The value is a priority to match the debhelper order.
    # - The suffix weights heavier than the basename (because that is what debhelper did)
    #
    # We list the name/suffix in order of priority in the code. That makes it easier to
    # see the priority directly, but it gives the "lowest" value to the most important items
    f"{n}{s}": (sw, nw)
    for (nw, n), (sw, s) in itertools.product(
        enumerate(["changelog", "changes", "history"], start=1),
        enumerate(["", ".txt", ".md", ".rst"], start=1),
    )
}
# Sentinel "no match" result; (0, 0) sorts below every real priority (which start at 1).
_NONE_TUPLE = (None, (0, 0))

456 

457 

def _detect_upstream_changelog(names: Iterable[str]) -> Optional[str]:
    """Pick the best upstream changelog candidate among *names* (None if absent)."""
    best: Optional[Tuple[str, Tuple[int, int]]] = None
    for candidate in names:
        priority = _UPSTREAM_CHANGELOG_NAMES.get(candidate.lower())
        if priority is None:
            continue
        # Strict "<" keeps the first of equally ranked candidates, matching min().
        if best is None or priority < best[1]:
            best = (candidate, priority)
    return best[0] if best is not None else None

465 

466 

def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: FSPath,
    source_fs_root: VirtualPath,
) -> None:
    """Install the upstream changelog into /usr/share/doc/<pkg> if missing.

    Prefers a changelog already installed in the doc dir (renaming it to
    "changelog"); otherwise copies the best candidate found in the source
    tree's well-known directories.
    """
    doc_dir = f"./usr/share/doc/{dctrl_bin.name}"
    bdir = fs_root.lookup(doc_dir)
    if bdir and not bdir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if bdir:
        if bdir.get("changelog") or bdir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        installed_candidate = _detect_upstream_changelog(
            p.name for p in bdir.iterdir if p.is_file and p.has_fs_path and p.size > 0
        )
        if installed_candidate:
            candidate_path = bdir.lookup(installed_candidate)
            assert candidate_path is not None  # Mostly as a typing hint
            candidate_path.name = "changelog"
            return

    for dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        dir_path = source_fs_root.lookup(dirname)
        if not dir_path or not dir_path.is_dir:
            continue
        changelog_name = _detect_upstream_changelog(
            p.name
            for p in dir_path.iterdir
            if p.is_file and p.has_fs_path and p.size > 0
        )
        if not changelog_name:
            continue
        if bdir is None:
            bdir = fs_root.mkdirs(doc_dir)
        bdir.insert_file_from_fs_path(
            "changelog",
            dir_path[changelog_name].fs_path,
        )
        break

509 

510 

@dataclasses.dataclass(slots=True)
class _ElfInfo:
    """Book-keeping for one ELF file during stripping/dbgsym extraction."""

    # Path of the ELF file inside the package's fs tree.
    path: VirtualPath
    # Materialized (writable) path on the host file system.
    fs_path: str
    # Populated by `_resolve_build_ids` from file(1) output; None until then.
    is_stripped: Optional[bool] = None
    build_id: Optional[str] = None
    # The generated .debug file destined for the -dbgsym package (if any).
    dbgsym: Optional[FSPath] = None

518 

519 

def _elf_static_lib_walk_filter(
    fs_path: VirtualPath,
    children: List[VP],
) -> bool:
    """Walk filter excluding paths that must not be stripped.

    Returns False (and prunes the subtree) for `.../debug/.build-id`
    directories; otherwise removes individual children that should be left
    alone (`.so` files under debug dirs, guile `.go` objects) and returns True.
    """
    if (
        fs_path.name == ".build-id"
        and assume_not_none(fs_path.parent_dir).name == "debug"
    ):
        children.clear()
        return False
    # Deal with some special cases, where certain files are not supposed to be stripped in a given directory
    # NOTE(review): `fs_path.name` never contains "/", so the `endswith("debug/")`
    # arm below looks unreachable — confirm whether `fs_path.path` was intended.
    if "debug/" in fs_path.path or fs_path.name.endswith("debug/"):
        # FIXME: We need a way to opt out of this per #468333/#1016122
        for so_file in (f for f in list(children) if f.name.endswith(".so")):
            children.remove(so_file)
    if "/guile/" in fs_path.path or fs_path.name == "guile":
        for go_file in (f for f in list(children) if f.name.endswith(".go")):
            children.remove(go_file)
    return True

539 

540 

@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[Dict[str, _ElfInfo]]:
    """Context manager yielding {materialized fs path: _ElfInfo} for all ELF files.

    Each file is materialized via `replace_fs_path_content()` so callers may
    modify it; the ExitStack keeps those contexts open until the caller's
    `with` block exits.  Build-ids/strip status are resolved before yielding.
    """
    all_elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not all_elf_files:
        yield {}
        return
    with ExitStack() as cm_stack:
        # Lazy generator; fully consumed by the dict comprehension below while
        # the ExitStack is still open.
        resolved = (
            (p, cm_stack.enter_context(p.replace_fs_path_content()))
            for p in all_elf_files
        )
        elf_info = {
            fs_path: _ElfInfo(
                path=assume_not_none(fs_root.lookup(detached_path.path)),
                fs_path=fs_path,
            )
            for detached_path, fs_path in resolved
        }
        _resolve_build_ids(elf_info)
        yield elf_info

564 

565 

def _find_all_static_libs(
    fs_root: FSPath,
) -> Iterator[FSPath]:
    """Yield static libraries (ar archives containing ELF objects) in the tree."""
    for path, children in fs_root.walk():
        # Matching the logic of dh_strip for now.
        if not _elf_static_lib_walk_filter(path, children):
            continue
        if not path.is_file:
            continue
        if path.name.startswith("lib") and path.name.endswith("_g.a"):
            # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
            # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
            # symbols")
            continue
        if not path.has_fs_path:
            continue
        with path.open(byte_io=True) as fd:
            # ar archive magic (regular and thin archives).
            magic = fd.read(8)
            if magic not in (b"!<arch>\n", b"!<thin>\n"):
                continue
            # Maybe we should see if the first file looks like an index file.
            # Three random .a samples suggests the index file is named "/"
            # Not sure if we should skip past it and then do the ELF check or just assume
            # that "index => static lib".
            data = fd.read(1024 * 1024)
            if b"\0" not in data and ELF_MAGIC not in data:
                continue
        yield path

594 

595 

@contextlib.contextmanager
def _all_static_libs(fs_root: FSPath) -> Iterator[List[str]]:
    """Context manager yielding writable fs paths for every static library found."""
    static_libs = list(_find_all_static_libs(fs_root))
    if not static_libs:
        yield []
        return
    with ExitStack() as stack:
        materialized: List[str] = []
        for lib in static_libs:
            materialized.append(stack.enter_context(lib.replace_fs_path_content()))
        yield materialized

607 

608 

# Extracts the hex build-id from file(1) output, e.g. "BuildID[sha1]=0123abcd...".
_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)")

610 

611 

def _resolve_build_ids(elf_info: Dict[str, _ElfInfo]) -> None:
    """Fill in `is_stripped` and `build_id` on each _ElfInfo using file(1).

    Runs `file -00 -N` (NUL-separated, no padding) over the paths in batches
    via xargs to keep the command line within limits.
    """
    static_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        # file(1)'s seccomp sandbox breaks under fakeroot.
        static_cmd.append("--no-sandbox")

    for cmd in xargs(static_cmd, (i.fs_path for i in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        lines = output.rstrip(b"\0").split(b"\0")

        # -00 emits alternating "<path>\0<verdict>\0" pairs.
        for fs_path_b, verdict in grouper(lines, 2, incomplete="strict"):
            fs_path = fs_path_b.decode("utf-8")
            info = elf_info[fs_path]
            info.is_stripped = b"not stripped" not in verdict
            m = _FILE_BUILD_ID_RE.search(verdict)
            if m:
                info.build_id = m.group(1).decode("utf-8")

631 

632 

def _make_debug_file(
    objcopy: str, fs_path: str, build_id: str, dbgsym_fs_root: FSPath
) -> FSPath:
    """Extract debug sections of *fs_path* into the dbgsym tree.

    The .debug file is placed at
    `usr/lib/debug/.build-id/<id[:2]>/<id[2:]>.debug`; an existing entry for
    the same build-id is reused (hard-link style dedup across binaries).
    """
    dbgsym_dirname = f"./usr/lib/debug/.build-id/{build_id[0:2]}/"
    dbgsym_basename = f"{build_id[2:]}.debug"
    dbgsym_dir = dbgsym_fs_root.mkdirs(dbgsym_dirname)
    if dbgsym_basename in dbgsym_dir:
        return dbgsym_dir[dbgsym_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with dbgsym_dir.add_file(
        dbgsym_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as dbgsym:
        try:
            subprocess.check_call(
                [
                    objcopy,
                    "--only-keep-debug",
                    "--compress-debug-sections",
                    fs_path,
                    dbgsym.fs_path,
                ]
            )
        except subprocess.CalledProcessError:
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, dbgsym.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return dbgsym

670 

671 

672def _strip_binary(strip: str, options: List[str], paths: Iterable[str]) -> None: 

673 # We assume the paths are obtained via `p.replace_fs_path_content()`, 

674 # which is the case at the time of written and should remain so forever. 

675 it = iter(paths) 

676 first = next(it, None) 

677 if first is None: 

678 return 

679 static_cmd = [strip] 

680 static_cmd.extend(options) 

681 

682 for cmd in xargs(static_cmd, itertools.chain((first,), (f for f in it))): 

683 _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}") 

684 try: 

685 subprocess.check_call( 

686 cmd, 

687 stdin=subprocess.DEVNULL, 

688 restore_signals=True, 

689 ) 

690 except subprocess.CalledProcessError: 

691 _error( 

692 f"Attempting to remove ELF debug info failed. Please review the error from {strip} above" 

693 f" understand what went wrong." 

694 ) 

695 

696 

def _attach_debug(objcopy: str, elf_binary: VirtualPath, dbgsym: FSPath) -> None:
    """Attach a .gnu_debuglink section pointing at *dbgsym* to *elf_binary*.

    The dbgsym file is materialized first because objcopy embeds the
    physical basename of the link target.  Aborts the build on failure.
    """
    dbgsym_fs_path: str
    with dbgsym.replace_fs_path_content() as dbgsym_fs_path:
        cmd = [objcopy, "--add-gnu-debuglink", dbgsym_fs_path, elf_binary.fs_path]
        print_command(*cmd)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above understand what went wrong."
            )

710 

711@functools.lru_cache() 

712def _has_tool(tool: str) -> bool: 

713 return shutil.which(tool) is not None 

714 

715 

def _run_dwz(
    dctrl: BinaryPackage,
    dbgsym_fs_root: FSPath,
    unstripped_elf_info: List[_ElfInfo],
) -> None:
    """Deduplicate DWARF debug info across the package's ELF files via dwz.

    Skipped for udebs, when there is nothing unstripped, or when dwz is not
    installed.  With more than one ELF file, common debug info is extracted
    into a multi-arch "multifile" installed into the dbgsym tree.
    """
    if not unstripped_elf_info or dctrl.is_udeb or not _has_tool("dwz"):
        return
    dwz_cmd = ["dwz"]
    dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
    dwz_ma_basename = f"{dctrl.name}.debug"
    multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
    build_time_multifile = None
    if len(unstripped_elf_info) > 1:
        # Write the multifile to a temp location; only installed if non-empty.
        fs_content_dir = generated_content_dir()
        fd, build_time_multifile = mkstemp(suffix=dwz_ma_basename, dir=fs_content_dir)
        os.close(fd)
        dwz_cmd.append(f"-m{build_time_multifile}")
        # -M is the (absolute) installed path recorded in the debug links.
        dwz_cmd.append(f"-M/{multifile}")

    # TODO: configuration for disabling multi-file and tweaking memory limits

    dwz_cmd.extend(e.fs_path for e in unstripped_elf_info)

    _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
    try:
        subprocess.check_call(dwz_cmd)
    except subprocess.CalledProcessError:
        _error(
            "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
            " to understand what went wrong."
        )
    if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
        dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
        dwz_dir.insert_file_from_fs_path(
            dwz_ma_basename,
            build_time_multifile,
            mode=0o644,
            require_copy_on_write=False,
            follow_symlinks=False,
        )

756 

757 

def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: FSPath,
    dbgsym_fs_root: VirtualPath,
) -> List[str]:
    """Strip binaries/static libs and move the debug info into the dbgsym tree.

    Mirrors dh_strip + dh_dwz: static libraries are debug-stripped in place,
    ELF files get their debug sections extracted into per-build-id .debug
    files (after dwz deduplication), then are stripped and linked back via
    .gnu_debuglink.  Returns the sorted, de-duplicated build-ids that ended
    up in the dbgsym package (for the Build-Ids control field).
    """
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        # Deduplicate debug info before extracting it.
        _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When run strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

        # Set for uniqueness
        all_debug_info = sorted(
            {assume_not_none(i.build_id) for i in unstripped_elf_info}
        )

        dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
        dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
        return all_debug_info

835 

836 

def run_package_processors(
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
) -> None:
    """Run every applicable plugin-provided package processor over *fs_root*."""
    binary_package = package_metadata_context.binary_package
    feature_set = manifest.plugin_provided_feature_set
    for processor in feature_set.package_processors_in_order():
        if processor.applies_to(binary_package):
            processor.run_package_processor(fs_root, None, package_metadata_context)

848 

849 

def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: "HighLevelManifest",
) -> None:
    """Compute shlibs/symbols data across all arch-dependent packages.

    Builds a combined shlibs view (seeded from debian/shlibs.local when
    present), runs `compute_shlibs` per package, materializes a generated
    shlibs.local plus search dirs, and records them on each package's
    control-file creator for the later dpkg-shlibdeps run.  gensymbols
    errors are collected and reported together at the end.
    """
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: List[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        # shlibs only applies to arch-dependent packages that are being built.
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        fs_root = binary_package_data.fs_root
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = (
            binary_package_data.package_metadata_context.related_udeb_package
        )

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                binary_package,
                binary_package_data.control_output_dir.fs_path,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            # Collect rather than abort; report all failures together below.
            errors.append(e.message)
        else:
            if soname_info_list:
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "wt", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )

930 

def _relevant_service_definitions(
    service_rule: ServiceRule,
    service_managers: Union[List[str], FrozenSet[str]],
    by_service_manager_key: Mapping[
        Tuple[str, str, str, str], Tuple[ServiceManagerDetails, ServiceDefinition[Any]]
    ],
    aliases: Mapping[str, Sequence[Tuple[str, str, str, str]]],
) -> Iterable[Tuple[Tuple[str, str, str, str], ServiceDefinition[Any]]]:
    """Yield every (key, definition) the service rule applies to.

    Starting from the aliases of the rule's service name, transitively
    follows each matched definition's own names to pick up related keys,
    restricted to service managers the rule applies to.  Aborts the build
    when no service manager detected the requested service.

    Fixes: removed the dead `relevant_names` dict and the meaningless
    `return relevant_names.items()` (a return value inside a generator is
    discarded by iterating callers); dropped the no-op generator wrapper
    around `aliases[...]`; fixed the "No none of" typo in the error message;
    and guarded the alias expansion with a `by_service_manager_key`
    membership check — the original could enqueue an unknown key and then
    crash with KeyError on lookup (the initial seeding already filtered on
    membership, so the expansion clearly should too).
    """
    pending_queue = {
        key
        for key in aliases[service_rule.service]
        if key in by_service_manager_key
        and service_rule.applies_to_service_manager(key[-1])
    }
    seen_keys = set()

    if not pending_queue:
        service_manager_names = ", ".join(sorted(service_managers))
        _error(
            f"None of the service managers ({service_manager_names}) detected a service named"
            f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope}),"
            f" but the manifest definition at {service_rule.definition_source} requested that."
        )

    while pending_queue:
        next_key = pending_queue.pop()
        seen_keys.add(next_key)
        _, definition = by_service_manager_key[next_key]
        yield next_key, definition
        # Follow this definition's aliases to find related keys.
        for name in definition.names:
            for target_key in aliases[name]:
                if (
                    target_key not in seen_keys
                    and target_key in by_service_manager_key
                    and service_rule.applies_to_service_manager(target_key[-1])
                ):
                    pending_queue.add(target_key)

972 

973 

def handle_service_management(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:
    """Detect services in the package and integrate them per service manager.

    Every plugin-provided service manager detects its services in the package
    contents, the manifest's service rules are applied to the detected
    definitions, and each service manager then integrates the (possibly
    amended) definitions via its service integrator.

    :param binary_package_data: State of the binary package being assembled.
    :param manifest: The high level manifest (source of the service rules).
    :param package_metadata_context: Context passed to the plugin callbacks.
    :param fs_root: Root of the package contents to scan for services.
    :param feature_set: Plugin feature set providing the service managers.
    """

    # Maps (name, type-of-service, scope, service-manager) to a pair of
    # (service manager details, detected service definition).
    by_service_manager_key = {}
    # Maps a service name/alias to every key it is known under.
    aliases_by_name = collections.defaultdict(list)

    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    # Reject rules that reference unknown service managers up front.
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )

    # Let every registered service manager detect its services.
    for service_manager_details in feature_set.service_managers.values():
        service_registry: ServiceRegistryImpl = ServiceRegistryImpl(
            service_manager_details
        )
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )

        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue

        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )

            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)

    # Apply the manifest's service rules to the matching definitions.
    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            sm = service_key[-1]
            seen_service_managers.add(sm)
            # Replace the stored definition with the rule-amended version.
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )
        # A rule that explicitly names service managers must have matched a
        # service in every one of them; otherwise the manifest is stale/wrong.
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition match the relevant services."
            )

    # Regroup the final definitions per service manager for integration.
    per_service_manager = {}

    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)

    # Hand each service manager its final set of definitions so it can emit
    # maintscript snippets etc. via the control-script creator.
    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )

1089 

1090 

def setup_control_files(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: List[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    """Populate the DEBIAN/ control directory of a binary package.

    Runs alternatives/debconf/service handling and plugin metadata detectors,
    writes maintscripts, triggers and conffiles, and finally generates the
    control file(s) via ``_generate_control_files``.

    :param binary_package_data: State of the binary package being assembled.
    :param manifest: The high level manifest for this build.
    :param dbgsym_fs_root: Root of the contents of the -dbgsym package.
    :param dbgsym_ids: Build-ids collected for the -dbgsym package.
    :param package_metadata_context: Context passed to plugin detectors.
    :param allow_ctrl_file_management: When False (integration mode where
      dh_installdeb has already run), debputy must not generate maintscripts
      or triggers itself; it instead reuses the control files debhelper staged.
    """
    binary_package = package_metadata_context.binary_package
    control_output_dir = binary_package_data.control_output_dir
    control_output_fs_path = control_output_dir.fs_path
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)

    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars

    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())

    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        snippets = ["postinst"]

    if allow_ctrl_file_management:
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_fs_path,
        )

        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )

        # Run every applicable plugin metadata/maintscript detector.
        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )

        # Materialize the collected snippets into actual maintainer scripts.
        for script in snippets:
            _generate_snippet(
                control_output_fs_path,
                script,
                package_state.maintscript_snippets,
            )

    else:
        # dh_installdeb already ran: any snippet/trigger debputy would add is
        # a hard error because it can no longer be merged into the scripts.
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )

        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )

        # Only the dpkg-shlibdeps detector runs in this mode (dependencies
        # still need to be computed by debputy).
        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]

        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)

        # Reuse the control files debhelper already staged (except control and
        # md5sums, which debputy regenerates itself).
        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            existing_control_files = []

        if existing_control_files:
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_fs_path)
            print_command(*cmd)
            subprocess.check_call(cmd)

    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return

    if generated_triggers:
        assert allow_ctrl_file_management
        dest_file = os.path.join(control_output_fs_path, "triggers")
        with open(dest_file, "at", encoding="utf-8") as fd:
            fd.writelines(
                textwrap.dedent(
                    f"""\
                    # Added by {t.provider_source_id} from {t.provider.plugin_name}
                    {t.dpkg_trigger_type} {t.dpkg_trigger_target}
                    """
                )
                for t in generated_triggers
            )
            os.chmod(fd.fileno(), 0o644)

    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            binary_package,
            control_output_fs_path,
            fs_root,
            manifest.debian_dir,
        )

    _generate_control_files(
        binary_package_data,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )

1265 

1266 

def _generate_snippet(
    control_output_dir: str,
    script: str,
    maintscript_snippets: Dict[str, MaintscriptSnippetContainer],
) -> None:
    """Write the maintainer script `script` from its registered snippets.

    Does nothing when no snippets were registered or they all render empty.
    For tear-down scripts (prerm/postrm), snippets are emitted in reverse
    order so clean-up mirrors set-up.
    """
    container = maintscript_snippets.get(script)
    if container is None:
        return
    is_teardown_script = script in ("prerm", "postrm")
    ordered_parts = [
        container.generate_snippet(reverse=is_teardown_script),
        container.generate_snippet(snippet_order="service", reverse=is_teardown_script),
    ]
    if is_teardown_script:
        ordered_parts.reverse()
    body = "".join(f"{part}\n" for part in ordered_parts if part)
    if not body:
        return
    script_path = os.path.join(control_output_dir, script)
    with open(script_path, "wt") as fd:
        fd.write("#!/bin/sh\nset -e\n\n")
        fd.write(body)
        # Maintainer scripts must be executable.
        os.chmod(fd.fileno(), 0o755)

1290 

1291 

def _add_conffiles(
    conffiles_dest: str,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    """Append the absolute path of each matched conffile to the conffiles file.

    The destination file is removed again if it ends up empty.
    """
    with open(conffiles_dest, "at") as fd:
        for match in conffile_matches:
            # Only regular files can be conffiles.
            assert match.is_file
            fd.write(f"{match.absolute}\n")
    if os.stat(conffiles_dest).st_size == 0:
        os.unlink(conffiles_dest)

1303 

1304 

def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    """Ensure the base dpkg substvars exist (as empty strings if unset)."""
    for base_substvar in ("misc:Depends", "misc:Pre-Depends"):
        if base_substvar in substvars:
            continue
        substvars[base_substvar] = ""

1309 

1310 

def compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size"""
    total_kb = 0
    seen_hard_links = set()
    for entry in fs_root.all_paths():
        if not (entry.is_symlink or entry.is_file):
            # Directories (and other non-file entries) count as a flat 1 KiB.
            total_kb += 1
            continue
        try:
            st = entry.stat()
            if st.st_nlink > 1:
                # Count each hard-linked inode only once.
                link_id = (st.st_dev, st.st_ino)
                if link_id in seen_hard_links:
                    continue
                seen_hard_links.add(link_id)
            byte_size = st.st_size
        except PureVirtualPathError:
            # We just assume it is not a hard link when the path is purely virtual
            byte_size = entry.size
        # Round up to whole KiB like dpkg-gencontrol does.
        total_kb += (byte_size + 1023) // 1024
    return total_kb

1333 

1334 

def _generate_dbgsym_control_file_if_relevant(
    binary_package: BinaryPackage,
    dbgsym_fs_root: VirtualPath,
    dbgsym_control_dir: FSControlRootDir,
    dbgsym_ids: str,
    multi_arch: Optional[str],
    dctrl: str,
    extra_common_params: Sequence[str],
) -> None:
    """Generate DEBIAN/control for the -dbgsym companion package.

    Runs dpkg-gencontrol with -D/-U overrides that derive the dbgsym stanza
    from the main package's stanza (name, Depends, Section, Build-Ids, ...).

    :param binary_package: The main binary package the dbgsym belongs to.
    :param dbgsym_fs_root: Root of the dbgsym package contents.
    :param dbgsym_control_dir: The DEBIAN directory of the dbgsym package.
    :param dbgsym_ids: Space separated build-ids for the Build-Ids field.
    :param multi_arch: Multi-Arch value of the main package (if any).
    :param dctrl: Path of the control file to feed dpkg-gencontrol (either
      "debian/control" or a generated replacement with a dbgsym stanza).
    :param extra_common_params: dpkg-gencontrol parameters shared with the
      main package's invocation.
    """
    section = binary_package.archive_section
    component = ""
    extra_params = []
    # Keep the dbgsym package in the same archive component (e.g. "contrib/").
    if section is not None and "/" in section and not section.startswith("main/"):
        component = section.split("/", 1)[1] + "/"
    # Only "Multi-Arch: same" is carried over to the dbgsym package.
    if multi_arch != "same":
        extra_params.append("-UMulti-Arch")
    else:
        extra_params.append(f"-DMulti-Arch={multi_arch}")
    extra_params.append("-UReplaces")
    extra_params.append("-UBreaks")
    dbgsym_control_fs_path = dbgsym_control_dir.fs_path
    ensure_dir(dbgsym_control_fs_path)
    # Pass it via cmd-line to make it more visible that we are providing the
    # value. It also prevents the dbgsym package from picking up this value.
    total_size = compute_installed_size(dbgsym_fs_root) + compute_installed_size(
        dbgsym_control_dir
    )
    extra_params.append(f"-VInstalled-Size={total_size}")
    extra_params.extend(extra_common_params)

    package = binary_package.name
    # With a generated control file, the dbgsym package has its own stanza
    # (named "<pkg>-dbgsym"); with the stock debian/control it does not.
    package_selector = (
        binary_package.name
        if dctrl == "debian/control"
        else f"{binary_package.name}-dbgsym"
    )
    dpkg_cmd = [
        "dpkg-gencontrol",
        f"-p{package_selector}",
        # FIXME: Support d/<pkg>.changelog at some point.
        "-ldebian/changelog",
        "-T/dev/null",
        f"-c{dctrl}",
        f"-O{dbgsym_control_fs_path}/control",
        # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
        "-P/non-existent",
        f"-DPackage={package}-dbgsym",
        "-DDepends=" + package + " (= ${binary:Version})",
        f"-DDescription=debug symbols for {package}",
        f"-DSection={component}debug",
        f"-DBuild-Ids={dbgsym_ids}",
        "-UPre-Depends",
        "-URecommends",
        "-USuggests",
        "-UEnhances",
        "-UProvides",
        "-UEssential",
        "-UConflicts",
        "-DPriority=optional",
        "-UHomepage",
        "-UImportant",
        "-UBuilt-Using",
        "-UStatic-Built-Using",
        "-DAuto-Built-Package=debug-symbols",
        "-UProtected",
        *extra_params,
    ]
    print_command(*dpkg_cmd)
    try:
        subprocess.check_call(dpkg_cmd)
    except subprocess.CalledProcessError:
        _error(
            f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output from "
            " dpkg-gencontrol above to understand what went wrong."
        )
    os.chmod(os.path.join(dbgsym_control_fs_path, "control"), 0o644)

1411 

1412 

1413def _all_parent_directories_of(directories: Iterable[str]) -> Set[str]: 

1414 result = {"."} 

1415 for path in directories: 

1416 current = os.path.dirname(path) 

1417 while current and current not in result: 

1418 result.add(current) 

1419 current = os.path.dirname(current) 

1420 return result 

1421 

1422 

def _compute_multi_arch_for_arch_all_doc(
    binary_package: BinaryPackage,
    fs_root: FSPath,
) -> Optional[str]:
    """Return "foreign" for arch:all -doc packages that only ship docs.

    We limit by package name, since there are tricks involving a
    `Multi-Arch: no` depending on a `Multi-Arch: same` to emulate
    `Multi-Arch: allowed`. Said `Multi-Arch: no` can have no contents.
    That case seems unrealistic for -doc/-docs packages and accordingly
    the limitation here.
    """
    if not binary_package.name.endswith(("-doc", "-docs")):
        return None
    no_descend_paths = {
        "./usr/share/doc",
    }
    allowed_files = {f"./usr/share/lintian/overrides/{binary_package.name}"}
    has_other_content = _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_paths,
        acceptable_files=allowed_files,
    )
    return None if has_other_content else "foreign"

1444 

1445 

def _any_unacceptable_paths(
    fs_root: FSPath,
    *,
    acceptable_no_descend_paths: Union[List[str], AbstractSet[str]] = frozenset(),
    acceptable_files: Union[List[str], AbstractSet[str]] = frozenset(),
) -> bool:
    """Return True if the tree contains anything outside the accepted paths.

    :param fs_root: Root of the file system tree to check.
    :param acceptable_no_descend_paths: Directories whose entire subtree is
      acceptable (they are not descended into).
    :param acceptable_files: Individual files that are acceptable.
    """
    # Parent directories of acceptable paths are implicitly acceptable.
    permitted_dirs = _all_parent_directories_of(
        itertools.chain(acceptable_no_descend_paths, acceptable_files)
    )
    for fs_path, children in fs_root.walk():
        current = fs_path.path
        if current in acceptable_no_descend_paths:
            # Entire subtree is fine; prune the walk here.
            children.clear()
        elif current not in permitted_dirs and current not in acceptable_files:
            return True
    return False

1464 

1465 

def auto_compute_multi_arch(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    fs_root: FSPath,
) -> Optional[str]:
    """Heuristically determine a Multi-Arch value from the package contents.

    Returns None when no safe value can be deduced (e.g. when maintainer
    scripts are present, or when the contents are not limited to paths known
    to be co-installation safe).
    """
    resolved_arch = binary_package.resolved_architecture
    # Any maintainer script disables the heuristic.
    for script in ALL_CONTROL_SCRIPTS:
        maybe_script = control_output_dir.get(script)
        if maybe_script is not None and maybe_script.is_file:
            return None

    if resolved_arch == "all":
        return _compute_multi_arch_for_arch_all_doc(binary_package, fs_root)

    resolved_multiarch = binary_package.deb_multiarch
    assert resolved_arch != "all"
    no_descend_paths = {
        f"./usr/lib/{resolved_multiarch}",
        f"./usr/include/{resolved_multiarch}",
    }
    doc_basenames = (
        "copyright",
        "changelog.gz",
        "changelog.Debian.gz",
        f"changelog.Debian.{resolved_arch}.gz",
        "NEWS.Debian",
        "NEWS.Debian.gz",
        "README.Debian",
        "README.Debian.gz",
    )
    allowed_files = {
        f"./usr/share/doc/{binary_package.name}/{basename}"
        for basename in doc_basenames
    }

    # Note that the lintian-overrides file is deliberately omitted from the allow-list. We would have to know that the
    # override does not use architecture segments. With pure debputy, this is guaranteed (debputy
    # does not allow lintian-overrides with architecture segment). However, with a mixed debhelper + debputy,
    # `dh_lintian` allows it with compat 13 or older.

    if _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_paths,
        acceptable_files=allowed_files,
    ):
        return None

    return "same"

1515 

1516 

@functools.lru_cache()
def _has_t64_enabled() -> bool:
    """Return True when dpkg-buildflags reports the time64 ABI feature enabled.

    Returns False when dpkg-buildflags is unavailable or fails.
    """
    try:
        abi_features = subprocess.check_output(
            ["dpkg-buildflags", "--query-features", "abi"]
        ).decode()
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False

    return any(
        stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes"
        for stanza in Deb822.iter_paragraphs(abi_features)
    )

1530 

1531 

def _t64_migration_substvar(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    substvars: FlushableSubstvars,
) -> None:
    """Set the 64-bit time_t (t64) transition Provides substvar when relevant.

    Applies only to shared-library packages taking part in the t64 rename
    (matched by name via ``_T64_REGEX`` or via an explicit X-Time64-Compat
    field) that actually ship a symbols/shlibs control file.

    :param binary_package: The binary package being processed.
    :param control_output_dir: The package's DEBIAN directory (checked for
      symbols/shlibs files).
    :param substvars: The substvars container to update.
    """
    name = binary_package.name
    compat_name = binary_package.fields.get("X-Time64-Compat")
    if compat_name is None and not _T64_REGEX.match(name):
        # Not a t64-renamed package and no explicit compat name; nothing to do.
        return

    if not any(
        p.is_file
        for n in ["symbols", "shlibs"]
        if (p := control_output_dir.get(n)) is not None
    ):
        # Only packages shipping symbols/shlibs participate in the migration.
        return

    if compat_name is None:
        compat_name = name.replace("t64", "", 1)
        if compat_name == name:
            raise AssertionError(
                f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
                " As a work around, you can explicitly provide a X-Time64-Compat header in debian/control"
                " where you specify the desired compat name."
            )

    arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")

    if arch_bits != "32" or not _has_t64_enabled():
        # ABI is unchanged on this architecture, so the package provides its
        # pre-rename name to keep existing dependencies resolvable.
        # The "${binary:Version}" text must reach dpkg-gencontrol literally
        # (it is a dpkg substvar), hence the escaped braces in the f-string.
        substvars.add_dependency(
            _T64_PROVIDES,
            f"{compat_name} (= ${{binary:Version}})",
        )
    elif _T64_PROVIDES not in substvars:
        # Ensure the substvar exists (empty) so references to it do not warn.
        substvars[_T64_PROVIDES] = ""

1567 

1568 

@functools.lru_cache()
def dpkg_field_list_pkg_dep() -> Sequence[str]:
    """Return the package relationship field names known to dpkg.

    Queries Dpkg::Control::Fields via perl; the result is cached for the
    lifetime of the process.
    """
    perl_cmd = [
        "perl",
        "-MDpkg::Control::Fields",
        "-e",
        r'print "$_\n" for field_list_pkg_dep',
    ]
    try:
        raw_output = subprocess.check_output(perl_cmd)
    except (FileNotFoundError, subprocess.CalledProcessError):
        _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
    return raw_output.decode("utf-8").splitlines(keepends=False)

1583 

1584 

def _handle_relationship_substvars(
    source: SourcePackage,
    dctrl_file: BinaryPackage,
    substvars: FlushableSubstvars,
    has_dbgsym: bool,
) -> Optional[str]:
    """Generate a control file with relationship substvars merged in, if needed.

    Relationship substvars that request explicit merging (operator other than
    "$=") are appended to the matching relationship fields of the binary
    package's stanza in a temporary control file. When a -dbgsym package will
    be built, a minimal stanza for it is appended as well so dpkg-gencontrol
    does not warn about substvars it cannot attribute.

    :param source: The source package (its stanza is copied verbatim).
    :param dctrl_file: The binary package whose stanza may be amended.
    :param substvars: The substvars collected for the binary package.
    :param has_dbgsym: Whether a -dbgsym package will be generated.
    :return: Path of the generated control file, or None when the stock
      debian/control can be used unmodified.
    """
    relationship_fields = dpkg_field_list_pkg_dep()
    relationship_fields_lc = frozenset(x.lower() for x in relationship_fields)
    # Maps lowercased field name -> substvar references to append to it.
    substvar_fields = collections.defaultdict(list)
    needs_dbgsym_stanza = False
    for substvar_name, substvar in substvars.as_substvar.items():
        if ":" not in substvar_name:
            # Relationship substvars are namespaced (e.g. "misc:Depends").
            continue
        if substvar.assignment_operator in ("$=", "!="):
            # Will create incorrect results if there is a dbgsym and we do nothing
            needs_dbgsym_stanza = True

        if substvar.assignment_operator == "$=":
            # Automatically handled; no need for manual merging.
            continue
        _, field = substvar_name.rsplit(":", 1)
        field_lc = field.lower()
        if field_lc not in relationship_fields_lc:
            continue
        substvar_fields[field_lc].append("${" + substvar_name + "}")

    if not has_dbgsym:
        needs_dbgsym_stanza = False

    if not substvar_fields and not needs_dbgsym_stanza:
        # Nothing to merge; the stock debian/control will do.
        return None

    replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)

    for field_name in relationship_fields:
        field_name_lc = field_name.lower()
        addendum = substvar_fields.get(field_name_lc)
        if addendum is None:
            # No merging required
            continue
        substvars_part = ", ".join(addendum)
        existing_value = replacement_stanza.get(field_name)

        if existing_value is None or existing_value.isspace():
            final_value = substvars_part
        else:
            # Strip any trailing comma first to avoid emitting ", ,".
            existing_value = existing_value.rstrip().rstrip(",")
            final_value = f"{existing_value}, {substvars_part}"
        replacement_stanza[field_name] = final_value

    tmpdir = generated_content_dir(package=dctrl_file)
    with tempfile.NamedTemporaryFile(
        mode="wb",
        dir=tmpdir,
        suffix="__DEBIAN_control",
        delete=False,
    ) as fd:
        try:
            # Prefer the underlying deb822 object's own dump when available.
            cast("Any", source.fields).dump(fd)
        except AttributeError:
            debian.deb822.Deb822(source.fields).dump(fd)
        fd.write(b"\n")
        replacement_stanza.dump(fd)

        if has_dbgsym:
            # Minimal stanza to avoid substvars warnings. Most fields are still set
            # via -D.
            dbgsym_stanza = Deb822()
            dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
            dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
            dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
            fd.write(b"\n")
            dbgsym_stanza.dump(fd)

    return fd.name

1660 

1661 

def _generate_control_files(
    binary_package_data: "BinaryPackageData",
    package_state: "PackageTransformationDefinition",
    control_output_dir: FSControlRootDir,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
    dbgsym_root_fs: Optional[VirtualPath],
    dbgsym_build_ids: Optional[List[str]],
) -> None:
    """Generate DEBIAN/control (and md5sums) for a binary package.

    Computes Installed-Size, auto-detects a Multi-Arch value where safe,
    applies the t64 migration substvar, merges relationship substvars into a
    generated control file when needed, generates the -dbgsym control file
    when relevant, and finally runs dpkg-gencontrol for the package itself.

    :param binary_package_data: State of the binary package being assembled.
    :param package_state: Manifest state for this package (binary version etc.).
    :param control_output_dir: The package's DEBIAN directory.
    :param fs_root: Root of the package contents.
    :param substvars: Substvars to flush for dpkg-gencontrol's -T option.
    :param dbgsym_root_fs: Root of the -dbgsym contents (None for udebs).
    :param dbgsym_build_ids: Build-ids for the -dbgsym (None for udebs).
    """
    binary_package = binary_package_data.binary_package
    source_package = binary_package_data.source_package
    package_name = binary_package.name
    extra_common_params = []
    extra_params_specific = []
    _ensure_base_substvars_defined(substvars)
    if "Installed-Size" not in substvars:
        # Pass it via cmd-line to make it more visible that we are providing the
        # value. It also prevents the dbgsym package from picking up this value.
        total_size = compute_installed_size(fs_root) + compute_installed_size(
            control_output_dir
        )
        extra_params_specific.append(f"-VInstalled-Size={total_size}")

    ma_value = binary_package.fields.get("Multi-Arch")
    if not binary_package.is_udeb and ma_value is None:
        ma_value = auto_compute_multi_arch(binary_package, control_output_dir, fs_root)
        if ma_value is not None:
            _info(
                f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based'
                ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field'
                ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the'
                ' relevant part of "debian/control" to disable this feature.'
            )
            # We want this to apply to the `-dbgsym` package as well to avoid
            # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable`
            extra_common_params.append(f"-DMulti-Arch={ma_value}")
    elif ma_value == "no":
        # "Multi-Arch: no" is the default; drop the redundant field.
        extra_common_params.append("-UMulti-Arch")

    dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else ""
    if package_state.binary_version is not None:
        # The manifest overrides the version from debian/changelog.
        extra_common_params.append(f"-v{package_state.binary_version}")

    _t64_migration_substvar(binary_package, control_output_dir, substvars)

    with substvars.flush() as flushed_substvars:
        has_dbgsym = dbgsym_root_fs is not None and any(
            f for f in dbgsym_root_fs.all_paths() if f.is_file
        )
        dctrl_file = _handle_relationship_substvars(
            source_package,
            binary_package,
            substvars,
            has_dbgsym,
        )
        if dctrl_file is None:
            dctrl_file = "debian/control"

        if has_dbgsym:
            assert dbgsym_root_fs is not None  # mypy hint
            dbgsym_ctrl_dir = binary_package_data.dbgsym_info.dbgsym_ctrl_dir
            _generate_dbgsym_control_file_if_relevant(
                binary_package,
                dbgsym_root_fs,
                dbgsym_ctrl_dir,
                dbgsym_ids,
                ma_value,
                dctrl_file,
                extra_common_params,
            )
            generate_md5sums_file(
                dbgsym_ctrl_dir,
                dbgsym_root_fs,
            )
        elif dbgsym_ids:
            # No -dbgsym package; record the build-ids on the main package.
            extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}")

        ctrl_file = os.path.join(control_output_dir.fs_path, "control")
        dpkg_cmd = [
            "dpkg-gencontrol",
            f"-p{package_name}",
            # FIXME: Support d/<pkg>.changelog at some point.
            "-ldebian/changelog",
            f"-c{dctrl_file}",
            f"-T{flushed_substvars}",
            f"-O{ctrl_file}",
            # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
            "-P/non-existent",
            *extra_common_params,
            *extra_params_specific,
        ]
        print_command(*dpkg_cmd)
        try:
            subprocess.check_call(dpkg_cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to generate DEBIAN/control file for {package_name} failed. Please review the output from "
                " dpkg-gencontrol above to understand what went wrong."
            )
        os.chmod(ctrl_file, 0o644)

    if not binary_package.is_udeb:
        generate_md5sums_file(control_output_dir, fs_root)