Coverage for src/debputy/deb_packaging_support.py: 24%
833 statements
« prev ^ index » next coverage.py v7.8.2, created at 2026-01-26 19:30 +0000
« prev ^ index » next coverage.py v7.8.2, created at 2026-01-26 19:30 +0000
1import collections
2import contextlib
3import dataclasses
4import datetime
5import functools
6import hashlib
7import itertools
8import operator
9import os
10import re
11import shutil
12import subprocess
13import tempfile
14import textwrap
15from contextlib import ExitStack, suppress
16from tempfile import mkstemp
17from typing import (
18 List,
19 Optional,
20 Set,
21 Dict,
22 Tuple,
23 Literal,
24 TypeVar,
25 FrozenSet,
26 cast,
27 Any,
28 Union,
29 AbstractSet,
30 TYPE_CHECKING,
31)
32from collections.abc import Iterable, Sequence, Iterator, Mapping
34import debian.deb822
35from debian.changelog import Changelog
36from debian.deb822 import Deb822
37from debputy._deb_options_profiles import DebBuildOptionsAndProfiles
38from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
39from debputy.elf_util import find_all_elf_files, ELF_MAGIC
40from debputy.exceptions import DebputyDpkgGensymbolsError, PureVirtualPathError
41from debputy.filesystem_scan import FSPath, FSControlRootDir, VirtualPathBase
42from debputy.maintscript_snippet import (
43 ALL_CONTROL_SCRIPTS,
44 MaintscriptSnippetContainer,
45 STD_CONTROL_SCRIPTS,
46)
47from debputy.packager_provided_files import PackagerProvidedFile
48from debputy.packages import BinaryPackage, SourcePackage
49from debputy.packaging.alternatives import process_alternatives
50from debputy.packaging.debconf_templates import process_debconf_templates
51from debputy.packaging.makeshlibs import (
52 compute_shlibs,
53 ShlibsContent,
54 generate_shlib_dirs,
55 resolve_reserved_provided_file,
56)
57from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
58from debputy.plugin.api.impl import ServiceRegistryImpl
59from debputy.plugin.api.impl_types import (
60 MetadataOrMaintscriptDetector,
61 PackageDataTable,
62 ServiceManagerDetails,
63)
64from debputy.plugin.api.spec import (
65 FlushableSubstvars,
66 VirtualPath,
67 PackageProcessingContext,
68 ServiceDefinition,
69)
70from debputy.plugins.debputy.binary_package_rules import ServiceRule
71from debputy.util import (
72 _error,
73 ensure_dir,
74 assume_not_none,
75 resolve_perl_config,
76 perlxs_api_dependency,
77 detect_fakeroot,
78 grouper,
79 _info,
80 xargs,
81 escape_shell,
82 generated_content_dir,
83 print_command,
84 _warn,
85)
87if TYPE_CHECKING:
88 from debputy.highlevel_manifest import (
89 HighLevelManifest,
90 PackageTransformationDefinition,
91 BinaryPackageData,
92 )
# Type variable for virtual-path walk helpers; covariant so collections of a
# concrete path type can be used where a VirtualPath collection is expected.
VP = TypeVar("VP", bound=VirtualPath, covariant=True)

# Matches library package names with a "t64" rename suffix (presumably from
# the 64-bit time_t transition — e.g. "libfoot64", "libfoot64-nss").
_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
# Name of the substvar used to carry the extra Provides for such packages.
_T64_PROVIDES = "t64:Provides"
def generate_md5sums_file(
    control_output_dir: VirtualPathBase,
    fs_root: VirtualPath,
) -> None:
    """Write the md5sums control file for the package.

    Conffiles are excluded (dpkg tracks their checksums separately) and the
    file is omitted entirely when there is nothing to checksum.
    """
    skip_paths = set()
    conffiles_entry = control_output_dir.get("conffiles")
    if conffiles_entry and conffiles_entry.is_file:
        with conffiles_entry.open() as conffd:
            for raw_line in conffd:
                if raw_line.startswith("/"):
                    skip_paths.add("." + raw_line.rstrip("\n"))
    members = [
        p for p in fs_root.all_paths() if p.is_file and p.path not in skip_paths
    ]
    # Sort in the same order as dh_md5sums, which is not quite the same as
    # dpkg/`all_paths()`. Compare `.../doc/...` vs `.../doc-base/...` if you
    # want to see the difference between the two approaches.
    members.sort(key=lambda p: p.path)
    if not members:
        return
    with control_output_dir.open_child("md5sums", "w") as md5fd:
        for entry in members:
            rel_path = entry.path
            assert rel_path.startswith("./")
            rel_path = rel_path[2:]
            digest = hashlib.md5()
            with entry.open(byte_io=True) as binfd:
                while block := binfd.read(8192):
                    digest.update(block)
            md5fd.write(f"{digest.hexdigest()} {rel_path}\n")
def install_or_generate_conffiles(
    ctrl_root: FSPath | FSControlRootDir,
    fs_root: VirtualPath,
    reserved_packager_provided_files: dict[str, list[PackagerProvidedFile]],
) -> None:
    """Install a packager-provided conffiles file and register /etc files.

    A non-empty packager-provided `conffiles` file (if any) is copied into
    the control root first; afterwards every regular file below /etc is
    registered as a conffile.
    """
    packager_conffiles = resolve_reserved_provided_file(
        "conffiles",
        reserved_packager_provided_files,
    )
    if (
        packager_conffiles
        and packager_conffiles.is_file
        and packager_conffiles.size > 0
    ):
        ctrl_root.insert_file_from_fs_path(
            "conffiles",
            packager_conffiles.fs_path,
            mode=0o644,
            reference_path=packager_conffiles,
        )
    etc_dir = fs_root.lookup("etc")
    if etc_dir:
        _add_conffiles(ctrl_root, (p for p in etc_dir.all_paths() if p.is_file))
# Bit flags describing what kind of perl code was detected in a package;
# OR'ed together to decide the shape of the generated perl dependency.
PERL_DEP_PROGRAM = 1  # scripts run via a perl interpreter
PERL_DEP_INDEP_PM_MODULE = 2  # arch-independent .pm modules (vendorlib)
PERL_DEP_XS_MODULE = 4  # compiled XS extensions (.so in vendorarch)
PERL_DEP_ARCH_PM_MODULE = 8  # arch-dependent .pm modules (vendorarch)
# Mask of flags that make a Multi-Arch ":any" qualified dependency unsafe.
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)
@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    """Return the installed version of *package* as reported by `dpkg -s`."""
    version: str | None = None
    status = subprocess.check_output(["dpkg", "-s", package]).decode("utf-8")
    for status_line in status.splitlines(keepends=False):
        if status_line.startswith("Version: "):
            # "Version:" is 8 characters; strip() drops the separator space.
            version = status_line[8:].strip()
            break
    assert version is not None
    return version
def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
) -> None:
    """Detect perl usage in the package and emit the perl:Depends substvar.

    Scans the perl vendor directories for .pm/.so modules and the package
    tree for perl scripts, then records an appropriately versioned and/or
    ":any"-qualified dependency in *substvars*.
    """
    perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in (perl_config_data.vendorarch, perl_config_data.vendorlib):
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    for d, pm_mode in [
        (perl_config_data.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (perl_config_data.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            if path.name.endswith(".so"):
                # Compiled XS extension => versioned dependency on perl below.
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                detected_dep_requirements |= pm_mode

    # Look for perl scripts anywhere in the package (except documentation).
    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            children.clear()
            continue
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        # Only scripts/arch-indep modules were seen; ":any" is safe here.
        dependency += ":any"

    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())
def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None:
    """Reject packages that ship content below /usr/local.

    debputy does not (yet) support the dh-style replacement of /usr/local
    paths, so a non-empty /usr/local directory is a hard error.
    """
    usr_local = fs_root.lookup("./usr/local")
    if usr_local and any(usr_local.iterdir):
        # There are two key issues:
        # 1) Getting the generated maintscript carried on to the final maintscript
        # 2) Making sure that manifest created directories do not trigger the "unused error".
        _error(
            f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})."
        )
def _find_and_analyze_systemd_service_files(
    fs_root: VirtualPath,
    systemd_service_dir: Literal["system", "user"],
) -> Iterable[VirtualPath]:
    """Collect unique systemd unit files from the known unit directories.

    Regular files are returned once per basename, preferring the first
    directory in the search order. Symlinked units are recorded as aliases
    of their target's basename; NOTE(review): the alias table is currently
    only computed, never returned or read — confirm before relying on it.
    """
    unit_dirs = [
        f"./usr/lib/systemd/{systemd_service_dir}",
        f"./lib/systemd/{systemd_service_dir}",
    ]
    aliases: dict[str, list[str]] = collections.defaultdict(list)
    seen_basenames = set()
    unit_files = []

    for unit_dir in unit_dirs:
        dir_path = fs_root.lookup(unit_dir)
        if not dir_path:
            continue
        for entry in dir_path.iterdir:
            if entry.is_symlink:
                link_target = os.path.basename(entry.readlink())
                aliases[link_target].append(entry.name)
            elif entry.is_file and entry.name not in seen_basenames:
                seen_basenames.add(entry.name)
                unit_files.append(entry)

    return unit_files
def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    """Abort the build if the package ships any systemd *user* unit files."""
    for user_unit in _find_and_analyze_systemd_service_files(fs_root, "user"):
        _error(
            f'Sorry, systemd user services files are not supported at the moment (saw "{user_unit.path}"'
            f" in {dctrl.name})"
        )
# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
# Always keep at least this many (non-binNMU) changelog entries when trimming.
_DCH_MIN_NUM_OF_ENTRIES = 4
def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: set[str] | None,
    *,
    trim: bool = True,
) -> tuple[bool, set[str] | None]:
    """Trim a Debian changelog or NEWS file in place.

    For changelogs (is_changelog=True), entries older than the cut-off date
    are dropped once a minimum number of entries is kept, and binNMU entries
    are moved to an architecture-qualified side file. For NEWS files, only
    entries whose version is in *keep_versions* survive; a NEWS file left
    empty by that is deleted outright.

    :param package: The binary package the file belongs to.
    :param path: The changelog/NEWS file to rewrite.
    :param is_changelog: True for `changelog`, False for `NEWS.Debian`.
    :param keep_versions: For NEWS files, the versions retained in the
      already-pruned changelog (required when is_changelog is False).
    :param trim: When False, no date-based trimming is performed.
    :return: ``(pruned, kept_versions)`` — *kept_versions* is only provided
      for changelogs so it can be fed back in for the NEWS run.
    """
    # TODO: Process `d/changelog` once
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error(f"The Debian changelog was missing date in sign off line")
            # RFC 5322 style date as used in the changelog sign-off line.
            entry_date = datetime.datetime.strptime(
                block_date, "%a, %d %b %Y %H:%M:%S %z"
            ).date()
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        # Nothing changed; leave the file untouched.
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    with (
        path.replace_fs_path_content() as fs_path,
        open(fs_path, "w", encoding="utf-8") as fd,
    ):
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(
                f"""\
                # Older entries have been removed from this changelog.
                # To read the complete changelog use `apt changelog {package.name}`.
                """
            )
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        # binNMU entries go into a separate, architecture-qualified changelog.
        with (
            parent_dir.add_file(
                f"{path.name}.{package.resolved_architecture}"
            ) as binnmu_changelog,
            open(
                binnmu_changelog.fs_path,
                "w",
                encoding="utf-8",
            ) as binnmu_fd,
        ):
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}
def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    """Normalize and trim the installed Debian changelog and NEWS file.

    For native packages, `changelog.Debian` is renamed to plain `changelog`.
    Old entries are trimmed (unless DEB_BUILD_OPTIONS contains "notrimdch"),
    and the NEWS file is pruned to only cover versions kept in the changelog.
    """
    pkg_doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not pkg_doc_dir:
        return
    changelog = pkg_doc_dir.get("changelog.Debian")
    if changelog and is_native:
        changelog.name = "changelog"
    elif is_native:
        # Native packages may already ship the file under the plain name.
        changelog = pkg_doc_dir.get("changelog")

    should_trim = "notrimdch" not in build_env.deb_build_options

    kept_versions = None
    changelog_was_pruned = False
    if changelog and changelog.has_fs_path:
        changelog_was_pruned, kept_versions = _prune_dch_file(
            dctrl, changelog, True, None, trim=should_trim
        )

    if not should_trim:
        return

    news_file = pkg_doc_dir.get("NEWS.Debian")
    if news_file and news_file.has_fs_path and changelog_was_pruned:
        _prune_dch_file(dctrl, news_file, False, kept_versions)
# Source-tree directories searched (in order) for an upstream changelog.
_UPSTREAM_CHANGELOG_SOURCE_DIRS = [
    ".",
    "doc",
    "docs",
]
_UPSTREAM_CHANGELOG_NAMES = {
    # The value is a priority to match the debhelper order.
    # - The suffix weights heavier than the basename (because that is what debhelper did)
    #
    # We list the name/suffix in order of priority in the code. That makes it easier to
    # see the priority directly, but it gives the "lowest" value to the most important items
    f"{n}{s}": (sw, nw)
    for (nw, n), (sw, s) in itertools.product(
        enumerate(["changelog", "changes", "history"], start=1),
        enumerate(["", ".txt", ".md", ".rst", ".org"], start=1),
    )
}
# Sentinel for `min(..., default=...)` when no candidate name matched at all.
_NONE_TUPLE = (None, (0, 0))
def _detect_upstream_changelog(names: Iterable[str]) -> str | None:
    """Pick the best upstream-changelog candidate among *names*.

    Returns the name with the best (lowest) priority according to
    _UPSTREAM_CHANGELOG_NAMES, or None when nothing matches.
    """
    candidates = [
        (name, priority)
        for name in names
        if (priority := _UPSTREAM_CHANGELOG_NAMES.get(name.lower())) is not None
    ]
    best = min(candidates, default=_NONE_TUPLE, key=operator.itemgetter(1))
    return best[0]
def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: FSPath,
    source_fs_root: VirtualPath,
) -> None:
    """Ensure the package ships an upstream changelog under its doc dir.

    Prefers a changelog already installed into the package's doc directory
    (renaming it to `changelog`); otherwise falls back to well-known
    locations in the source tree.
    """
    doc_dir = f"./usr/share/doc/{dctrl_bin.name}"
    pkg_doc_dir = fs_root.lookup(doc_dir)
    if pkg_doc_dir and not pkg_doc_dir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if pkg_doc_dir:
        if pkg_doc_dir.get("changelog") or pkg_doc_dir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        installed_candidate = _detect_upstream_changelog(
            p.name
            for p in pkg_doc_dir.iterdir
            if p.is_file and p.has_fs_path and p.size > 0
        )
        if installed_candidate:
            candidate_path = pkg_doc_dir.lookup(installed_candidate)
            assert candidate_path is not None  # Mostly as a typing hint
            candidate_path.name = "changelog"
            return

    for src_dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        src_dir = source_fs_root.lookup(src_dirname)
        if not src_dir or not src_dir.is_dir:
            continue
        source_candidate = _detect_upstream_changelog(
            p.name
            for p in src_dir.iterdir
            if p.is_file and p.has_fs_path and p.size > 0
        )
        if not source_candidate:
            continue
        if pkg_doc_dir is None:
            pkg_doc_dir = fs_root.mkdirs(doc_dir)
        pkg_doc_dir.insert_file_from_fs_path(
            "changelog",
            src_dir[source_candidate].fs_path,
        )
        break
@dataclasses.dataclass(slots=True)
class _ElfInfo:
    # Bookkeeping for one ELF binary during stripping / dbgsym extraction.
    path: VirtualPath  # the ELF file inside the package's fs tree
    fs_path: str  # materialized (writable) path on the host file system
    is_stripped: bool | None = None  # None until file(1) has been consulted
    build_id: str | None = None  # GNU build-id (hex string), if found
    dbgsym: FSPath | None = None  # generated .debug file in the dbgsym root
def _elf_static_lib_walk_filter(
    fs_path: VirtualPath,
    children: list[VP],
) -> bool:
    """Walk filter shared by ELF discovery and static-library discovery.

    Prunes `.build-id` trees under `debug` directories (returning False to
    stop descending) and removes children that must not be stripped: `.so`
    files inside debug directories and guile `.go` byte-code files.
    """
    if (
        fs_path.name == ".build-id"
        and assume_not_none(fs_path.parent_dir).name == "debug"
    ):
        children.clear()
        return False
    # Deal with some special cases, where certain files are not supposed to be stripped in a given directory
    if "debug/" in fs_path.path or fs_path.name.endswith("debug/"):
        # FIXME: We need a way to opt out of this per #468333/#1016122
        children[:] = [c for c in children if not c.name.endswith(".so")]
    if "/guile/" in fs_path.path or fs_path.name == "guile":
        children[:] = [c for c in children if not c.name.endswith(".go")]
    return True
@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[dict[str, _ElfInfo]]:
    """Yield a mapping of materialized fs path -> _ElfInfo for all ELF files.

    While the context is active, every ELF file is detached for in-place
    modification; strip status and build-ids are resolved up front.
    """
    elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not elf_files:
        yield {}
        return
    with ExitStack() as stack:
        infos: dict[str, _ElfInfo] = {}
        for vpath in elf_files:
            materialized = stack.enter_context(vpath.replace_fs_path_content())
            infos[materialized] = _ElfInfo(
                path=assume_not_none(fs_root.lookup(vpath.path)),
                fs_path=materialized,
            )
        _resolve_build_ids(infos)
        yield infos
def _find_all_static_libs(
    fs_root: FSPath,
) -> Iterator[FSPath]:
    """Yield static libraries (`ar` archives that appear to contain ELF)."""
    for path, children in fs_root.walk():
        # Matching the logic of dh_strip for now.
        if not _elf_static_lib_walk_filter(path, children):
            continue
        if not path.is_file:
            continue
        if path.name.startswith("lib") and path.name.endswith("_g.a"):
            # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
            # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
            # symbols")
            continue
        if not path.has_fs_path:
            continue
        with path.open(byte_io=True) as fd:
            magic = fd.read(8)
            if magic not in (b"!<arch>\n", b"!<thin>\n"):
                continue
            # Maybe we should see if the first file looks like an index file.
            # Three random .a samples suggests the index file is named "/"
            # Not sure if we should skip past it and then do the ELF check or just assume
            # that "index => static lib".
            head = fd.read(1024 * 1024)
        if b"\0" not in head and ELF_MAGIC not in head:
            continue
        yield path
@contextlib.contextmanager
def _all_static_libs(fs_root: FSPath) -> Iterator[list[str]]:
    """Detach every static library for in-place stripping.

    Yields the materialized file-system paths (an empty list when the
    package has no static libraries).
    """
    static_libs = list(_find_all_static_libs(fs_root))
    if not static_libs:
        yield []
        return
    with ExitStack() as stack:
        yield [
            stack.enter_context(lib.replace_fs_path_content())
            for lib in static_libs
        ]
# Extracts the hex build-id from file(1)'s verdict, e.g. "BuildID[sha1]=ab12…";
# the bracketed algorithm part is optional.
_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)")
def _resolve_build_ids(elf_info: dict[str, _ElfInfo]) -> None:
    """Fill in `is_stripped` and `build_id` for each entry via file(1)."""
    base_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        # NOTE(review): presumably file(1)'s sandbox misbehaves under fakeroot.
        base_cmd.append("--no-sandbox")

    for cmd in xargs(base_cmd, (info.fs_path for info in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        raw_output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        records = raw_output.rstrip(b"\0").split(b"\0")

        # Output alternates between path and verdict; consume in pairs.
        for path_bytes, verdict in grouper(records, 2, incomplete="strict"):
            entry = elf_info[path_bytes.decode("utf-8")]
            entry.is_stripped = b"not stripped" not in verdict
            id_match = _FILE_BUILD_ID_RE.search(verdict)
            if id_match:
                entry.build_id = id_match.group(1).decode("utf-8")
def _make_debug_file(
    objcopy: str, fs_path: str, build_id: str, dbgsym_fs_root: FSPath
) -> FSPath:
    """Extract the debug sections of an ELF binary into a build-id keyed .debug file.

    Returns the (possibly pre-existing) .debug file inside *dbgsym_fs_root*.
    """
    target_dir = dbgsym_fs_root.mkdirs(f"./usr/lib/debug/.build-id/{build_id[0:2]}/")
    debug_basename = f"{build_id[2:]}.debug"
    if debug_basename in target_dir:
        # A file for this build-id already exists; reuse it.
        return target_dir[debug_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with target_dir.add_file(
        debug_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as debug_file:
        objcopy_cmd = [
            objcopy,
            "--only-keep-debug",
            "--compress-debug-sections",
            fs_path,
            debug_file.fs_path,
        ]
        try:
            subprocess.check_call(objcopy_cmd)
        except subprocess.CalledProcessError:
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, debug_file.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return debug_file
670def _strip_binary(strip: str, options: list[str], paths: Iterable[str]) -> None:
671 # We assume the paths are obtained via `p.replace_fs_path_content()`,
672 # which is the case at the time of written and should remain so forever.
673 it = iter(paths)
674 first = next(it, None)
675 if first is None:
676 return
677 static_cmd = [strip]
678 static_cmd.extend(options)
680 for cmd in xargs(static_cmd, itertools.chain((first,), (f for f in it))):
681 _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}")
682 try:
683 subprocess.check_call(
684 cmd,
685 stdin=subprocess.DEVNULL,
686 restore_signals=True,
687 )
688 except subprocess.CalledProcessError:
689 _error(
690 f"Attempting to remove ELF debug info failed. Please review the error from {strip} above"
691 f" understand what went wrong."
692 )
def _attach_debug(objcopy: str, elf_binary: VirtualPath, dbgsym: FSPath) -> None:
    """Embed a gnu-debuglink to *dbgsym* into *elf_binary* via objcopy."""
    with dbgsym.replace_fs_path_content() as dbgsym_fs_path:
        link_cmd = [objcopy, "--add-gnu-debuglink", dbgsym_fs_path, elf_binary.fs_path]
        print_command(*link_cmd)
        try:
            subprocess.check_call(link_cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above understand what went wrong."
            )
709@functools.lru_cache
710def _has_tool(tool: str) -> bool:
711 return shutil.which(tool) is not None
def _run_dwz(
    dctrl: BinaryPackage,
    dbgsym_fs_root: FSPath,
    unstripped_elf_info: list[_ElfInfo],
) -> None:
    """Deduplicate debug info across the package's ELF binaries with dwz.

    Skipped for udebs, when there is nothing to process, or when dwz is not
    installed. With more than one binary, a shared multifile is produced and
    shipped in the dbgsym package (when non-empty).
    """
    if not unstripped_elf_info or dctrl.is_udeb or not _has_tool("dwz"):
        return
    dwz_cmd = ["dwz"]
    dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
    dwz_ma_basename = f"{dctrl.name}.debug"
    multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
    build_time_multifile = None
    if len(unstripped_elf_info) > 1:
        # More than one binary: ask dwz to factor shared DWARF into a multifile.
        fd, build_time_multifile = mkstemp(
            suffix=dwz_ma_basename, dir=generated_content_dir()
        )
        os.close(fd)
        dwz_cmd.append(f"-m{build_time_multifile}")
        dwz_cmd.append(f"-M/{multifile}")

    # TODO: configuration for disabling multi-file and tweaking memory limits

    dwz_cmd.extend(elf.fs_path for elf in unstripped_elf_info)

    _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
    try:
        subprocess.check_call(dwz_cmd)
    except subprocess.CalledProcessError:
        _error(
            "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
            " to understand what went wrong."
        )
    # Only ship the multifile when dwz actually wrote something into it.
    if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
        dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
        dwz_dir.insert_file_from_fs_path(
            dwz_ma_basename,
            build_time_multifile,
            mode=0o644,
            require_copy_on_write=False,
            follow_symlinks=False,
        )
def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: FSPath,
    dbgsym_fs_root: VirtualPath,
    *,
    run_dwz: bool = False,
) -> list[str]:
    """Strip ELF binaries/static libraries and move debug info to the dbgsym root.

    Static libraries are stripped in place. For ELF binaries, the debug
    sections are extracted into build-id keyed .debug files inside
    *dbgsym_fs_root* (optionally deduplicated via dwz first), the binaries
    are stripped, and a gnu-debuglink back to the .debug file is attached.

    :return: Sorted, de-duplicated list of relocated build-ids.
    """
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        if run_dwz:
            _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        # Extract .debug files before stripping destroys the debug sections.
        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When run strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

    # Set for uniqueness
    all_debug_info = sorted(
        {assume_not_none(i.build_id) for i in unstripped_elf_info}
    )

    dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
    dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
    return all_debug_info
def run_package_processors(
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
) -> None:
    """Run every applicable plugin-provided package processor on *fs_root*."""
    binary_package = package_metadata_context.binary_package
    ordered_processors = (
        manifest.plugin_provided_feature_set.package_processors_in_order()
    )
    for processor in ordered_processors:
        if processor.applies_to(binary_package):
            processor.run_package_processor(fs_root, None, package_metadata_context)
def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: "HighLevelManifest",
) -> None:
    """Compute shlibs information across all acted-on arch-specific packages.

    Merges `debian/shlibs.local` with generated shlibs entries, materializes
    a combined shlibs.local for dpkg-shlibdeps and records it (plus the
    generated shlibs search dirs) on every package's control-file creator.
    Collected dpkg-gensymbols errors abort the build after all packages have
    been processed.
    """
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: list[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        # Shlibs only apply to arch-specific packages that are being built.
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        fs_root = binary_package_data.fs_root
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = (
            binary_package_data.package_metadata_context.related_udeb_package
        )

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                binary_package,
                binary_package_data.control_output_dir.fs_path,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            # Collect and keep going so all errors are reported at once.
            errors.append(e.message)
        else:
            if soname_info_list:
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "w", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    # Propagate the merged shlibs details to every acted-on package.
    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )
def _relevant_service_definitions(
    service_rule: ServiceRule,
    service_managers: list[str] | frozenset[str],
    by_service_manager_key: Mapping[
        tuple[str, str, str, str], tuple[ServiceManagerDetails, ServiceDefinition[Any]]
    ],
    aliases: Mapping[str, Sequence[tuple[str, str, str, str]]],
) -> Iterable[tuple[tuple[str, str, str, str], ServiceDefinition[Any]]]:
    """Yield every detected service definition that *service_rule* matches.

    Starting from the rule's service name, transitively follows the alias
    names of each matched definition, restricted to the service managers the
    rule applies to (the last element of each key tuple). Errors out when
    nothing matches at all.
    """
    as_keys = (key for key in aliases[service_rule.service])

    pending_queue = {
        key
        for key in as_keys
        if key in by_service_manager_key
        and service_rule.applies_to_service_manager(key[-1])
    }
    relevant_names: dict[tuple[str, str, str, str], ServiceDefinition[Any]] = {}
    seen_keys = set()

    if not pending_queue:
        service_manager_names = ", ".join(sorted(service_managers))
        _error(
            f"No none of the service managers ({service_manager_names}) detected a service named"
            f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope}),"
            f" but the manifest definition at {service_rule.definition_source} requested that."
        )

    while pending_queue:
        next_key = pending_queue.pop()
        seen_keys.add(next_key)
        _, definition = by_service_manager_key[next_key]
        yield next_key, definition
        # Enqueue every alias of this definition so related keys are visited.
        for name in definition.names:
            for target_key in aliases[name]:
                if (
                    target_key not in seen_keys
                    and service_rule.applies_to_service_manager(target_key[-1])
                ):
                    pending_queue.add(target_key)

    # NOTE(review): `relevant_names` is never populated and, as this is a
    # generator, this return value is discarded by callers — looks like a
    # leftover from a non-generator version; confirm before removing.
    return relevant_names.items()
def handle_service_management(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:
    """Detect services in the package and integrate them via service managers.

    Runs every plugin-provided service detector over ``fs_root``, applies the
    manifest's requested service rules to the detected definitions, and then
    hands the (possibly rewritten) definitions to each service manager's
    integrator, which emits its control snippets via the ctrl creator.

    :param binary_package_data: Build state of the binary package (provides the
        ctrl creator used by the integrators).
    :param manifest: High level manifest; source of the per-package service rules.
    :param package_metadata_context: Context object passed through to the
        plugin detectors and integrators.
    :param fs_root: Root of the package's file system tree to scan.
    :param feature_set: Plugin feature set listing the known service managers.
    """
    # Maps (name, type-of-service, scope, service-manager) -> (manager details,
    # service definition).  The definition is replaced when a rule applies.
    by_service_manager_key = {}
    # Maps each service name/alias to all keys it is known under.
    aliases_by_name = collections.defaultdict(list)
    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    # Up-front validation: reject rules that name an unknown service manager.
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )
    # Detection phase: ask each service manager's detector which services it
    # can see in the package tree and index the results.
    for service_manager_details in feature_set.service_managers.values():
        service_registry: ServiceRegistryImpl = ServiceRegistryImpl(
            service_manager_details
        )
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )
        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue
        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )
            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)
    # Rule application phase: rewrite every matching definition according to
    # the manifest rules and track which service managers matched.
    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            sm = service_key[-1]
            seen_service_managers.add(sm)
            # Keep the manager details; swap in the rule-adjusted definition.
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )
        # A rule that explicitly listed service managers must match a service
        # in every one of them; otherwise the manifest is likely wrong.
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition match the relevant services."
            )
    # Regroup final definitions per service manager for the integration phase.
    per_service_manager = {}
    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)
    # Integration phase: each service manager turns its definitions into
    # control snippets (ordered in the "service" snippet slot by default).
    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )
def setup_control_files(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: list[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    """Populate the DEBIAN/ control directory of a binary package.

    Depending on the integration mode, this either runs the full control-file
    machinery (alternatives, debconf templates, service management, plugin
    metadata detectors, maintscript snippets, triggers, conffiles) or — when
    ``allow_ctrl_file_management`` is False — reuses the control files that
    debhelper's ``dh_installdeb`` already produced and rejects any manifest
    feature that would require debputy to manage those files itself.  In both
    modes, it finishes by generating ``DEBIAN/control`` (and md5sums).

    :param binary_package_data: Build state (fs root, substvars, ctrl creator,
        control output dir, ...) of the package being assembled.
    :param manifest: The high level manifest with the per-package state.
    :param dbgsym_fs_root: File system root of the -dbgsym package (may be empty).
    :param dbgsym_ids: Build-IDs collected for the -dbgsym package.
    :param package_metadata_context: Context passed to plugin detectors.
    :param allow_ctrl_file_management: Whether debputy may generate maintainer
        scripts/triggers itself (False in the dh-integration mode).
    """
    binary_package = package_metadata_context.binary_package
    control_output_dir = binary_package_data.control_output_dir
    control_output_fs_path = control_output_dir.fs_path
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)
    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars
    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())
    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        snippets = ["postinst"]
    if allow_ctrl_file_management:
        # Full management mode: debputy generates alternatives handling,
        # debconf templates, service integration and maintainer scripts.
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_fs_path,
        )
        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )
        # Run every applicable plugin metadata/maintscript detector.
        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )
        # Materialize the collected snippets as maintainer scripts.
        for script in snippets:
            _generate_snippet(
                control_output_fs_path,
                script,
                package_state.maintscript_snippets,
            )
    else:
        # dh-integration mode: dh_installdeb has already written the control
        # files; any debputy-managed maintscript/trigger content is an error.
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )
        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )
        # Only the dpkg-shlibdeps detector still runs in this mode.
        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)
        # Copy dh's staged control files (except control/md5sums, which
        # debputy regenerates itself below).
        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            existing_control_files = []
        if existing_control_files:
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_fs_path)
            print_command(*cmd)
            subprocess.check_call(cmd)
    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return
    if generated_triggers:
        # In dh-integration mode, any generated trigger already errored above.
        assert allow_ctrl_file_management
        dest_file = os.path.join(control_output_fs_path, "triggers")
        with open(dest_file, "a", encoding="utf-8") as fd:
            fd.writelines(
                textwrap.dedent(
                    f"""\
                    # Added by {t.provider_source_id} from {t.provider.plugin_name}
                    {t.dpkg_trigger_type} {t.dpkg_trigger_target}
                    """
                )
                for t in generated_triggers
            )
            os.chmod(fd.fileno(), 0o644)
    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            control_output_dir,
            fs_root,
            package_state.reserved_packager_provided_files,
        )
    _generate_control_files(
        binary_package_data,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )
def _generate_snippet(
    control_output_dir: str,
    script: str,
    maintscript_snippets: dict[str, MaintscriptSnippetContainer],
) -> None:
    """Write the maintainer script ``script`` from the collected snippets.

    Nothing is emitted when no snippet content exists for the script.  For
    the removal scripts (prerm/postrm) the snippets are generated reversed
    and the snippet groups are concatenated in reverse order.
    """
    container = maintscript_snippets.get(script)
    if container is None:
        return
    removal_script = script in ("prerm", "postrm")
    ordered_parts = [
        container.generate_snippet(reverse=removal_script),
        container.generate_snippet(snippet_order="service", reverse=removal_script),
    ]
    if removal_script:
        ordered_parts.reverse()
    body = "".join(f"{part}\n" for part in ordered_parts if part)
    if not body:
        # Every snippet group was empty; do not emit a script at all.
        return
    script_path = os.path.join(control_output_dir, script)
    with open(script_path, "w") as fd:
        fd.write("#!/bin/sh\nset -e\n\n")
        fd.write(body)
        # Maintainer scripts must be executable; chmod via the open fd.
        os.chmod(fd.fileno(), 0o755)
def _add_conffiles(
    ctrl_root: VirtualPathBase,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    """Append the given conffile matches to the package's ``conffiles`` file.

    The control file is only opened (in append mode) when at least one match
    exists; an empty iterable is a complete no-op.
    """
    match_iter = iter(conffile_matches)
    head = next(match_iter, None)
    if head is None:
        # Nothing to record; avoid creating/touching the "conffiles" file.
        return
    with ctrl_root.open_child("conffiles", "at") as fd:
        for match in itertools.chain((head,), match_iter):
            abs_path = match.absolute
            assert match.is_file
            fd.write(f"{abs_path}\n")
def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    """Ensure the standard misc:* substvars exist, defaulting to empty strings."""
    for base_substvar in ("misc:Depends", "misc:Pre-Depends"):
        if base_substvar in substvars:
            continue
        substvars[base_substvar] = ""
def compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size

    Files and symlinks contribute their size rounded up to whole KiB; every
    other path (directories, devices, ...) counts as 1 KiB.  Hard-linked
    inodes (same st_dev/st_ino with st_nlink > 1) are only counted once.

    :param fs_root: Root of the (virtual) file system tree to measure.
    :return: The estimated installed size in KiB.
    """
    size_in_kb = 0
    hard_links = set()
    for path in fs_root.all_paths():
        if path.is_symlink or path.is_file:
            try:
                # If it is a VirtualPathBase instance, the use its `.stat()` method
                # since it might have the stat cached as a minor optimization on disk
                # access. Other than that, the `os.lstat` fallback is sufficient.
                #
                # (Fix: this line previously had coverage-report residue fused
                # onto it, which broke the statement.)
                if isinstance(path, VirtualPathBase):
                    st = path.stat()
                else:
                    st = os.lstat(path.fs_path)
                if st.st_nlink > 1:
                    hl_key = (st.st_dev, st.st_ino)
                    if hl_key in hard_links:
                        # Same inode already counted; skip this link.
                        continue
                    hard_links.add(hl_key)
                size = st.st_size
            except PureVirtualPathError:
                # We just assume it is not a hard link when the path is purely virtual
                size = path.size
            path_size = (size + 1023) // 1024
        else:
            path_size = 1
        size_in_kb += path_size
    return size_in_kb
def _generate_dbgsym_control_file_if_relevant(
    binary_package: BinaryPackage,
    dbgsym_fs_root: VirtualPath,
    dbgsym_control_dir: FSControlRootDir,
    dbgsym_ids: str,
    multi_arch: str | None,
    dctrl: str,
    extra_common_params: Sequence[str],
) -> None:
    """Generate the DEBIAN/control file for the -dbgsym companion package.

    Invokes dpkg-gencontrol with the main package's stanza as the base and a
    long list of -D/-U overrides to turn it into a valid dbgsym stanza
    (Section <component>debug, Auto-Built-Package, Build-Ids, etc.).

    :param binary_package: The main binary package the dbgsym belongs to.
    :param dbgsym_fs_root: File system root of the dbgsym package payload.
    :param dbgsym_control_dir: Control dir where DEBIAN/control is written.
    :param dbgsym_ids: Space-separated Build-Ids for the Build-Ids field.
    :param multi_arch: The main package's Multi-Arch value (only "same" is
        propagated to the dbgsym package).
    :param dctrl: Path of the control file to read the stanza from.
    :param extra_common_params: Additional dpkg-gencontrol parameters shared
        with the main package's invocation.
    """
    section = binary_package.archive_section
    component = ""
    extra_params = []
    # Non-main components (e.g. "contrib/libs") need the component prefix
    # replicated in the dbgsym Section field.
    if section is not None and "/" in section and not section.startswith("main/"):
        component = section.split("/", 1)[1] + "/"
    if multi_arch != "same":
        extra_params.append("-UMulti-Arch")
    else:
        extra_params.append(f"-DMulti-Arch={multi_arch}")
    extra_params.append("-UReplaces")
    extra_params.append("-UBreaks")
    dbgsym_control_fs_path = dbgsym_control_dir.fs_path
    ensure_dir(dbgsym_control_fs_path)
    # Pass it via cmd-line to make it more visible that we are providing the
    # value. It also prevents the dbgsym package from picking up this value.
    total_size = compute_installed_size(dbgsym_fs_root) + compute_installed_size(
        dbgsym_control_dir
    )
    extra_params.append(f"-VInstalled-Size={total_size}")
    extra_params.extend(extra_common_params)
    package = binary_package.name
    # When reading from a generated control file, the dbgsym stanza has its
    # own paragraph; when reading debian/control, reuse the main stanza.
    package_selector = (
        binary_package.name
        if dctrl == "debian/control"
        else f"{binary_package.name}-dbgsym"
    )
    dpkg_cmd = [
        "dpkg-gencontrol",
        f"-p{package_selector}",
        # FIXME: Support d/<pkg>.changelog at some point.
        "-ldebian/changelog",
        "-T/dev/null",
        f"-c{dctrl}",
        f"-O{dbgsym_control_fs_path}/control",
        # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
        "-P/non-existent",
        f"-DPackage={package}-dbgsym",
        "-DDepends=" + package + " (= ${binary:Version})",
        f"-DDescription=debug symbols for {package}",
        f"-DSection={component}debug",
        f"-DBuild-Ids={dbgsym_ids}",
        "-UPre-Depends",
        "-URecommends",
        "-USuggests",
        "-UEnhances",
        "-UProvides",
        "-UEssential",
        "-UConflicts",
        "-DPriority=optional",
        "-UHomepage",
        "-UImportant",
        "-UBuilt-Using",
        "-UStatic-Built-Using",
        "-DAuto-Built-Package=debug-symbols",
        "-UProtected",
        *extra_params,
    ]
    print_command(*dpkg_cmd)
    try:
        subprocess.check_call(dpkg_cmd)
    except subprocess.CalledProcessError:
        # NOTE(review): the two adjacent string literals concatenate to
        # "from  dpkg-gencontrol" (double space) — cosmetic message glitch.
        _error(
            f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output from "
            " dpkg-gencontrol above to understand what went wrong."
        )
    os.chmod(os.path.join(dbgsym_control_fs_path, "control"), 0o644)
1422def _all_parent_directories_of(directories: Iterable[str]) -> set[str]:
1423 result = {"."}
1424 for path in directories:
1425 current = os.path.dirname(path)
1426 while current and current not in result:
1427 result.add(current)
1428 current = os.path.dirname(current)
1429 return result
def _compute_multi_arch_for_arch_all_doc(
    binary_package: BinaryPackage,
    fs_root: FSPath,
) -> str | None:
    """Return "foreign" for -doc/-docs packages whose payload is doc-only.

    Only packages named ``*-doc``/``*-docs`` are considered; their contents
    must be confined to /usr/share/doc plus the package's lintian overrides
    file.  Returns None when the heuristic does not apply.
    """
    pkg_name = binary_package.name
    if not pkg_name.endswith(("-doc", "-docs")):
        # We limit by package name, since there are tricks involving a `Multi-Arch: no` depending on a
        # `Multi-Arch: same` to emulate `Multi-Arch: allowed`. Said `Multi-Arch: no` can have no contents.
        #
        # That case seems unrealistic for -doc/-docs packages and accordingly the limitation here.
        return None
    no_descend_dirs = {
        "./usr/share/doc",
    }
    allowed_files = {f"./usr/share/lintian/overrides/{pkg_name}"}
    has_other_content = _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_dirs,
        acceptable_files=allowed_files,
    )
    return None if has_other_content else "foreign"
def _any_unacceptable_paths(
    fs_root: FSPath,
    *,
    acceptable_no_descend_paths: list[str] | AbstractSet[str] = frozenset(),
    acceptable_files: list[str] | AbstractSet[str] = frozenset(),
) -> bool:
    """Return True if the tree contains a path outside the allow-lists.

    ``acceptable_no_descend_paths`` are accepted wholesale (their subtrees are
    pruned from the walk); ``acceptable_files`` are accepted individually.
    Parent directories of either set are implicitly acceptable.
    """
    allowed_parent_dirs = _all_parent_directories_of(
        itertools.chain(acceptable_no_descend_paths, acceptable_files)
    )
    for entry, children in fs_root.walk():
        entry_path = entry.path
        if entry_path in acceptable_no_descend_paths:
            # Whole subtree accepted: prune it rather than visiting contents.
            children.clear()
        elif entry_path not in allowed_parent_dirs and entry_path not in acceptable_files:
            # First path outside the allow-lists settles the answer.
            return True
    return False
def auto_compute_multi_arch(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    fs_root: FSPath,
) -> str | None:
    """Derive a safe "Multi-Arch" value from the package contents, if any.

    Packages with any maintainer script are never auto-classified.  arch:all
    packages are delegated to the -doc heuristic; arch-specific packages get
    "same" when every shipped path lives under the multiarch directories or
    is one of the standard per-package doc files.  Returns None when no safe
    value can be derived.
    """
    arch = binary_package.resolved_architecture
    for script in ALL_CONTROL_SCRIPTS:
        candidate = control_output_dir.get(script)
        if candidate is not None and candidate.is_file:
            # Maintainer scripts make auto-classification unsafe.
            return None
    if arch == "all":
        return _compute_multi_arch_for_arch_all_doc(binary_package, fs_root)
    multiarch_tuple = binary_package.deb_multiarch
    assert arch != "all"
    no_descend_dirs = {
        f"./usr/lib/{multiarch_tuple}",
        f"./usr/include/{multiarch_tuple}",
    }
    doc_basenames = (
        "copyright",
        "changelog.gz",
        "changelog.Debian.gz",
        f"changelog.Debian.{arch}.gz",
        "NEWS.Debian",
        "NEWS.Debian.gz",
        "README.Debian",
        "README.Debian.gz",
    )
    allowed_files = {
        f"./usr/share/doc/{binary_package.name}/{basename}"
        for basename in doc_basenames
    }
    # Note that the lintian-overrides file is deliberately omitted from the allow-list. We would have to know that the
    # override does not use architecture segments. With pure debputy, this is guaranteed (debputy
    # does not allow lintian-overrides with architecture segment). However, with a mixed debhelper + debputy,
    # `dh_lintian` allows it with compat 13 or older.
    if _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_dirs,
        acceptable_files=allowed_files,
    ):
        return None
    return "same"
@functools.lru_cache
def _has_t64_enabled() -> bool:
    """Check whether dpkg-buildflags reports the time64 ABI feature as enabled.

    Returns False when dpkg-buildflags is unavailable or fails; the result is
    cached for the lifetime of the process.
    """
    query_cmd = ["dpkg-buildflags", "--query-features", "abi"]
    try:
        raw_output = subprocess.check_output(query_cmd)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # No usable dpkg-buildflags: assume time64 is not enabled.
        return False
    return any(
        stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes"
        for stanza in Deb822.iter_paragraphs(raw_output.decode())
    )
def _t64_migration_substvar(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    substvars: FlushableSubstvars,
) -> None:
    """Set up the 64-bit time_t transition Provides substvar where relevant.

    Applies to shared-library packages (those shipping a symbols or shlibs
    control file) whose name matches the t64 rename pattern or that declare an
    explicit ``X-Time64-Compat`` field.  On architectures where the ABI did
    not actually change, the package gets a versioned Provides on its
    pre-transition (compat) name; otherwise the substvar is merely defined
    (empty) to avoid substvar warnings.
    """
    name = binary_package.name
    compat_name = binary_package.fields.get("X-Time64-Compat")
    if compat_name is None and not _T64_REGEX.match(name):
        # Not a t64-renamed package and no explicit compat name: nothing to do.
        return
    if not any(
        p.is_file
        for n in ["symbols", "shlibs"]
        if (p := control_output_dir.get(n)) is not None
    ):
        # Only shared-library packages (with symbols/shlibs) are relevant.
        return
    if compat_name is None:
        compat_name = name.replace("t64", "", 1)
        if compat_name == name:
            raise AssertionError(
                f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
                " As a work around, you can explicitly provide a X-Time64-Compat header in debian/control"
                " where you specify the desired compat name."
            )
    arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")
    if arch_bits != "32" or not _has_t64_enabled():
        # ABI unchanged on this architecture: the package can satisfy
        # dependencies on the old (pre-t64) name.
        #
        # Bug fix: the value must contain the *literal* substvar reference
        # "${binary:Version}".  The previous f-string used single braces,
        # which made Python interpolate a (non-existent) `binary` variable
        # instead of emitting the escaped substvar text.
        substvars.add_dependency(
            _T64_PROVIDES,
            f"{compat_name} (= ${{binary:Version}})",
        )
    elif _T64_PROVIDES not in substvars:
        substvars[_T64_PROVIDES] = ""
@functools.lru_cache
def dpkg_field_list_pkg_dep() -> Sequence[str]:
    """Return the package-relationship field names known to dpkg.

    Shells out to perl's Dpkg::Control::Fields module; the result is cached
    for the lifetime of the process.
    """
    perl_cmd = [
        "perl",
        "-MDpkg::Control::Fields",
        "-e",
        r'print "$_\n" for field_list_pkg_dep',
    ]
    try:
        raw_output = subprocess.check_output(perl_cmd)
    except (FileNotFoundError, subprocess.CalledProcessError):
        # `_error` aborts with a diagnostic (it does not return).
        _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
    return raw_output.decode("utf-8").splitlines(keepends=False)
# Fields that participate in the auto-substvars merging but that dpkg itself
# does not know; they are emitted with an "XB-" prefix instead (see
# `_handle_auto_substvars`).
_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG = {
    "Commands",
}
@functools.lru_cache
def all_auto_substvars() -> Sequence[str]:
    """Return every field name that participates in auto-substvar merging.

    This is dpkg's list of package dependency fields plus the extra fields
    debputy supports that dpkg does not.  Cached for the process lifetime
    (the underlying lookup shells out to perl).
    """
    # Idiom fix: a plain copy is `list(...)`, not a copying comprehension.
    result = list(dpkg_field_list_pkg_dep())
    result.extend(_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG)
    return tuple(result)
def _handle_auto_substvars(
    source: SourcePackage,
    dctrl_file: BinaryPackage,
    substvars: FlushableSubstvars,
    has_dbgsym: bool,
) -> str | None:
    """Merge auto-substvars into the control stanza if dpkg cannot do it itself.

    Scans the package's substvars for names of the form ``<prefix>:<Field>``
    where ``<Field>`` is a known dependency-ish field and the substvar is not
    using the automatically-merged ``$=`` operator.  When any such substvar
    (or a dbgsym complication) exists, a replacement control file containing
    the source stanza, the amended binary stanza and — if needed — a minimal
    dbgsym stanza is written out.

    :return: The path of the generated replacement control file, or None when
        plain ``debian/control`` can be used as-is.
    """
    auto_substvars_fields = all_auto_substvars()
    # Lower-case lookup table so field matching is case-insensitive.
    auto_substvars_fields_lc = {x.lower(): x for x in auto_substvars_fields}
    # Maps lower-cased field name -> set of "${substvar}" references to merge.
    substvar_fields = collections.defaultdict(set)
    needs_dbgsym_stanza = False
    for substvar_name, substvar in substvars.as_substvar.items():
        if ":" not in substvar_name:
            continue
        if substvar.assignment_operator in ("$=", "!="):
            # Will create incorrect results if there is a dbgsym and we do nothing
            needs_dbgsym_stanza = True
        if substvar.assignment_operator == "$=":
            # Automatically handled; no need for manual merging.
            continue
        _, field = substvar_name.rsplit(":", 1)
        field_lc = field.lower()
        if field_lc not in auto_substvars_fields_lc:
            continue
        substvar_fields[field_lc].add("${" + substvar_name + "}")
    if not has_dbgsym:
        needs_dbgsym_stanza = False
    if not substvar_fields and not needs_dbgsym_stanza:
        # Nothing to merge; the caller can use debian/control directly.
        return None
    replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)
    for field_name in auto_substvars_fields:
        field_name_lc = field_name.lower()
        addendum = substvar_fields.get(field_name_lc)
        if addendum is None:
            # No merging required
            continue
        # Append the substvar references to any existing field value.
        substvars_part = ", ".join(sorted(addendum))
        existing_value = replacement_stanza.get(field_name)
        if existing_value is None or existing_value.isspace():
            final_value = substvars_part
        else:
            existing_value = existing_value.rstrip().rstrip(",")
            final_value = f"{existing_value}, {substvars_part}"
        replacement_stanza[field_name] = final_value
        canonical_field_name = auto_substvars_fields_lc.get(field_name_lc)
        # If `dpkg` does not know the field, we need to inject `XB-` in front
        # of it.
        if (
            canonical_field_name
            and canonical_field_name in _SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG
        ):
            replacement_stanza[f"XB-{canonical_field_name}"] = replacement_stanza[
                field_name
            ]
            del replacement_stanza[field_name]
    # Keep Description last per Debian control file convention.
    with suppress(KeyError):
        replacement_stanza.order_last("Description")
    tmpdir = generated_content_dir(package=dctrl_file)
    with tempfile.NamedTemporaryFile(
        mode="wb",
        dir=tmpdir,
        suffix="__DEBIAN_control",
        delete=False,
    ) as fd:
        try:
            # `source.fields` may already be a Deb822-like object with `.dump`.
            cast("Any", source.fields).dump(fd)
        except AttributeError:
            debian.deb822.Deb822(source.fields).dump(fd)
        fd.write(b"\n")
        replacement_stanza.dump(fd)
        if has_dbgsym:
            # Minimal stanza to avoid substvars warnings. Most fields are still set
            # via -D.
            dbgsym_stanza = Deb822()
            dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
            dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
            dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
            fd.write(b"\n")
            dbgsym_stanza.dump(fd)
    return fd.name
def _generate_control_files(
    binary_package_data: "BinaryPackageData",
    package_state: "PackageTransformationDefinition",
    control_output_dir: FSControlRootDir,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
    dbgsym_root_fs: VirtualPath | None,
    dbgsym_build_ids: list[str] | None,
) -> None:
    """Generate DEBIAN/control (and md5sums) via dpkg-gencontrol.

    Computes Installed-Size, may auto-derive a Multi-Arch value, wires up the
    t64 transition substvar, optionally produces the -dbgsym control file and
    finally runs dpkg-gencontrol for the main package.

    :param binary_package_data: Build state for the binary package.
    :param package_state: Per-package manifest state (binary version override).
    :param control_output_dir: DEBIAN/ directory to write control into.
    :param fs_root: The package payload tree (used for sizes and heuristics).
    :param substvars: The substitution variables to flush for dpkg-gencontrol.
    :param dbgsym_root_fs: The dbgsym payload tree, or None (udeb case).
    :param dbgsym_build_ids: Build-IDs for the dbgsym package, or None.
    """
    binary_package = binary_package_data.binary_package
    source_package = binary_package_data.source_package
    package_name = binary_package.name
    extra_common_params = []
    extra_params_specific = []
    _ensure_base_substvars_defined(substvars)
    if "Installed-Size" not in substvars:
        # Pass it via cmd-line to make it more visible that we are providing the
        # value. It also prevents the dbgsym package from picking up this value.
        total_size = compute_installed_size(fs_root) + compute_installed_size(
            control_output_dir
        )
        extra_params_specific.append(f"-VInstalled-Size={total_size}")
    ma_value = binary_package.fields.get("Multi-Arch")
    if not binary_package.is_udeb and ma_value is None:
        # No explicit Multi-Arch field: try the content-based heuristic.
        ma_value = auto_compute_multi_arch(binary_package, control_output_dir, fs_root)
        if ma_value is not None:
            _info(
                f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based'
                ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field'
                ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the'
                ' relevant part of "debian/control" to disable this feature.'
            )
            # We want this to apply to the `-dbgsym` package as well to avoid
            # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable`
            extra_common_params.append(f"-DMulti-Arch={ma_value}")
    elif ma_value == "no":
        # "Multi-Arch: no" is the implied default; strip the field.
        extra_common_params.append("-UMulti-Arch")
    dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else ""
    if package_state.binary_version is not None:
        extra_common_params.append(f"-v{package_state.binary_version}")
    _t64_migration_substvar(binary_package, control_output_dir, substvars)
    with substvars.flush() as flushed_substvars:
        # Only treat the dbgsym package as real when it actually has files.
        has_dbgsym = dbgsym_root_fs is not None and any(
            f for f in dbgsym_root_fs.all_paths() if f.is_file
        )
        dctrl_file = _handle_auto_substvars(
            source_package,
            binary_package,
            substvars,
            has_dbgsym,
        )
        if dctrl_file is None:
            dctrl_file = "debian/control"
        if has_dbgsym:
            assert dbgsym_root_fs is not None  # mypy hint
            dbgsym_ctrl_dir = binary_package_data.dbgsym_info.dbgsym_ctrl_dir
            _generate_dbgsym_control_file_if_relevant(
                binary_package,
                dbgsym_root_fs,
                dbgsym_ctrl_dir,
                dbgsym_ids,
                ma_value,
                dctrl_file,
                extra_common_params,
            )
            generate_md5sums_file(
                dbgsym_ctrl_dir,
                dbgsym_root_fs,
            )
        elif dbgsym_ids:
            # No dbgsym package, but the Build-Ids still belong in the main
            # package's stanza.
            extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}")
        ctrl_file = os.path.join(control_output_dir.fs_path, "control")
        dpkg_cmd = [
            "dpkg-gencontrol",
            f"-p{package_name}",
            # FIXME: Support d/<pkg>.changelog at some point.
            "-ldebian/changelog",
            f"-c{dctrl_file}",
            f"-T{flushed_substvars}",
            f"-O{ctrl_file}",
            # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
            "-P/non-existent",
            *extra_common_params,
            *extra_params_specific,
        ]
        print_command(*dpkg_cmd)
        try:
            subprocess.check_call(dpkg_cmd)
        except subprocess.CalledProcessError:
            # NOTE(review): the adjacent string literals concatenate to
            # "from  dpkg-gencontrol" (double space) — cosmetic message glitch.
            _error(
                f"Attempting to generate DEBIAN/control file for {package_name} failed. Please review the output from "
                " dpkg-gencontrol above to understand what went wrong."
            )
        os.chmod(ctrl_file, 0o644)
    if not binary_package.is_udeb:
        generate_md5sums_file(control_output_dir, fs_root)