Coverage for src/debputy/deb_packaging_support.py: 24% (839 statements)
coverage.py v7.8.2, created at 2026-04-26 17:00 +0000
1import collections
2import contextlib
3import dataclasses
4import datetime
5import functools
6import hashlib
7import itertools
8import operator
9import os
10import re
11import shutil
12import subprocess
13import tempfile
14import textwrap
15from contextlib import ExitStack, suppress
16from tempfile import mkstemp
17from typing import (
18 Literal,
19 TypeVar,
20 cast,
21 Any,
22 AbstractSet,
23 TYPE_CHECKING,
24)
25from collections.abc import Iterable, Sequence, Iterator, Mapping
27import debian.deb822
28from debian.changelog import Changelog
29from debian.deb822 import Deb822
30from debputy._deb_options_profiles import DebBuildOptionsAndProfiles
31from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
32from debputy.elf_util import find_all_elf_files, ELF_MAGIC
33from debputy.exceptions import DebputyDpkgGensymbolsError, PureVirtualPathError
34from debputy.filesystem_scan import (
35 FSControlRootDir,
36 VirtualPathBase,
37 InMemoryVirtualPathBase,
38)
39from debputy.maintscript_snippet import (
40 ALL_CONTROL_SCRIPTS,
41 MaintscriptSnippetContainer,
42 STD_CONTROL_SCRIPTS,
43)
44from debputy.packager_provided_files import PackagerProvidedFile
45from debputy.packages import BinaryPackage, SourcePackage
46from debputy.packaging.alternatives import process_alternatives
47from debputy.packaging.debconf_templates import process_debconf_templates
48from debputy.packaging.makeshlibs import (
49 compute_shlibs,
50 ShlibsContent,
51 generate_shlib_dirs,
52 resolve_reserved_provided_file,
53)
54from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
55from debputy.plugin.api.impl import ServiceRegistryImpl
56from debputy.plugin.api.impl_types import (
57 MetadataOrMaintscriptDetector,
58 PackageDataTable,
59 ServiceManagerDetails,
60)
61from debputy.plugin.api.spec import (
62 FlushableSubstvars,
63 VirtualPath,
64 PackageProcessingContext,
65 ServiceDefinition,
66)
67from debputy.plugins.debputy.binary_package_rules import ServiceRule
68from debputy.util import (
69 _error,
70 ensure_dir,
71 assume_not_none,
72 resolve_perl_config,
73 perlxs_api_dependency,
74 detect_fakeroot,
75 grouper,
76 _info,
77 xargs,
78 escape_shell,
79 generated_content_dir,
80 print_command,
81 _warn,
82)
84if TYPE_CHECKING:
85 from debputy.highlevel_manifest import (
86 HighLevelManifest,
87 PackageTransformationDefinition,
88 BinaryPackageData,
89 )
# Covariant TypeVar for virtual-path implementations; used by walk filters
# that receive lists of child paths.
VP = TypeVar("VP", bound=VirtualPath, covariant=True)

# Matches library package names carrying the `t64` suffix (the 64-bit time_t
# transition rename), e.g. "libfoot64" or "libfoot64-nss".
_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
# Substvar name used for Provides entries related to the time64 transition.
_T64_PROVIDES = "t64:Provides"
def generate_md5sums_file(
    control_output_dir: VirtualPathBase,
    fs_root: VirtualPath,
) -> None:
    """Generate the DEBIAN/md5sums control file for a binary package.

    Conffiles are excluded (dpkg tracks their checksums separately via the
    conffiles mechanism), and nothing is written when there are no files to
    checksum.

    :param control_output_dir: The control-file directory (DEBIAN/) of the package.
    :param fs_root: The root of the package's data file system tree.
    """
    exclude = set()
    conffiles = control_output_dir.get("conffiles")
    if conffiles and conffiles.is_file:
        with conffiles.open() as fd:
            for line in fd:
                if not line.startswith("/"):
                    continue
                # Conffile entries are absolute; fs tree paths are "./"-relative.
                exclude.add("." + line.rstrip("\n"))
    files_to_checksum = sorted(
        (
            path
            for path in fs_root.all_paths()
            if path.is_file and path.path not in exclude
        ),
        # Sort in the same order as dh_md5sums, which is not quite the same as dpkg/`all_paths()`
        # Compare `.../doc/...` vs `.../doc-base/...` if you want to see the difference between
        # the two approaches.
        key=lambda p: p.path,
    )
    if not files_to_checksum:
        return
    with control_output_dir.open_child("md5sums", "w") as md5fd:
        for member in files_to_checksum:
            path = member.path
            assert path.startswith("./")
            path = path[2:]
            file_hash = hashlib.md5()
            with member.open(byte_io=True) as f:
                # Hash in chunks to avoid loading large files into memory.
                while chunk := f.read(8192):
                    file_hash.update(chunk)
            # md5sums format: checksum, TWO spaces, then the relative path.
            md5fd.write(f"{file_hash.hexdigest()}  {path}\n")
def install_or_generate_conffiles(
    ctrl_root: InMemoryVirtualPathBase | FSControlRootDir,
    fs_root: VirtualPath,
    reserved_packager_provided_files: dict[str, list[PackagerProvidedFile]],
) -> None:
    """Install a packager-provided conffiles file and auto-register /etc files.

    A non-empty packager-provided "conffiles" file (if any) is installed into
    the control directory first; afterwards every regular file below /etc is
    registered as a conffile.
    """
    provided = resolve_reserved_provided_file(
        "conffiles",
        reserved_packager_provided_files,
    )
    if provided and provided.is_file and provided.size > 0:
        ctrl_root.insert_file_from_fs_path(
            "conffiles",
            provided.fs_path,
            mode=0o644,
            reference_path=provided,
        )
    etc_dir = fs_root.lookup("etc")
    if etc_dir:
        # Everything beneath /etc is implicitly treated as a conffile.
        _add_conffiles(ctrl_root, (f for f in etc_dir.all_paths() if f.is_file))
# Bit flags describing which kinds of Perl content were detected in a package.
PERL_DEP_PROGRAM = 1  # perl scripts / executables
PERL_DEP_INDEP_PM_MODULE = 2  # *.pm modules under vendorlib (arch-independent)
PERL_DEP_XS_MODULE = 4  # compiled XS modules (*.so under vendorarch)
PERL_DEP_ARCH_PM_MODULE = 8  # *.pm modules under vendorarch (arch-dependent)
# Mask of detection bits that make a Multi-Arch ":any" qualifier on the perl
# dependency unsuitable (only programs and indep modules are ":any"-safe).
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)
@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    """Return the installed version of *package* as reported by `dpkg -s`."""
    status_output = (
        subprocess.check_output(["dpkg", "-s", package])
        .decode("utf-8")
        .splitlines(keepends=False)
    )
    version = None
    for field_line in status_output:
        if field_line.startswith("Version: "):
            version = field_line[8:].strip()
            break
    assert version is not None
    return version
def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: InMemoryVirtualPathBase,
    substvars: FlushableSubstvars,
) -> None:
    """Detect Perl content in the package and fill in the perl:Depends substvar.

    Scans the vendor perl include directories for modules and the whole tree
    for perl scripts, then emits a suitable dependency on perl (with ":any"
    and/or a versioned constraint where the detected content allows/requires it).
    """
    perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in (perl_config_data.vendorarch, perl_config_data.vendorlib):
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    for d, pm_mode in [
        (perl_config_data.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (perl_config_data.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            # *.so => compiled XS module; *.pm => pure-perl module of the
            # directory's kind (indep for vendorlib, arch for vendorarch).
            if path.name.endswith(".so"):
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                detected_dep_requirements |= pm_mode

    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            # Skip documentation (examples etc.) entirely.
            children.clear()
            continue
        # Only consider executables or *.pl files that exist on disk.
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    # ":any" is only valid when no arch-dependent perl content was detected.
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        dependency += ":any"

    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        # XS modules are tied to the exact perl version they were built against.
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())
def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None:
    """Abort the build when the package ships content beneath /usr/local."""
    usr_local = fs_root.lookup("./usr/local")
    if not usr_local or not any(usr_local.iterdir()):
        return
    # There are two key issues:
    # 1) Getting the generated maintscript carried on to the final maintscript
    # 2) Making sure that manifest created directories do not trigger the "unused error".
    _error(
        f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})."
    )
259def _find_and_analyze_systemd_service_files(
260 fs_root: VirtualPath,
261 systemd_service_dir: Literal["system", "user"],
262) -> Iterable[VirtualPath]:
263 service_dirs = [
264 f"./usr/lib/systemd/{systemd_service_dir}",
265 f"./lib/systemd/{systemd_service_dir}",
266 ]
267 aliases: dict[str, list[str]] = collections.defaultdict(list)
268 seen = set()
269 all_files = []
271 for d in service_dirs:
272 system_dir = fs_root.lookup(d)
273 if not system_dir:
274 continue
275 for child in system_dir.iterdir():
276 if child.is_symlink:
277 dest = os.path.basename(child.readlink())
278 aliases[dest].append(child.name)
279 elif child.is_file and child.name not in seen:
280 seen.add(child.name)
281 all_files.append(child)
283 return all_files
def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    """Abort the build if the package ships any systemd *user* service files."""
    user_units = _find_and_analyze_systemd_service_files(fs_root, "user")
    for unit in user_units:
        _error(
            f'Sorry, systemd user services files are not supported at the moment (saw "{unit.path}"'
            f" in {dctrl.name})"
        )
# Cut-off date for trimming old d/changelog entries.
# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
# Always keep at least this many (non-binNMU) entries regardless of their age.
_DCH_MIN_NUM_OF_ENTRIES = 4
def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: set[str] | None,
    *,
    trim: bool = True,
) -> tuple[bool, set[str] | None]:
    """Trim old entries from an installed Debian changelog or NEWS file.

    For changelogs, entries older than ``_DCH_PRUNE_CUT_OFF_DATE`` are dropped
    once ``_DCH_MIN_NUM_OF_ENTRIES`` newer entries are kept; binNMU entries are
    split into a separate, arch-qualified changelog file. For NEWS files, only
    entries whose version appears in *keep_versions* are kept; the NEWS file is
    deleted when nothing remains.

    :param package: The binary package the file belongs to.
    :param path: The changelog/NEWS file inside the package's doc directory.
    :param is_changelog: True for changelog handling, False for NEWS handling.
    :param keep_versions: Versions kept in the changelog (NEWS mode only; must
      be non-None then).
    :param trim: When False (DEB_BUILD_OPTIONS=notrimdch), never drop entries by age.
    :return: ``(shortened, kept_versions)`` where *kept_versions* is None when
      the file was removed or left untouched.
    """
    # TODO: Process `d/changelog` once
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error("The Debian changelog was missing date in sign off line")
            try:
                entry_date = datetime.datetime.strptime(
                    block_date, "%a, %d %b %Y %H:%M:%S %z"
                ).date()
            except ValueError:
                # _error() aborts; entry_date is never used uninitialized.
                _error(
                    f"Invalid date in the changelog entry for version {block.version}: {block_date!r} (Expected format: 'Thu, 26 Feb 2026 00:00:00 +0000')"
                )
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                # Entries are newest-first; once an old-enough entry is hit
                # (with enough entries already kept), the rest can go too.
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        # Nothing to rewrite; leave the file untouched.
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    with (
        path.replace_fs_path_content() as fs_path,
        open(fs_path, "w", encoding="utf-8") as fd,
    ):
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(f"""\
                # Older entries have been removed from this changelog.
                # To read the complete changelog use `apt changelog {package.name}`.
                """)
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        # binNMU entries go into a separate, architecture-qualified changelog.
        with (
            parent_dir.add_file(
                f"{path.name}.{package.resolved_architecture}"
            ) as binnmu_changelog,
            open(
                binnmu_changelog.fs_path,
                "w",
                encoding="utf-8",
            ) as binnmu_fd,
        ):
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}
def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    """Normalize the installed Debian changelog name and trim old entries.

    For native packages, "changelog.Debian" is renamed to "changelog". The
    changelog is then pruned (unless DEB_BUILD_OPTIONS=notrimdch) and
    NEWS.Debian is pruned to the surviving versions.
    """
    doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not doc_dir:
        return
    changelog = doc_dir.get("changelog.Debian")
    if is_native:
        if changelog:
            # Native packages install the Debian changelog as "changelog".
            changelog.name = "changelog"
        else:
            changelog = doc_dir.get("changelog")

    trim = "notrimdch" not in build_env.deb_build_options

    pruned_changelog = False
    kept_entries = None
    if changelog and changelog.has_fs_path:
        pruned_changelog, kept_entries = _prune_dch_file(
            dctrl, changelog, True, None, trim=trim
        )

    if not trim:
        return

    news_file = doc_dir.get("NEWS.Debian")
    if news_file and news_file.has_fs_path and pruned_changelog:
        # Keep NEWS in sync with the trimmed changelog (avoids lintian noise).
        _prune_dch_file(dctrl, news_file, False, kept_entries)
436_UPSTREAM_CHANGELOG_SOURCE_DIRS = [
437 ".",
438 "doc",
439 "docs",
440]
441_UPSTREAM_CHANGELOG_NAMES = {
442 # The value is a priority to match the debhelper order.
443 # - The suffix weights heavier than the basename (because that is what debhelper did)
444 #
445 # We list the name/suffix in order of priority in the code. That makes it easier to
446 # see the priority directly, but it gives the "lowest" value to the most important items
447 f"{n}{s}": (sw, nw)
448 for (nw, n), (sw, s) in itertools.product(
449 enumerate(["changelog", "changes", "history"], start=1),
450 enumerate(["", ".txt", ".md", ".rst", ".org"], start=1),
451 )
452}
453_NONE_TUPLE = (None, (0, 0))
456def _detect_upstream_changelog(names: Iterable[str]) -> str | None:
457 matches = []
458 for name in names:
459 match_priority = _UPSTREAM_CHANGELOG_NAMES.get(name.lower())
460 if match_priority is not None:
461 matches.append((name, match_priority))
462 return min(matches, default=_NONE_TUPLE, key=operator.itemgetter(1))[0]
def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: InMemoryVirtualPathBase,
    source_fs_root: VirtualPath,
) -> None:
    """Install the upstream changelog as /usr/share/doc/<pkg>/changelog.

    A candidate already inside the package's doc directory is preferred
    (renamed in place); otherwise well-known source-tree directories are
    searched and the first match is copied in.
    """
    doc_dir_name = f"./usr/share/doc/{dctrl_bin.name}"
    doc_dir = fs_root.lookup(doc_dir_name)
    if doc_dir and not doc_dir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if doc_dir:
        if doc_dir.get("changelog") or doc_dir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        in_package = _detect_upstream_changelog(
            entry.name
            for entry in doc_dir.iterdir()
            if entry.is_file and entry.has_fs_path and entry.size > 0
        )
        if in_package:
            candidate = doc_dir.lookup(in_package)
            assert candidate is not None  # Mostly as a typing hint
            candidate.name = "changelog"
            return
    for src_dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        src_dir = source_fs_root.lookup(src_dirname)
        if not src_dir or not src_dir.is_dir:
            continue
        found = _detect_upstream_changelog(
            entry.name
            for entry in src_dir.iterdir()
            if entry.is_file and entry.has_fs_path and entry.size > 0
        )
        if found:
            if doc_dir is None:
                doc_dir = fs_root.mkdirs(doc_dir_name)
            doc_dir.insert_file_from_fs_path(
                "changelog",
                src_dir[found].fs_path,
            )
            break
@dataclasses.dataclass(slots=True)
class _ElfInfo:
    """Book-keeping record for one ELF file during stripping/dbgsym extraction."""

    # The file inside the package's virtual fs tree.
    path: VirtualPath
    # The materialized (writable) path on the host file system.
    fs_path: str
    # Result of the file(1) analysis; None until _resolve_build_ids has run.
    is_stripped: bool | None = None
    # GNU build-id (hex string), if file(1) reported one.
    build_id: str | None = None
    # The generated .debug file in the dbgsym tree, once created.
    dbgsym: InMemoryVirtualPathBase | None = None
def _elf_static_lib_walk_filter(
    fs_path: VirtualPath,
    children: list[VP],
) -> bool:
    """Walk filter selecting which paths to consider for ELF/static-lib processing.

    Mutates *children* in place to prune individual entries, and returns False
    to skip descending into the directory entirely.
    """
    # Skip already-detached debug files (usr/lib/debug/.build-id/...).
    if (
        fs_path.name == ".build-id"
        and assume_not_none(fs_path.parent_dir).name == "debug"
    ):
        children.clear()
        return False
    # Deal with some special cases, where certain files are not supposed to be stripped in a given directory
    if "debug/" in fs_path.path or fs_path.name.endswith("debug/"):
        # NOTE(review): `name` appears to be a basename elsewhere in this file,
        # which would make the `endswith("debug/")` clause unsatisfiable —
        # confirm the intended condition.
        # FIXME: We need a way to opt out of this per #468333/#1016122
        for so_file in (f for f in list(children) if f.name.endswith(".so")):
            children.remove(so_file)
    if "/guile/" in fs_path.path or fs_path.name == "guile":
        # *.go files under guile dirs are excluded from processing
        # (presumably compiled Guile objects, not Go files).
        for go_file in (f for f in list(children) if f.name.endswith(".go")):
            children.remove(go_file)
    return True
@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[dict[str, _ElfInfo]]:
    """Yield a mapping of materialized fs path -> `_ElfInfo` for all ELF files.

    While the context is active, every ELF file's content is detached (via
    `replace_fs_path_content`) so it can be modified in place; strip status
    and build-ids are resolved before yielding. Yields an empty dict when no
    ELF files are present.
    """
    all_elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not all_elf_files:
        yield {}
        return
    with ExitStack() as cm_stack:
        # Detach each file's content; the ExitStack re-attaches everything
        # when the caller leaves this context.
        resolved = (
            (p, cm_stack.enter_context(p.replace_fs_path_content()))
            for p in all_elf_files
        )
        elf_info = {
            fs_path: _ElfInfo(
                path=assume_not_none(fs_root.lookup(detached_path.path)),
                fs_path=fs_path,
            )
            for detached_path, fs_path in resolved
        }
        _resolve_build_ids(elf_info)
        yield elf_info
564def _find_all_static_libs(
565 fs_root: InMemoryVirtualPathBase,
566) -> Iterator[InMemoryVirtualPathBase]:
567 for path, children in fs_root.walk():
568 # Matching the logic of dh_strip for now.
569 if not _elf_static_lib_walk_filter(path, children):
570 continue
571 if not path.is_file:
572 continue
573 if path.name.startswith("lib") and path.name.endswith("_g.a"):
574 # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
575 # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
576 # symbols")
577 continue
578 if not path.has_fs_path:
579 continue
580 with path.open(byte_io=True) as fd:
581 magic = fd.read(8)
582 if magic not in (b"!<arch>\n", b"!<thin>\n"):
583 continue
584 # Maybe we should see if the first file looks like an index file.
585 # Three random .a samples suggests the index file is named "/"
586 # Not sure if we should skip past it and then do the ELF check or just assume
587 # that "index => static lib".
588 data = fd.read(1024 * 1024)
589 if b"\0" not in data and ELF_MAGIC not in data:
590 continue
591 yield path
@contextlib.contextmanager
def _all_static_libs(fs_root: InMemoryVirtualPathBase) -> Iterator[list[str]]:
    """Yield the materialized fs paths of every static library in the tree.

    The paths are writable in place while the context is active; an empty
    list is yielded when no static libraries exist.
    """
    static_libs = list(_find_all_static_libs(fs_root))
    if not static_libs:
        yield []
        return
    with ExitStack() as stack:
        detached = [
            stack.enter_context(lib.replace_fs_path_content()) for lib in static_libs
        ]
        yield detached
# Extracts the hex build-id from file(1) output, e.g. "BuildID[sha1]=abc123".
_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)")


def _resolve_build_ids(elf_info: dict[str, _ElfInfo]) -> None:
    """Fill in `is_stripped` and `build_id` for every entry using file(1)."""
    base_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        base_cmd.append("--no-sandbox")

    for cmd in xargs(base_cmd, (entry.fs_path for entry in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        records = output.rstrip(b"\0").split(b"\0")

        # file -00 emits (path, verdict) pairs separated by NULs.
        for raw_path, verdict in grouper(records, 2, incomplete="strict"):
            info = elf_info[raw_path.decode("utf-8")]
            info.is_stripped = b"not stripped" not in verdict
            id_match = _FILE_BUILD_ID_RE.search(verdict)
            if id_match:
                info.build_id = id_match.group(1).decode("utf-8")
def _make_debug_file(
    objcopy: str,
    fs_path: str,
    build_id: str,
    dbgsym_fs_root: InMemoryVirtualPathBase,
) -> InMemoryVirtualPathBase:
    """Extract the debug sections of an ELF file into a build-id keyed .debug file.

    Returns the existing dbgsym entry when a file for this build-id has
    already been created; aborts the build if objcopy fails.
    """
    target_dir = dbgsym_fs_root.mkdirs(f"./usr/lib/debug/.build-id/{build_id[0:2]}/")
    debug_basename = f"{build_id[2:]}.debug"
    if debug_basename in target_dir:
        return target_dir[debug_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with target_dir.add_file(
        debug_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as debug_file:
        try:
            subprocess.check_call(
                [
                    objcopy,
                    "--only-keep-debug",
                    "--compress-debug-sections",
                    fs_path,
                    debug_file.fs_path,
                ]
            )
        except subprocess.CalledProcessError:
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, debug_file.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return debug_file
673def _strip_binary(strip: str, options: list[str], paths: Iterable[str]) -> None:
674 # We assume the paths are obtained via `p.replace_fs_path_content()`,
675 # which is the case at the time of written and should remain so forever.
676 it = iter(paths)
677 first = next(it, None)
678 if first is None:
679 return
680 static_cmd = [strip]
681 static_cmd.extend(options)
683 for cmd in xargs(static_cmd, itertools.chain((first,), (f for f in it))):
684 _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}")
685 try:
686 subprocess.check_call(
687 cmd,
688 stdin=subprocess.DEVNULL,
689 restore_signals=True,
690 )
691 except subprocess.CalledProcessError:
692 _error(
693 f"Attempting to remove ELF debug info failed. Please review the error from {strip} above"
694 f" understand what went wrong."
695 )
def _attach_debug(
    objcopy: str, elf_binary: VirtualPath, dbgsym: InMemoryVirtualPathBase
) -> None:
    """Attach a .gnu_debuglink to *elf_binary* pointing at its dbgsym file.

    Aborts the build if objcopy fails.
    """
    dbgsym_fs_path: str
    with dbgsym.replace_fs_path_content() as dbgsym_fs_path:
        cmd = [objcopy, "--add-gnu-debuglink", dbgsym_fs_path, elf_binary.fs_path]
        print_command(*cmd)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            # Fixed message: was missing the word "to" ("above understand").
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above to understand what went wrong."
            )
714@functools.lru_cache
715def _has_tool(tool: str) -> bool:
716 return shutil.which(tool) is not None
719def _run_dwz(
720 dctrl: BinaryPackage,
721 dbgsym_fs_root: InMemoryVirtualPathBase,
722 unstripped_elf_info: list[_ElfInfo],
723) -> None:
724 if not unstripped_elf_info or dctrl.is_udeb or not _has_tool("dwz"):
725 return
726 dwz_cmd = ["dwz"]
727 dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
728 dwz_ma_basename = f"{dctrl.name}.debug"
729 multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
730 build_time_multifile = None
731 if len(unstripped_elf_info) > 1:
732 fs_content_dir = generated_content_dir()
733 fd, build_time_multifile = mkstemp(suffix=dwz_ma_basename, dir=fs_content_dir)
734 os.close(fd)
735 dwz_cmd.append(f"-m{build_time_multifile}")
736 dwz_cmd.append(f"-M/{multifile}")
738 # TODO: configuration for disabling multi-file and tweaking memory limits
740 dwz_cmd.extend(e.fs_path for e in unstripped_elf_info)
742 _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
743 try:
744 subprocess.check_call(dwz_cmd)
745 except subprocess.CalledProcessError:
746 _error(
747 "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
748 " to understand what went wrong."
749 )
750 if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
751 dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
752 dwz_dir.insert_file_from_fs_path(
753 dwz_ma_basename,
754 build_time_multifile,
755 mode=0o644,
756 require_copy_on_write=False,
757 follow_symlinks=False,
758 )
def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: InMemoryVirtualPathBase,
    dbgsym_fs_root: VirtualPath,
    *,
    run_dwz: bool = False,
) -> list[str]:
    """Strip the package's binaries and relocate debug info into the dbgsym tree.

    Static libraries are debug-stripped in place. For ELF files, debug
    sections are extracted into build-id keyed .debug files under
    *dbgsym_fs_root*, the originals are stripped, and a gnu-debuglink is
    attached. Optionally runs dwz first to deduplicate DWARF data.

    :return: Sorted, de-duplicated build-ids destined for the dbgsym package
      (empty when there were no ELF files).
    """
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        if run_dwz:
            _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        # Extract the debug sections before stripping destroys them.
        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When run strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

    # Set for uniqueness
    all_debug_info = sorted(
        {assume_not_none(i.build_id) for i in unstripped_elf_info}
    )

    dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
    dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
    return all_debug_info
def run_package_processors(
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
) -> None:
    """Run every applicable plugin-provided package processor on the package."""
    binary_package = package_metadata_context.binary_package
    processors = manifest.plugin_provided_feature_set.package_processors_in_order()
    for processor in processors:
        if processor.applies_to(binary_package):
            processor.run_package_processor(fs_root, None, package_metadata_context)
def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: "HighLevelManifest",
) -> None:
    """Compute shlibs information across all arch-dependent binary packages.

    Merges the maintainer's `debian/shlibs.local` (if present) with entries
    generated from each package's shared libraries, materializes a combined
    shlibs.local for dpkg-shlibdeps, and records the resulting shlibs details
    on every package's control-file creator. Aborts the build after reporting
    all dpkg-gensymbols errors.
    """
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: list[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        # Seed the combined shlibs with the maintainer-provided file.
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        # shlibs only apply to arch-dependent deb packages that are being built.
        if (
            binary_package.is_arch_all
            or not binary_package.should_be_acted_on
            or binary_package.is_udeb
        ):
            continue
        fs_root = binary_package_data.fs_root
        package_metadata_context = binary_package_data.package_metadata_context
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = package_metadata_context.related_udeb_package

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                package_metadata_context,
                binary_package_data.control_output_dir.fs_path,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            # Collect errors so every package is reported before aborting.
            errors.append(e.message)
        else:
            if soname_info_list:
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        # Materialize the merged shlibs so dpkg-shlibdeps can consume it.
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "w", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    # Second pass: record the shlibs details on every acted-on package.
    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )
940def _relevant_service_definitions(
941 service_rule: ServiceRule,
942 service_managers: list[str] | frozenset[str],
943 by_service_manager_key: Mapping[
944 tuple[str, str, str, str], tuple[ServiceManagerDetails, ServiceDefinition[Any]]
945 ],
946 aliases: Mapping[str, Sequence[tuple[str, str, str, str]]],
947) -> Iterable[tuple[tuple[str, str, str, str], ServiceDefinition[Any]]]:
948 as_keys = (key for key in aliases[service_rule.service])
950 pending_queue = {
951 key
952 for key in as_keys
953 if key in by_service_manager_key
954 and service_rule.applies_to_service_manager(key[-1])
955 }
956 relevant_names: dict[tuple[str, str, str, str], ServiceDefinition[Any]] = {}
957 seen_keys = set()
959 if not pending_queue:
960 service_manager_names = ", ".join(sorted(service_managers))
961 _error(
962 f"No none of the service managers ({service_manager_names}) detected a service named"
963 f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope}),"
964 f" but the manifest definition at {service_rule.definition_source} requested that."
965 )
967 while pending_queue:
968 next_key = pending_queue.pop()
969 seen_keys.add(next_key)
970 _, definition = by_service_manager_key[next_key]
971 yield next_key, definition
972 for name in definition.names:
973 for target_key in aliases[name]:
974 if (
975 target_key not in seen_keys
976 and service_rule.applies_to_service_manager(target_key[-1])
977 ):
978 pending_queue.add(target_key)
980 return relevant_names.items()
def handle_service_management(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:
    """Run service detection and integration for all plugin-provided service managers.

    Each registered service manager detects services in the package's file
    system tree; service rules from the manifest are then applied to the
    matching detected definitions, and finally each service manager's
    integrator is invoked to emit its maintscript snippets.
    """

    # (service name, type-of-service, scope, service-manager) -> (manager details, definition)
    by_service_manager_key = {}
    # alias name -> list of keys into `by_service_manager_key` that carry that name
    aliases_by_name = collections.defaultdict(list)

    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    # Up-front validation: every explicitly requested service manager must exist.
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )

    # Detection pass: let each service manager find services in the package contents.
    for service_manager_details in feature_set.service_managers.values():
        service_registry: ServiceRegistryImpl = ServiceRegistryImpl(
            service_manager_details
        )
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )

        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue

        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )

            # Register every alias of the definition for rule matching below.
            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)

    # Rule pass: apply each manifest service rule to the matching definitions.
    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            sm = service_key[-1]
            seen_service_managers.add(sm)
            # Swap in a definition with the rule's changes applied; the
            # manager details (first tuple element) stay unchanged.
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )

        # NOTE(review): the comparison/difference below assumes
        # `requested_service_rule.service_managers` is set-like when it is
        # explicitly provided — confirm against its declaration.
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition match the relevant services."
            )

    # Regroup the (possibly rule-amended) definitions per service manager.
    per_service_manager = {}

    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)

    # Integration pass: hand the final definitions to each service manager so
    # it can generate the relevant maintscript snippets.
    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )
def setup_control_files(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: list[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    """Populate the package's DEBIAN/ control directory.

    Runs alternatives/debconf/service handling and all metadata detectors,
    generates maintainer scripts, triggers, conffiles, and finally the
    control/md5sums files.

    :param binary_package_data: Data (fs root, control dir, substvars, ...) of
      the binary package being assembled.
    :param manifest: The high level manifest with per-package state.
    :param dbgsym_fs_root: Data root of the -dbgsym package (may be empty).
    :param dbgsym_ids: Build-ids collected for the -dbgsym package.
    :param package_metadata_context: Context handed to plugin detectors.
    :param allow_ctrl_file_management: When False (dh integration mode after
      dh_installdeb), debputy must not manage maintscripts/triggers itself and
      instead copies the dh-staged control files.
    """
    binary_package = package_metadata_context.binary_package
    control_output_dir = binary_package_data.control_output_dir
    control_output_fs_path = control_output_dir.fs_path
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)

    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars

    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())

    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        # udebs only support a reduced set of maintainer scripts.
        snippets = ["postinst"]

    if allow_ctrl_file_management:
        # debputy fully manages the control files: run all content-derived
        # handlers and then write the maintainer scripts.
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_fs_path,
        )

        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )

        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )

        for script in snippets:
            _generate_snippet(
                control_output_fs_path,
                script,
                package_state.maintscript_snippets,
            )

    else:
        # dh integration mode: dh_installdeb already ran, so debputy-managed
        # service rules, maintscript snippets, and triggers are all errors.
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )

        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )

        # The dpkg-shlibdeps detector is the only debputy detector still run in
        # this mode (dependency substvars are needed for dpkg-gencontrol).
        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]

        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)

        # Copy over the control files dh already staged (except control and
        # md5sums, which debputy regenerates itself below).
        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            existing_control_files = []

        if existing_control_files:
            # `cp -a` preserves permissions and timestamps of the dh output.
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_fs_path)
            print_command(*cmd)
            subprocess.check_call(cmd)

    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return

    if generated_triggers:
        # Triggers are only generated when debputy manages the control files.
        assert allow_ctrl_file_management
        dest_file = os.path.join(control_output_fs_path, "triggers")
        with open(dest_file, "a", encoding="utf-8") as fd:
            fd.writelines(textwrap.dedent(f"""\
                # Added by {t.provider_source_id} from {t.provider.plugin_name}
                {t.dpkg_trigger_type} {t.dpkg_trigger_target}
            """) for t in generated_triggers)
            os.chmod(fd.fileno(), 0o644)

    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            control_output_dir,
            fs_root,
            package_state.reserved_packager_provided_files,
        )

    _generate_control_files(
        binary_package_data,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )
def _generate_snippet(
    control_output_dir: str,
    script: str,
    maintscript_snippets: dict[str, MaintscriptSnippetContainer],
) -> None:
    """Write the maintainer script `script` if any snippets were registered for it.

    Does nothing when no snippets exist for the script or when all generated
    snippets turn out empty.
    """
    container = maintscript_snippets.get(script)
    if container is None:
        return
    # Removal scripts run their snippets in the opposite order of install scripts.
    is_removal_script = script in ("prerm", "postrm")
    parts = [
        container.generate_snippet(reverse=is_removal_script),
        container.generate_snippet(snippet_order="service", reverse=is_removal_script),
    ]
    if is_removal_script:
        parts = reversed(parts)
    body = "".join(f"{part}\n" for part in parts if part)
    if not body:
        return
    script_path = os.path.join(control_output_dir, script)
    with open(script_path, "w") as fd:
        fd.write("#!/bin/sh\nset -e\n\n")
        if container.needs_debconf():
            # Source the debconf confmodule first so snippets can use it.
            fd.write(textwrap.dedent("""\
                # Snippet source: debputy (dependency on debconf)
                if [ -e /usr/share/debconf/confmodule ]; then
                    . /usr/share/debconf/confmodule
                fi

            """))
        fd.write(body)
        # Maintainer scripts must be executable.
        os.chmod(fd.fileno(), 0o755)  # noqa: python:S2612
def _add_conffiles(
    ctrl_root: VirtualPathBase,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    """Append the given paths to the package's DEBIAN/conffiles.

    When there are no matches at all, the "conffiles" file is not even opened
    (avoiding the creation of an empty file).
    """
    remaining = iter(conffile_matches)
    head = next(remaining, None)
    if head is None:
        return
    with ctrl_root.open_child("conffiles", "at") as fd:
        for match in itertools.chain((head,), remaining):
            absolute_path = match.absolute
            assert match.is_file
            fd.write(f"{absolute_path}\n")
def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    """Ensure the standard `misc:*` substvars exist (as empty values if unset)."""
    for required_substvar in ("misc:Depends", "misc:Pre-Depends"):
        if required_substvar in substvars:
            continue
        substvars[required_substvar] = ""
def compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size"""
    total_kb = 0
    seen_hard_links: set[tuple[int, int]] = set()
    for entry in fs_root.all_paths():
        if not (entry.is_symlink or entry.is_file):
            # Directories (and anything else) count as 1 KiB apiece.
            total_kb += 1
            continue
        try:
            # If it is a VirtualPathBase instance, the use its `.stat()` method
            # since it might have the stat cached as a minor optimization on disk
            # access. Other than that, the `os.lstat` fallback is sufficient.
            st = (
                entry.stat()
                if isinstance(entry, VirtualPathBase)
                else os.lstat(entry.fs_path)
            )
            if st.st_nlink > 1:
                link_id = (st.st_dev, st.st_ino)
                if link_id in seen_hard_links:
                    # Each hard-linked inode is only counted once.
                    continue
                seen_hard_links.add(link_id)
            byte_size = st.st_size
        except PureVirtualPathError:
            # We just assume it is not a hard link when the path is purely virtual
            byte_size = entry.size
        # Round up to whole KiB, as dpkg does.
        total_kb += (byte_size + 1023) // 1024
    return total_kb
def _generate_dbgsym_control_file_if_relevant(
    binary_package: BinaryPackage,
    dbgsym_fs_root: VirtualPath,
    dbgsym_control_dir: FSControlRootDir,
    dbgsym_ids: str,
    multi_arch: str | None,
    dctrl: str,
    extra_common_params: Sequence[str],
) -> None:
    """Generate the DEBIAN/control file of the -dbgsym package via dpkg-gencontrol.

    :param binary_package: The binary package the debug symbols were extracted from.
    :param dbgsym_fs_root: Data root of the -dbgsym package (for Installed-Size).
    :param dbgsym_control_dir: Control root in which "control" is written.
    :param dbgsym_ids: Space-separated build-ids for the Build-Ids field.
    :param multi_arch: Multi-Arch value of the parent package, if any.
    :param dctrl: The control file to feed dpkg-gencontrol (either
      "debian/control" or a generated replacement containing a dbgsym stanza).
    :param extra_common_params: dpkg-gencontrol parameters shared with the
      parent package.
    """
    section = binary_package.archive_section
    component = ""
    extra_params = []
    # Keep the component prefix (e.g. "contrib/") in the Section; "main/" is
    # implied and therefore stripped.
    if section is not None and "/" in section and not section.startswith("main/"):
        component = section.split("/", 1)[1] + "/"
    # Only "Multi-Arch: same" carries over to the dbgsym package.
    if multi_arch != "same":
        extra_params.append("-UMulti-Arch")
    else:
        extra_params.append(f"-DMulti-Arch={multi_arch}")
    extra_params.append("-UReplaces")
    extra_params.append("-UBreaks")
    dbgsym_control_fs_path = dbgsym_control_dir.fs_path
    ensure_dir(dbgsym_control_fs_path)
    # Pass it via cmd-line to make it more visible that we are providing the
    # value. It also prevents the dbgsym package from picking up this value.
    total_size = compute_installed_size(dbgsym_fs_root) + compute_installed_size(
        dbgsym_control_dir
    )
    extra_params.append(f"-VInstalled-Size={total_size}")
    extra_params.extend(extra_common_params)

    package = binary_package.name
    # A generated control file contains an explicit "<pkg>-dbgsym" stanza,
    # while the stock debian/control only has the parent package's stanza.
    package_selector = (
        binary_package.name
        if dctrl == "debian/control"
        else f"{binary_package.name}-dbgsym"
    )
    dpkg_cmd = [
        "dpkg-gencontrol",
        f"-p{package_selector}",
        # FIXME: Support d/<pkg>.changelog at some point.
        "-ldebian/changelog",
        "-T/dev/null",
        f"-c{dctrl}",
        f"-O{dbgsym_control_fs_path}/control",
        # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
        "-P/non-existent",
        f"-DPackage={package}-dbgsym",
        "-DDepends=" + package + " (= ${binary:Version})",
        f"-DDescription=debug symbols for {package}",
        f"-DSection={component}debug",
        f"-DBuild-Ids={dbgsym_ids}",
        "-UPre-Depends",
        "-URecommends",
        "-USuggests",
        "-UEnhances",
        "-UProvides",
        "-UEssential",
        "-UConflicts",
        "-DPriority=optional",
        "-UHomepage",
        "-UImportant",
        "-UBuilt-Using",
        "-UStatic-Built-Using",
        "-DAuto-Built-Package=debug-symbols",
        "-UProtected",
        *extra_params,
    ]
    print_command(*dpkg_cmd)
    try:
        subprocess.check_call(dpkg_cmd)
    except subprocess.CalledProcessError:
        # Fixed: the concatenated literals previously rendered a double space
        # ("from  dpkg-gencontrol").
        _error(
            f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output"
            " from dpkg-gencontrol above to understand what went wrong."
        )
    os.chmod(os.path.join(dbgsym_control_fs_path, "control"), 0o644)
1433def _all_parent_directories_of(directories: Iterable[str]) -> set[str]:
1434 result = {"."}
1435 for path in directories:
1436 current = os.path.dirname(path)
1437 while current and current not in result:
1438 result.add(current)
1439 current = os.path.dirname(current)
1440 return result
def _compute_multi_arch_for_arch_all_doc(
    binary_package: BinaryPackage,
    fs_root: InMemoryVirtualPathBase,
) -> str | None:
    """Return "foreign" for a pure documentation package, otherwise None.

    We limit by package name, since there are tricks involving a `Multi-Arch: no`
    depending on a `Multi-Arch: same` to emulate `Multi-Arch: allowed`. Said
    `Multi-Arch: no` can have no contents.

    That case seems unrealistic for -doc/-docs packages and accordingly the
    limitation here.
    """
    if not binary_package.name.endswith(("-doc", "-docs")):
        return None
    lintian_override = f"./usr/share/lintian/overrides/{binary_package.name}"
    has_other_content = _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths={"./usr/share/doc"},
        acceptable_files={lintian_override},
    )
    return None if has_other_content else "foreign"
def _any_unacceptable_paths(
    fs_root: InMemoryVirtualPathBase,
    *,
    acceptable_no_descend_paths: list[str] | AbstractSet[str] = frozenset(),
    acceptable_files: list[str] | AbstractSet[str] = frozenset(),
) -> bool:
    """Check whether the tree contains anything outside the given allow-lists.

    :param fs_root: Root of the tree to scan.
    :param acceptable_no_descend_paths: Directories that (with their entire
      subtree) are acceptable; they are not descended into.
    :param acceptable_files: Individual paths that are acceptable.
    :return: True if any path falls outside the allow-lists, else False.
    """
    # Directories that merely lead to allow-listed entries are implicitly fine.
    implied_dirs = _all_parent_directories_of(
        itertools.chain(acceptable_no_descend_paths, acceptable_files)
    )
    for entry, children in fs_root.walk():
        entry_path = entry.path
        if entry_path in acceptable_no_descend_paths:
            # Accepted wholesale; prune the subtree from the walk.
            children.clear()
        elif entry_path not in implied_dirs and entry_path not in acceptable_files:
            return True
    return False
def auto_compute_multi_arch(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    fs_root: InMemoryVirtualPathBase,
) -> str | None:
    """Derive a Multi-Arch value from the package contents, if safely possible.

    Returns None whenever no safe value can be determined (maintainer scripts
    present, or content outside the recognized per-architecture paths).
    """
    resolved_arch = binary_package.resolved_architecture
    # Any maintainer script rules out Multi-Arch auto-detection.
    for script in ALL_CONTROL_SCRIPTS:
        script_path = control_output_dir.get(script)
        if script_path is not None and script_path.is_file:
            return None

    if resolved_arch == "all":
        return _compute_multi_arch_for_arch_all_doc(binary_package, fs_root)

    resolved_multiarch = binary_package.deb_multiarch
    assert resolved_arch != "all"
    # Multi-arch library/include dirs are inherently per-architecture.
    no_descend_dirs = {
        f"./usr/lib/{resolved_multiarch}",
        f"./usr/include/{resolved_multiarch}",
    }
    allowed_files = {
        f"./usr/share/doc/{binary_package.name}/{basename}"
        for basename in (
            "copyright",
            "changelog.gz",
            "changelog.Debian.gz",
            f"changelog.Debian.{resolved_arch}.gz",
            "NEWS.Debian",
            "NEWS.Debian.gz",
            "README.Debian",
            "README.Debian.gz",
        )
    }

    # Note that the lintian-overrides file is deliberately omitted from the allow-list. We would have to know that the
    # override does not use architecture segments. With pure debputy, this is guaranteed (debputy
    # does not allow lintian-overrides with architecture segment). However, with a mixed debhelper + debputy,
    # `dh_lintian` allows it with compat 13 or older.

    if _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_dirs,
        acceptable_files=allowed_files,
    ):
        return None

    return "same"
@functools.lru_cache
def _has_t64_enabled() -> bool:
    """Report whether dpkg-buildflags lists the time64 ABI feature as enabled."""
    try:
        abi_features = subprocess.check_output(
            ["dpkg-buildflags", "--query-features", "abi"]
        ).decode()
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Without a (working) dpkg-buildflags, assume time64 is off.
        return False

    return any(
        stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes"
        for stanza in Deb822.iter_paragraphs(abi_features)
    )
def _t64_migration_substvar(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    substvars: FlushableSubstvars,
) -> None:
    """Add the 64-bit time_t (t64) transition Provides substvar when relevant.

    Packages renamed for the t64 transition provide their pre-transition name
    on architectures where the ABI did not actually change (non-32-bit, or
    time64 not enabled).
    """
    name = binary_package.name
    compat_name = binary_package.fields.get("X-Time64-Compat")
    # Only relevant for t64-renamed packages or an explicit compat header.
    if compat_name is None and not _T64_REGEX.match(name):
        return

    # Only packages shipping a symbols or shlibs file take part.
    if not any(
        p.is_file
        for n in ["symbols", "shlibs"]
        if (p := control_output_dir.get(n)) is not None
    ):
        return

    if compat_name is None:
        compat_name = name.replace("t64", "", 1)
        if compat_name == name:
            raise AssertionError(
                f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
                " As a work around, you can explicitly provide a X-Time64-Compat header in debian/control"
                " where you specify the desired compat name."
            )

    arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")

    if arch_bits != "32" or not _has_t64_enabled():
        # FIX: the f-string previously contained an unescaped replacement field
        # (`${ binary:Version}`) referencing an undefined name `binary`, which
        # would raise NameError at runtime. Escape the braces so the literal
        # dpkg substvar `${binary:Version}` is emitted.
        substvars.add_dependency(
            _T64_PROVIDES,
            f"{compat_name} (= ${{binary:Version}})",
        )
    elif _T64_PROVIDES not in substvars:
        # 32-bit with time64 enabled: the ABI changed, so no compat Provides;
        # still define the substvar to avoid dpkg-gencontrol warnings.
        substvars[_T64_PROVIDES] = ""
@functools.lru_cache
def dpkg_field_list_pkg_dep() -> Sequence[str]:
    """Return the package relationship field names known to dpkg.

    The names are obtained from Dpkg::Control::Fields via perl, so they match
    the dpkg version installed on the system.
    """
    perl_cmd = [
        "perl",
        "-MDpkg::Control::Fields",
        "-e",
        r'print "$_\n" for field_list_pkg_dep',
    ]
    try:
        raw_output = subprocess.check_output(perl_cmd)
    except (FileNotFoundError, subprocess.CalledProcessError):
        _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
    return raw_output.decode("utf-8").splitlines(keepends=False)
# Fields that debputy supports for substvar merging even though dpkg's
# Dpkg::Control::Fields does not know them as relationship fields.  When
# merged into a control stanza, they are written with an "XB-" prefix so
# dpkg-gencontrol passes them through.
_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG = {
    "Commands",
}
@functools.lru_cache
def all_auto_substvars() -> Sequence[str]:
    """Return all field names that participate in automatic substvar merging.

    This is the dpkg-provided package relationship fields plus the extra
    fields dpkg itself does not know (``_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG``).
    """
    # The original used an identity comprehension (`[x for x in ...]`) plus
    # extend(); build the tuple directly instead.
    return (
        *dpkg_field_list_pkg_dep(),
        *_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG,
    )
def _handle_auto_substvars(
    source: SourcePackage,
    dctrl_file: BinaryPackage,
    substvars: FlushableSubstvars,
    has_dbgsym: bool,
) -> str | None:
    """Merge auto-mergeable substvars into a generated control file if needed.

    :param source: The source package (its stanza is written first).
    :param dctrl_file: The binary package whose stanza may need merging.
    :param substvars: The substvars collected for the binary package.
    :param has_dbgsym: Whether a -dbgsym package will be built.
    :return: Path to a generated replacement for "debian/control" with the
      substvars merged in, or None when the stock "debian/control" suffices.
    """
    auto_substvars_fields = all_auto_substvars()
    # Lowercased name -> canonical name, for case-insensitive field matching.
    auto_substvars_fields_lc = {x.lower(): x for x in auto_substvars_fields}
    # Lowercased field name -> set of "${substvar}" tokens to merge into it.
    substvar_fields = collections.defaultdict(set)
    needs_dbgsym_stanza = False
    for substvar_name, substvar in substvars.as_substvar.items():
        # Only namespaced substvars ("<ns>:<Field>") can target a field.
        if ":" not in substvar_name:
            continue
        if substvar.assignment_operator in ("$=", "!="):
            # Will create incorrect results if there is a dbgsym and we do nothing
            needs_dbgsym_stanza = True

        if substvar.assignment_operator == "$=":
            # Automatically handled; no need for manual merging.
            continue
        _, field = substvar_name.rsplit(":", 1)
        field_lc = field.lower()
        if field_lc not in auto_substvars_fields_lc:
            continue
        substvar_fields[field_lc].add("${" + substvar_name + "}")

    if not has_dbgsym:
        needs_dbgsym_stanza = False

    if not substvar_fields and not needs_dbgsym_stanza:
        # Nothing to merge; the caller can use "debian/control" as-is.
        return None

    replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)

    for field_name in auto_substvars_fields:
        field_name_lc = field_name.lower()
        addendum = substvar_fields.get(field_name_lc)
        if addendum is None:
            # No merging required
            continue
        # Sort for deterministic output.
        substvars_part = ", ".join(sorted(addendum))
        existing_value = replacement_stanza.get(field_name)

        if existing_value is None or existing_value.isspace():
            final_value = substvars_part
        else:
            # Strip any trailing comma before appending the substvar tokens.
            existing_value = existing_value.rstrip().rstrip(",")
            final_value = f"{existing_value}, {substvars_part}"
        replacement_stanza[field_name] = final_value
        canonical_field_name = auto_substvars_fields_lc.get(field_name_lc)
        # If `dpkg` does not know the field, we need to inject `XB-` in front
        # of it.
        if (
            canonical_field_name
            and canonical_field_name in _SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG
        ):
            replacement_stanza[f"XB-{canonical_field_name}"] = replacement_stanza[
                field_name
            ]
            del replacement_stanza[field_name]

    # Keep "Description" as the last field, matching debian/control convention.
    with suppress(KeyError):
        replacement_stanza.order_last("Description")

    tmpdir = generated_content_dir(package=dctrl_file)
    with tempfile.NamedTemporaryFile(
        mode="wb",
        dir=tmpdir,
        suffix="__DEBIAN_control",
        delete=False,
    ) as fd:
        try:
            # `source.fields` may expose a Deb822-style `dump`; otherwise
            # fall back to re-wrapping it in a Deb822.
            cast("Any", source.fields).dump(fd)
        except AttributeError:
            debian.deb822.Deb822(source.fields).dump(fd)
        fd.write(b"\n")
        replacement_stanza.dump(fd)

        if has_dbgsym:
            # Minimal stanza to avoid substvars warnings. Most fields are still set
            # via -D.
            dbgsym_stanza = Deb822()
            dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
            dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
            dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
            fd.write(b"\n")
            dbgsym_stanza.dump(fd)

    return fd.name
def _generate_control_files(
    binary_package_data: "BinaryPackageData",
    package_state: "PackageTransformationDefinition",
    control_output_dir: FSControlRootDir,
    fs_root: InMemoryVirtualPathBase,
    substvars: FlushableSubstvars,
    dbgsym_root_fs: VirtualPath | None,
    dbgsym_build_ids: list[str] | None,
) -> None:
    """Generate DEBIAN/control (and md5sums) for the package via dpkg-gencontrol.

    Also generates the control file of the -dbgsym package when one has
    content. For udebs the dbgsym parameters are None and md5sums is skipped.
    """
    binary_package = binary_package_data.binary_package
    source_package = binary_package_data.source_package
    package_name = binary_package.name
    extra_common_params = []
    extra_params_specific = []
    _ensure_base_substvars_defined(substvars)
    if "Installed-Size" not in substvars:
        # Pass it via cmd-line to make it more visible that we are providing the
        # value. It also prevents the dbgsym package from picking up this value.
        total_size = compute_installed_size(fs_root) + compute_installed_size(
            control_output_dir
        )
        extra_params_specific.append(f"-VInstalled-Size={total_size}")

    ma_value = binary_package.fields.get("Multi-Arch")
    if not binary_package.is_udeb and ma_value is None:
        # No explicit Multi-Arch field; try to derive one from the contents.
        ma_value = auto_compute_multi_arch(binary_package, control_output_dir, fs_root)
        if ma_value is not None:
            _info(
                f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based'
                ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field'
                ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the'
                ' relevant part of "debian/control" to disable this feature.'
            )
            # We want this to apply to the `-dbgsym` package as well to avoid
            # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable`
            extra_common_params.append(f"-DMulti-Arch={ma_value}")
    elif ma_value == "no":
        # "Multi-Arch: no" is the implied default; drop the field via -U.
        extra_common_params.append("-UMulti-Arch")

    dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else ""
    if package_state.binary_version is not None:
        # Manifest-provided version overrides the one from debian/changelog.
        extra_common_params.append(f"-v{package_state.binary_version}")

    _t64_migration_substvar(binary_package, control_output_dir, substvars)

    with substvars.flush() as flushed_substvars:
        # Only consider the dbgsym package "real" if it actually contains files.
        has_dbgsym = dbgsym_root_fs is not None and any(
            f for f in dbgsym_root_fs.all_paths() if f.is_file
        )
        dctrl_file = _handle_auto_substvars(
            source_package,
            binary_package,
            substvars,
            has_dbgsym,
        )
        if dctrl_file is None:
            # No merging was needed; use the stock control file.
            dctrl_file = "debian/control"

        if has_dbgsym:
            assert dbgsym_root_fs is not None  # mypy hint
            dbgsym_ctrl_dir = binary_package_data.dbgsym_info.dbgsym_ctrl_dir
            _generate_dbgsym_control_file_if_relevant(
                binary_package,
                dbgsym_root_fs,
                dbgsym_ctrl_dir,
                dbgsym_ids,
                ma_value,
                dctrl_file,
                extra_common_params,
            )
            generate_md5sums_file(
                dbgsym_ctrl_dir,
                dbgsym_root_fs,
            )
        elif dbgsym_ids:
            # Build-ids exist but no separate dbgsym package is produced;
            # record them in the main package's control file instead.
            extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}")

        ctrl_file = os.path.join(control_output_dir.fs_path, "control")
        dpkg_cmd = [
            "dpkg-gencontrol",
            f"-p{package_name}",
            # FIXME: Support d/<pkg>.changelog at some point.
            "-ldebian/changelog",
            f"-c{dctrl_file}",
            f"-T{flushed_substvars}",
            f"-O{ctrl_file}",
            # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
            "-P/non-existent",
            *extra_common_params,
            *extra_params_specific,
        ]
        print_command(*dpkg_cmd)
        try:
            subprocess.check_call(dpkg_cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to generate DEBIAN/control file for {package_name} failed. Please review the output from "
                " dpkg-gencontrol above to understand what went wrong."
            )
        os.chmod(ctrl_file, 0o644)

    if not binary_package.is_udeb:
        generate_md5sums_file(control_output_dir, fs_root)