Coverage for src/debputy/deb_packaging_support.py: 24%
836 statements
« prev ^ index » next coverage.py v7.8.2, created at 2026-04-19 20:37 +0000
1import collections
2import contextlib
3import dataclasses
4import datetime
5import functools
6import hashlib
7import itertools
8import operator
9import os
10import re
11import shutil
12import subprocess
13import tempfile
14import textwrap
15from contextlib import ExitStack, suppress
16from tempfile import mkstemp
17from typing import (
18 Literal,
19 TypeVar,
20 cast,
21 Any,
22 AbstractSet,
23 TYPE_CHECKING,
24)
25from collections.abc import Iterable, Sequence, Iterator, Mapping
27import debian.deb822
28from debian.changelog import Changelog
29from debian.deb822 import Deb822
30from debputy._deb_options_profiles import DebBuildOptionsAndProfiles
31from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
32from debputy.elf_util import find_all_elf_files, ELF_MAGIC
33from debputy.exceptions import DebputyDpkgGensymbolsError, PureVirtualPathError
34from debputy.filesystem_scan import (
35 FSControlRootDir,
36 VirtualPathBase,
37 InMemoryVirtualPathBase,
38)
39from debputy.maintscript_snippet import (
40 ALL_CONTROL_SCRIPTS,
41 MaintscriptSnippetContainer,
42 STD_CONTROL_SCRIPTS,
43)
44from debputy.packager_provided_files import PackagerProvidedFile
45from debputy.packages import BinaryPackage, SourcePackage
46from debputy.packaging.alternatives import process_alternatives
47from debputy.packaging.debconf_templates import process_debconf_templates
48from debputy.packaging.makeshlibs import (
49 compute_shlibs,
50 ShlibsContent,
51 generate_shlib_dirs,
52 resolve_reserved_provided_file,
53)
54from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
55from debputy.plugin.api.impl import ServiceRegistryImpl
56from debputy.plugin.api.impl_types import (
57 MetadataOrMaintscriptDetector,
58 PackageDataTable,
59 ServiceManagerDetails,
60)
61from debputy.plugin.api.spec import (
62 FlushableSubstvars,
63 VirtualPath,
64 PackageProcessingContext,
65 ServiceDefinition,
66)
67from debputy.plugins.debputy.binary_package_rules import ServiceRule
68from debputy.util import (
69 _error,
70 ensure_dir,
71 assume_not_none,
72 resolve_perl_config,
73 perlxs_api_dependency,
74 detect_fakeroot,
75 grouper,
76 _info,
77 xargs,
78 escape_shell,
79 generated_content_dir,
80 print_command,
81 _warn,
82)
84if TYPE_CHECKING:
85 from debputy.highlevel_manifest import (
86 HighLevelManifest,
87 PackageTransformationDefinition,
88 BinaryPackageData,
89 )
# Covariant TypeVar for walk filters that operate on any VirtualPath subtype.
VP = TypeVar("VP", bound=VirtualPath, covariant=True)

# Matches library package names of the form "lib...t64" (optionally with a
# "-nss" suffix) — presumably the 64-bit time_t transition packages; the
# constants are not referenced in this chunk of the file.
_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
# Substvar name for Provides entries related to the t64 transition.
_T64_PROVIDES = "t64:Provides"
def generate_md5sums_file(
    control_output_dir: "VirtualPathBase",
    fs_root: "VirtualPath",
) -> None:
    """Generate the DEBIAN/md5sums control file for a binary package.

    Conffiles are excluded (dpkg tracks their checksums separately),
    matching dh_md5sums behaviour. If there are no files to checksum,
    no md5sums file is created at all.

    :param control_output_dir: The control member directory (DEBIAN/); read
        for an existing "conffiles" file, written with "md5sums".
    :param fs_root: Root of the data member contents ("./"-prefixed paths).
    """
    exclude = set()
    conffiles = control_output_dir.get("conffiles")
    if conffiles and conffiles.is_file:
        with conffiles.open() as fd:
            for line in fd:
                if not line.startswith("/"):
                    continue
                # conffiles entries are absolute; translate to the
                # "./"-prefixed form used by the in-memory FS.
                exclude.add("." + line.rstrip("\n"))
    files_to_checksum = sorted(
        (
            path
            for path in fs_root.all_paths()
            if path.is_file and path.path not in exclude
        ),
        # Sort in the same order as dh_md5sums, which is not quite the same as dpkg/`all_paths()`
        # Compare `.../doc/...` vs `.../doc-base/...` if you want to see the difference between
        # the two approaches.
        key=lambda p: p.path,
    )
    if not files_to_checksum:
        return
    with control_output_dir.open_child("md5sums", "w") as md5fd:
        for member in files_to_checksum:
            path = member.path
            assert path.startswith("./")
            path = path[2:]
            file_hash = hashlib.md5()
            # Stream in chunks to avoid loading large files into memory.
            with member.open(byte_io=True) as f:
                while chunk := f.read(8192):
                    file_hash.update(chunk)
            # The md5sums format (per md5sum(1)) separates the checksum and
            # the file name with TWO spaces.
            md5fd.write(f"{file_hash.hexdigest()}  {path}\n")
def install_or_generate_conffiles(
    ctrl_root: InMemoryVirtualPathBase | FSControlRootDir,
    fs_root: VirtualPath,
    reserved_packager_provided_files: dict[str, list[PackagerProvidedFile]],
) -> None:
    """Install a packager-provided conffiles file and register /etc files.

    A non-empty `debian/<pkg>.conffiles` file (if provided) is installed
    verbatim; afterwards, every regular file below /etc is registered as a
    conffile as well.
    """
    provided = resolve_reserved_provided_file(
        "conffiles",
        reserved_packager_provided_files,
    )
    if provided and provided.is_file and provided.size > 0:
        ctrl_root.insert_file_from_fs_path(
            "conffiles",
            provided.fs_path,
            mode=0o644,
            reference_path=provided,
        )
    etc_dir = fs_root.lookup("etc")
    if etc_dir:
        regular_files = (p for p in etc_dir.all_paths() if p.is_file)
        _add_conffiles(ctrl_root, regular_files)
# Bit flags describing the kinds of Perl artifacts detected in a package
# (see handle_perl_code).
PERL_DEP_PROGRAM = 1  # Scripts run via a perl interpreter
PERL_DEP_INDEP_PM_MODULE = 2  # Arch-independent *.pm modules (vendorlib)
PERL_DEP_XS_MODULE = 4  # Compiled XS extensions (*.so under vendorarch)
PERL_DEP_ARCH_PM_MODULE = 8  # Arch-dependent *.pm modules (vendorarch)
# Mask of detections that rule out a multi-arch "perl:any" dependency;
# only programs and arch-independent modules are compatible with ":any".
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)
@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    """Return the installed version of `package` according to `dpkg -s`."""
    status_output = (
        subprocess.check_output(["dpkg", "-s", package]).decode("utf-8")
    )
    version = None
    for field_line in status_output.splitlines():
        if field_line.startswith("Version: "):
            version = field_line.removeprefix("Version:").strip()
            break
    assert version is not None
    return version
def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: InMemoryVirtualPathBase,
    substvars: FlushableSubstvars,
) -> None:
    """Detect Perl content in the package and populate `perl:Depends`.

    Scans the vendor perl include directories and all executable/.pl
    scripts, classifies findings via the PERL_DEP_* bit flags, and derives
    the dependency (perl vs. perl:any, plus a version bound and XS API
    dependency where needed) for the `perl:Depends` substvar.
    """
    perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in (perl_config_data.vendorarch, perl_config_data.vendorlib):
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    # Classify modules below vendorlib/vendorarch by file extension.
    for d, pm_mode in [
        (perl_config_data.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (perl_config_data.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            if path.name.endswith(".so"):
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                detected_dep_requirements |= pm_mode

    # Look for perl scripts anywhere in the package (except documentation).
    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            # Content under /usr/share/doc must not affect the dependency.
            children.clear()
            continue
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        # Only programs/arch-indep modules detected: "perl:any" suffices.
        dependency += ":any"

    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        # XS modules are tied to the perl version they were built against.
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())
def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None:
    """Abort the build if the package ships content under /usr/local."""
    usr_local = fs_root.lookup("./usr/local")
    if not usr_local or not any(usr_local.iterdir()):
        return
    # There are two key issues:
    # 1) Getting the generated maintscript carried on to the final maintscript
    # 2) Making sure that manifest created directories do not trigger the "unused error".
    _error(
        f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})."
    )
259def _find_and_analyze_systemd_service_files(
260 fs_root: VirtualPath,
261 systemd_service_dir: Literal["system", "user"],
262) -> Iterable[VirtualPath]:
263 service_dirs = [
264 f"./usr/lib/systemd/{systemd_service_dir}",
265 f"./lib/systemd/{systemd_service_dir}",
266 ]
267 aliases: dict[str, list[str]] = collections.defaultdict(list)
268 seen = set()
269 all_files = []
271 for d in service_dirs:
272 system_dir = fs_root.lookup(d)
273 if not system_dir:
274 continue
275 for child in system_dir.iterdir():
276 if child.is_symlink:
277 dest = os.path.basename(child.readlink())
278 aliases[dest].append(child.name)
279 elif child.is_file and child.name not in seen:
280 seen.add(child.name)
281 all_files.append(child)
283 return all_files
def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    """Abort the build if the package ships systemd user unit files."""
    user_units = _find_and_analyze_systemd_service_files(fs_root, "user")
    for service_file in user_units:
        _error(
            f'Sorry, systemd user services files are not supported at the moment (saw "{service_file.path}"'
            f" in {dctrl.name})"
        )
# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
# Always keep at least this many (non-binNMU) changelog entries when trimming.
_DCH_MIN_NUM_OF_ENTRIES = 4
def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: set[str] | None,
    *,
    trim: bool = True,
) -> tuple[bool, set[str] | None]:
    """Prune old entries from a Debian changelog or NEWS file.

    For changelogs (`is_changelog=True`) and `trim=True`, entries older than
    `_DCH_PRUNE_CUT_OFF_DATE` are dropped once `_DCH_MIN_NUM_OF_ENTRIES`
    newer entries have been kept. binNMU entries are always preserved, but
    are moved into an architecture-qualified changelog file. For NEWS files,
    only entries whose version is listed in `keep_versions` are retained;
    the file is deleted if nothing remains (see #1021607).

    :param package: The binary package the file belongs to.
    :param path: The changelog/NEWS file; rewritten (or unlinked) in place.
    :param is_changelog: True for a changelog, False for a NEWS file.
    :param keep_versions: Versions to retain in a NEWS file (required when
        `is_changelog` is False); unused for changelogs.
    :param trim: When False, no age-based pruning happens (binNMU handling
        still applies).
    :return: (was_shortened, versions_kept); versions_kept is None when the
        file was not shortened or was deleted entirely.
    """
    # TODO: Process `d/changelog` once
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error("The Debian changelog was missing date in sign off line")
            try:
                entry_date = datetime.datetime.strptime(
                    block_date, "%a, %d %b %Y %H:%M:%S %z"
                ).date()
            except ValueError:
                # NOTE: _error is expected not to return (aborts the build),
                # so entry_date is always bound below.
                _error(
                    f"Invalid date in the changelog entry for version {block.version}: {block_date!r} (Expected format: 'Thu, 26 Feb 2026 00:00:00 +0000')"
                )
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                # Old enough to prune and we already have the minimum number
                # of entries; drop the rest.
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        # Nothing to rewrite; leave the file untouched.
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    with (
        path.replace_fs_path_content() as fs_path,
        open(fs_path, "w", encoding="utf-8") as fd,
    ):
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(f"""\
            # Older entries have been removed from this changelog.
            # To read the complete changelog use `apt changelog {package.name}`.
            """)
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        # binNMU entries go into an arch-qualified changelog next to the
        # trimmed one.
        with (
            parent_dir.add_file(
                f"{path.name}.{package.resolved_architecture}"
            ) as binnmu_changelog,
            open(
                binnmu_changelog.fs_path,
                "w",
                encoding="utf-8",
            ) as binnmu_fd,
        ):
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}
def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    """Normalize and trim the installed Debian changelog and NEWS files.

    For native packages, `changelog.Debian` is renamed to plain `changelog`
    (or an already-plain `changelog` is picked up). Old entries are pruned
    unless the `notrimdch` build option is set, and the NEWS file is pruned
    to the versions kept in the changelog.
    """
    pkg_doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not pkg_doc_dir:
        return
    dch = pkg_doc_dir.get("changelog.Debian")
    if is_native:
        if dch:
            # Native packages use the plain "changelog" name.
            dch.name = "changelog"
        else:
            dch = pkg_doc_dir.get("changelog")

    trim = "notrimdch" not in build_env.deb_build_options

    kept_versions = None
    pruned_changelog = False
    if dch and dch.has_fs_path:
        pruned_changelog, kept_versions = _prune_dch_file(
            dctrl, dch, True, None, trim=trim
        )

    if not trim:
        return

    news = pkg_doc_dir.get("NEWS.Debian")
    if news and news.has_fs_path and pruned_changelog:
        _prune_dch_file(dctrl, news, False, kept_versions)
436_UPSTREAM_CHANGELOG_SOURCE_DIRS = [
437 ".",
438 "doc",
439 "docs",
440]
441_UPSTREAM_CHANGELOG_NAMES = {
442 # The value is a priority to match the debhelper order.
443 # - The suffix weights heavier than the basename (because that is what debhelper did)
444 #
445 # We list the name/suffix in order of priority in the code. That makes it easier to
446 # see the priority directly, but it gives the "lowest" value to the most important items
447 f"{n}{s}": (sw, nw)
448 for (nw, n), (sw, s) in itertools.product(
449 enumerate(["changelog", "changes", "history"], start=1),
450 enumerate(["", ".txt", ".md", ".rst", ".org"], start=1),
451 )
452}
453_NONE_TUPLE = (None, (0, 0))
456def _detect_upstream_changelog(names: Iterable[str]) -> str | None:
457 matches = []
458 for name in names:
459 match_priority = _UPSTREAM_CHANGELOG_NAMES.get(name.lower())
460 if match_priority is not None:
461 matches.append((name, match_priority))
462 return min(matches, default=_NONE_TUPLE, key=operator.itemgetter(1))[0]
def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: InMemoryVirtualPathBase,
    source_fs_root: VirtualPath,
) -> None:
    """Ensure /usr/share/doc/<pkg>/changelog holds the upstream changelog.

    Prefers a changelog already installed into the package's doc dir
    (renaming the best candidate to "changelog"); otherwise copies the best
    candidate found in the source tree's usual locations.
    """
    doc_dir = f"./usr/share/doc/{dctrl_bin.name}"
    bdir = fs_root.lookup(doc_dir)
    if bdir and not bdir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if bdir:
        if bdir.get("changelog") or bdir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        installed_candidate = _detect_upstream_changelog(
            p.name
            for p in bdir.iterdir()
            if p.is_file and p.has_fs_path and p.size > 0
        )
        if installed_candidate:
            cl = bdir.lookup(installed_candidate)
            assert cl is not None  # Mostly as a typing hint
            cl.name = "changelog"
            return

    # Nothing usable inside the package; fall back to the source tree.
    for dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        src_dir = source_fs_root.lookup(dirname)
        if not src_dir or not src_dir.is_dir:
            continue
        src_candidate = _detect_upstream_changelog(
            p.name
            for p in src_dir.iterdir()
            if p.is_file and p.has_fs_path and p.size > 0
        )
        if src_candidate:
            if bdir is None:
                bdir = fs_root.mkdirs(doc_dir)
            bdir.insert_file_from_fs_path(
                "changelog",
                src_dir[src_candidate].fs_path,
            )
            break
@dataclasses.dataclass(slots=True)
class _ElfInfo:
    """Book-keeping for one ELF file during stripping/dbgsym extraction."""

    # The ELF file inside the package's virtual filesystem.
    path: VirtualPath
    # The materialized (writable) path on the real filesystem.
    fs_path: str
    # Whether file(1) reported the binary as stripped; None until resolved.
    is_stripped: bool | None = None
    # GNU build-id (hex) parsed from file(1) output; None if not found.
    build_id: str | None = None
    # The generated .debug file in the dbgsym package, once created.
    dbgsym: InMemoryVirtualPathBase | None = None
def _elf_static_lib_walk_filter(
    fs_path: VirtualPath,
    children: list[VP],
) -> bool:
    """Walk filter pruning paths that must not be considered for stripping.

    Mutates `children` in place to remove entries from the ongoing walk;
    returns False to stop descending into the current directory.
    """
    # Detached debug symbols (".build-id" under a "debug" dir) are skipped
    # entirely.
    if (
        fs_path.name == ".build-id"
        and assume_not_none(fs_path.parent_dir).name == "debug"
    ):
        children.clear()
        return False
    # Deal with some special cases, where certain files are not supposed to be stripped in a given directory
    # NOTE(review): `fs_path.name` never contains "/", so the
    # `endswith("debug/")` clause can never match — possibly intended to be
    # `fs_path.name == "debug"`; confirm before changing.
    if "debug/" in fs_path.path or fs_path.name.endswith("debug/"):
        # FIXME: We need a way to opt out of this per #468333/#1016122
        for so_file in (f for f in list(children) if f.name.endswith(".so")):
            children.remove(so_file)
    # Guile's *.go files are compiled Scheme objects, not stripping candidates.
    if "/guile/" in fs_path.path or fs_path.name == "guile":
        for go_file in (f for f in list(children) if f.name.endswith(".go")):
            children.remove(go_file)
    return True
@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[dict[str, _ElfInfo]]:
    """Context manager yielding all ELF files keyed by materialized fs path.

    Each ELF file is detached for in-place modification via
    `replace_fs_path_content()`; those contexts stay active for the whole
    duration of this context manager. Build-ids and stripped-ness are
    resolved before the mapping is yielded. Yields an empty dict when the
    package contains no ELF files.
    """
    all_elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not all_elf_files:
        yield {}
        return
    with ExitStack() as cm_stack:
        # Lazily enter every replace_fs_path_content() context; consumed by
        # the dict comprehension below.
        resolved = (
            (p, cm_stack.enter_context(p.replace_fs_path_content()))
            for p in all_elf_files
        )
        elf_info = {
            fs_path: _ElfInfo(
                path=assume_not_none(fs_root.lookup(detached_path.path)),
                fs_path=fs_path,
            )
            for detached_path, fs_path in resolved
        }
        _resolve_build_ids(elf_info)
        yield elf_info
def _find_all_static_libs(
    fs_root: InMemoryVirtualPathBase,
) -> Iterator[InMemoryVirtualPathBase]:
    """Yield static libraries (`ar` archives with ELF-like content)."""
    for path, children in fs_root.walk():
        # Matching the logic of dh_strip for now.
        if not _elf_static_lib_walk_filter(path, children):
            continue
        if not path.is_file or not path.has_fs_path:
            continue
        basename = path.name
        if basename.startswith("lib") and basename.endswith("_g.a"):
            # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
            # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
            # symbols")
            continue
        with path.open(byte_io=True) as fd:
            if fd.read(8) not in (b"!<arch>\n", b"!<thin>\n"):
                continue
            # Maybe we should see if the first file looks like an index file.
            # Three random .a samples suggests the index file is named "/"
            # Not sure if we should skip past it and then do the ELF check or just assume
            # that "index => static lib".
            sample = fd.read(1024 * 1024)
            if b"\0" not in sample and ELF_MAGIC not in sample:
                continue
        yield path
@contextlib.contextmanager
def _all_static_libs(fs_root: InMemoryVirtualPathBase) -> Iterator[list[str]]:
    """Context manager materializing every static lib for in-place edits.

    Yields the materialized fs paths; the underlying
    `replace_fs_path_content()` contexts remain open until the caller exits
    this context manager. Yields an empty list when there are none.
    """
    static_libs = list(_find_all_static_libs(fs_root))
    if not static_libs:
        yield []
        return
    with ExitStack() as stack:
        fs_paths: list[str] = []
        for lib in static_libs:
            fs_paths.append(stack.enter_context(lib.replace_fs_path_content()))
        yield fs_paths
# Extracts the hex build-id from file(1) output, e.g. "BuildID[sha1]=abc123".
_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)")


def _resolve_build_ids(elf_info: dict[str, _ElfInfo]) -> None:
    """Populate `is_stripped` and `build_id` on each _ElfInfo via file(1).

    Runs `file -00 -N` over the paths in xargs-style batches and parses the
    NUL-delimited "<path>\\0<verdict>\\0" pairs it emits.
    """
    static_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        # Presumably file(1)'s sandbox conflicts with fakeroot's LD_PRELOAD
        # interception — TODO confirm rationale.
        static_cmd.append("--no-sandbox")

    for cmd in xargs(static_cmd, (i.fs_path for i in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        lines = output.rstrip(b"\0").split(b"\0")

        for fs_path_b, verdict in grouper(lines, 2, incomplete="strict"):
            fs_path = fs_path_b.decode("utf-8")
            info = elf_info[fs_path]
            info.is_stripped = b"not stripped" not in verdict
            m = _FILE_BUILD_ID_RE.search(verdict)
            if m:
                info.build_id = m.group(1).decode("utf-8")
def _make_debug_file(
    objcopy: str,
    fs_path: str,
    build_id: str,
    dbgsym_fs_root: InMemoryVirtualPathBase,
) -> InMemoryVirtualPathBase:
    """Extract the debug sections of an ELF file into the dbgsym package.

    The .debug file is placed at
    `usr/lib/debug/.build-id/<id[:2]>/<id[2:]>.debug`; if a file for this
    build-id already exists, it is reused.

    :param objcopy: The objcopy executable (possibly a cross variant).
    :param fs_path: Materialized fs path of the (unstripped) ELF file.
    :param build_id: The ELF file's GNU build-id (hex string).
    :param dbgsym_fs_root: Root of the dbgsym package being populated.
    :return: The virtual path of the created (or pre-existing) .debug file.
    """
    dbgsym_dirname = f"./usr/lib/debug/.build-id/{build_id[0:2]}/"
    dbgsym_basename = f"{build_id[2:]}.debug"
    dbgsym_dir = dbgsym_fs_root.mkdirs(dbgsym_dirname)
    if dbgsym_basename in dbgsym_dir:
        # Already generated for another binary with the same build-id.
        return dbgsym_dir[dbgsym_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with dbgsym_dir.add_file(
        dbgsym_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as dbgsym:
        try:
            subprocess.check_call(
                [
                    objcopy,
                    "--only-keep-debug",
                    "--compress-debug-sections",
                    fs_path,
                    dbgsym.fs_path,
                ]
            )
        except subprocess.CalledProcessError:
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, dbgsym.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return dbgsym
673def _strip_binary(strip: str, options: list[str], paths: Iterable[str]) -> None:
674 # We assume the paths are obtained via `p.replace_fs_path_content()`,
675 # which is the case at the time of written and should remain so forever.
676 it = iter(paths)
677 first = next(it, None)
678 if first is None:
679 return
680 static_cmd = [strip]
681 static_cmd.extend(options)
683 for cmd in xargs(static_cmd, itertools.chain((first,), (f for f in it))):
684 _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}")
685 try:
686 subprocess.check_call(
687 cmd,
688 stdin=subprocess.DEVNULL,
689 restore_signals=True,
690 )
691 except subprocess.CalledProcessError:
692 _error(
693 f"Attempting to remove ELF debug info failed. Please review the error from {strip} above"
694 f" understand what went wrong."
695 )
def _attach_debug(
    objcopy: str, elf_binary: VirtualPath, dbgsym: InMemoryVirtualPathBase
) -> None:
    """Attach a GNU debuglink from a stripped binary to its .debug file.

    Runs `objcopy --add-gnu-debuglink`, aborting the build with a
    user-facing error if objcopy fails.

    :param objcopy: The objcopy executable (possibly a cross variant).
    :param elf_binary: The stripped ELF binary in the package.
    :param dbgsym: The .debug file in the dbgsym package.
    """
    dbgsym_fs_path: str
    with dbgsym.replace_fs_path_content() as dbgsym_fs_path:
        cmd = [objcopy, "--add-gnu-debuglink", dbgsym_fs_path, elf_binary.fs_path]
        print_command(*cmd)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above to understand what went wrong."
            )
714@functools.lru_cache
715def _has_tool(tool: str) -> bool:
716 return shutil.which(tool) is not None
def _run_dwz(
    dctrl: BinaryPackage,
    dbgsym_fs_root: InMemoryVirtualPathBase,
    unstripped_elf_info: list[_ElfInfo],
) -> None:
    """Deduplicate DWARF debug info across the package's ELF files via dwz.

    No-op for udebs, when there is nothing to process, or when dwz is not
    installed. With more than one ELF file, a shared multifile is produced
    and — when non-empty — shipped in the dbgsym package at
    `usr/lib/debug/.dwz/<multiarch>/<package>.debug`.
    """
    if not unstripped_elf_info or dctrl.is_udeb or not _has_tool("dwz"):
        return
    dwz_cmd = ["dwz"]
    dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
    dwz_ma_basename = f"{dctrl.name}.debug"
    multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
    build_time_multifile = None
    if len(unstripped_elf_info) > 1:
        # Multifile mode: -m is the build-time location dwz writes to,
        # -M is the path referenced from the processed ELF files (the
        # installed location).
        fs_content_dir = generated_content_dir()
        fd, build_time_multifile = mkstemp(suffix=dwz_ma_basename, dir=fs_content_dir)
        # Only the path is needed; dwz opens the file itself.
        os.close(fd)
        dwz_cmd.append(f"-m{build_time_multifile}")
        dwz_cmd.append(f"-M/{multifile}")

    # TODO: configuration for disabling multi-file and tweaking memory limits

    dwz_cmd.extend(e.fs_path for e in unstripped_elf_info)

    _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
    try:
        subprocess.check_call(dwz_cmd)
    except subprocess.CalledProcessError:
        _error(
            "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
            " to understand what went wrong."
        )
    if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
        # Only ship the multifile if dwz actually put something into it.
        dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
        dwz_dir.insert_file_from_fs_path(
            dwz_ma_basename,
            build_time_multifile,
            mode=0o644,
            require_copy_on_write=False,
            follow_symlinks=False,
        )
def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: InMemoryVirtualPathBase,
    dbgsym_fs_root: VirtualPath,
    *,
    run_dwz: bool = False,
) -> list[str]:
    """Strip ELF files/static libs and move debug info to the dbgsym package.

    Static libraries are stripped of debug info; ELF binaries get their
    debug sections extracted into build-id keyed .debug files (optionally
    deduplicated with dwz first), are stripped, and then receive a
    gnu-debuglink to the extracted file.

    :param dctrl: The binary package being processed.
    :param package_fs_root: The package contents (mutated in place).
    :param dbgsym_fs_root: The dbgsym package contents to populate.
    :param run_dwz: Whether to run dwz before extracting debug files.
    :return: Sorted, de-duplicated build-ids of the relocated debug info.
    """
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        if run_dwz:
            _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        # Extract .debug files before stripping removes the debug sections.
        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When run strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

    # Set for uniqueness
    all_debug_info = sorted(
        {assume_not_none(i.build_id) for i in unstripped_elf_info}
    )

    dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
    dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
    return all_debug_info
def run_package_processors(
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
) -> None:
    """Run every applicable plugin-provided package processor, in order."""
    binary_package = package_metadata_context.binary_package
    processors = manifest.plugin_provided_feature_set.package_processors_in_order()
    for processor in processors:
        if processor.applies_to(binary_package):
            processor.run_package_processor(fs_root, None, package_metadata_context)
def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: "HighLevelManifest",
) -> None:
    """Compute shlibs data and record shlibs details across all packages.

    Combines `debian/shlibs.local` with the shlibs entries computed for
    every arch-dependent deb in the build, materializes the combined result
    as a generated `shlibs.local` for dpkg-shlibdeps, and stores the shlibs
    details on every package's control-file creator. dpkg-gensymbols errors
    are collected and reported together before aborting.
    """
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: list[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        # Seed the combined shlibs with the maintainer-provided entries.
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        # Only arch-dependent, buildable, non-udeb packages get shlibs
        # processing.
        if (
            binary_package.is_arch_all
            or not binary_package.should_be_acted_on
            or binary_package.is_udeb
        ):
            continue
        fs_root = binary_package_data.fs_root
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = (
            binary_package_data.package_metadata_context.related_udeb_package
        )

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                binary_package,
                binary_package_data.control_output_dir.fs_path,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            # Collect and keep going so all errors are reported in one run.
            errors.append(e.message)
        else:
            if soname_info_list:
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "w", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    # Record the shlibs details on every acted-on deb (udebs included here).
    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )
941def _relevant_service_definitions(
942 service_rule: ServiceRule,
943 service_managers: list[str] | frozenset[str],
944 by_service_manager_key: Mapping[
945 tuple[str, str, str, str], tuple[ServiceManagerDetails, ServiceDefinition[Any]]
946 ],
947 aliases: Mapping[str, Sequence[tuple[str, str, str, str]]],
948) -> Iterable[tuple[tuple[str, str, str, str], ServiceDefinition[Any]]]:
949 as_keys = (key for key in aliases[service_rule.service])
951 pending_queue = {
952 key
953 for key in as_keys
954 if key in by_service_manager_key
955 and service_rule.applies_to_service_manager(key[-1])
956 }
957 relevant_names: dict[tuple[str, str, str, str], ServiceDefinition[Any]] = {}
958 seen_keys = set()
960 if not pending_queue:
961 service_manager_names = ", ".join(sorted(service_managers))
962 _error(
963 f"No none of the service managers ({service_manager_names}) detected a service named"
964 f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope}),"
965 f" but the manifest definition at {service_rule.definition_source} requested that."
966 )
968 while pending_queue:
969 next_key = pending_queue.pop()
970 seen_keys.add(next_key)
971 _, definition = by_service_manager_key[next_key]
972 yield next_key, definition
973 for name in definition.names:
974 for target_key in aliases[name]:
975 if (
976 target_key not in seen_keys
977 and service_rule.applies_to_service_manager(target_key[-1])
978 ):
979 pending_queue.add(target_key)
981 return relevant_names.items()
def handle_service_management(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:
    """Detect services in the package and have service managers integrate them.

    Runs in three phases:

      1. Detection: every plugin-provided service manager scans ``fs_root``
         and registers the services it recognizes.
      2. Rule application: the manifest's requested service rules are folded
         into the detected definitions (replacing them in place).
      3. Integration: each service manager receives its final definitions so
         it can emit maintscript snippets/triggers for the package.

    Calls `_error` (which does not return) on manifest/service mismatches.
    """
    # Maps (name, type-of-service, scope, service-manager) -> (manager details, definition).
    by_service_manager_key = {}
    # Maps each service name/alias to every key above answering to that name.
    aliases_by_name = collections.defaultdict(list)

    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    # Up-front validation: reject rules naming unknown service managers.
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )

    # Phase 1: detection.
    for service_manager_details in feature_set.service_managers.values():
        service_registry: ServiceRegistryImpl = ServiceRegistryImpl(
            service_manager_details
        )
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )

        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue

        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )

            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)

    # Phase 2: apply manifest rules to the detected definitions.
    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            # The last component of the key is the service manager name.
            sm = service_key[-1]
            seen_service_managers.add(sm)
            # Replace the stored definition with the rule-amended one.
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )
        # A rule that explicitly names service managers must have matched a
        # service in each of them; otherwise the manifest is inconsistent.
        # NOTE(review): assumes `service_managers` is a set-like type (the
        # `-` below is set difference) — confirm against the rule's type.
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition match the relevant services."
            )

    # Group the final definitions per service manager for integration.
    per_service_manager = {}

    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)

    # Phase 3: integration — each service manager emits its control snippets.
    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )
def setup_control_files(
    binary_package_data: "BinaryPackageData",
    manifest: "HighLevelManifest",
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: list[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    """Produce the DEBIAN/ control files for a binary package.

    In the normal mode (``allow_ctrl_file_management=True``), debputy runs
    alternatives/debconf/service handling, all metadata detectors and then
    generates maintainer scripts, conffiles and the control file itself.

    With ``allow_ctrl_file_management=False`` (debhelper integration after
    ``dh_installdeb`` has run), debputy must not generate maintscripts or
    triggers itself: any manifest-requested snippet/trigger is a hard error,
    the already-staged files from ``debian/<pkg>/DEBIAN`` are copied over and
    only the dpkg-shlibdeps detector plus control/md5sums generation run.
    """
    binary_package = package_metadata_context.binary_package
    control_output_dir = binary_package_data.control_output_dir
    control_output_fs_path = control_output_dir.fs_path
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)

    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars

    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())

    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        # udebs only support a postinst (per the reduced udeb policy).
        snippets = ["postinst"]

    if allow_ctrl_file_management:
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_fs_path,
        )

        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )

        # Run every applicable metadata/maintscript detector from all plugins.
        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )

        # Materialize the accumulated snippets as maintainer scripts.
        for script in snippets:
            _generate_snippet(
                control_output_fs_path,
                script,
                package_state.maintscript_snippets,
            )

    else:
        # Restricted integration mode: reject anything that would require
        # debputy to write maintscripts or triggers (dh_installdeb already ran).
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )
        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )
        # Only the dpkg-shlibdeps detector runs in this mode.
        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]

        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)

        # Copy the control files dh_installdeb staged (except control/md5sums,
        # which debputy regenerates itself below).
        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            existing_control_files = []

        if existing_control_files:
            # `cp -a` preserves permissions of the staged files.
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_fs_path)
            print_command(*cmd)
            subprocess.check_call(cmd)

    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return

    if generated_triggers:
        # Triggers are only possible in the managed mode (enforced above).
        assert allow_ctrl_file_management
        dest_file = os.path.join(control_output_fs_path, "triggers")
        with open(dest_file, "a", encoding="utf-8") as fd:
            fd.writelines(textwrap.dedent(f"""\
                # Added by {t.provider_source_id} from {t.provider.plugin_name}
                {t.dpkg_trigger_type} {t.dpkg_trigger_target}
                """) for t in generated_triggers)
            os.chmod(fd.fileno(), 0o644)

    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            control_output_dir,
            fs_root,
            package_state.reserved_packager_provided_files,
        )

    _generate_control_files(
        binary_package_data,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )
1271def _generate_snippet(
1272 control_output_dir: str,
1273 script: str,
1274 maintscript_snippets: dict[str, MaintscriptSnippetContainer],
1275) -> None:
1276 debputy_snippets = maintscript_snippets.get(script)
1277 if debputy_snippets is None:
1278 return
1279 reverse = script in ("prerm", "postrm")
1280 snippets = [
1281 debputy_snippets.generate_snippet(reverse=reverse),
1282 debputy_snippets.generate_snippet(snippet_order="service", reverse=reverse),
1283 ]
1284 if reverse:
1285 snippets = reversed(snippets)
1286 full_content = "".join(f"{s}\n" for s in filter(None, snippets))
1287 if not full_content:
1288 return
1289 filename = os.path.join(control_output_dir, script)
1290 with open(filename, "w") as fd:
1291 fd.write("#!/bin/sh\nset -e\n\n")
1292 fd.write(full_content)
1293 os.chmod(fd.fileno(), 0o755)
def _add_conffiles(
    ctrl_root: VirtualPathBase,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    """Append the given conffile paths to the package's DEBIAN/conffiles.

    The conffiles file is opened (in append mode) only when there is at least
    one match, so an empty iterable leaves the control root untouched.
    """
    matches = iter(conffile_matches)
    try:
        head = next(matches)
    except StopIteration:
        # Nothing to record; do not create an empty conffiles file.
        return
    with ctrl_root.open_child("conffiles", "at") as fd:
        for match in itertools.chain([head], matches):
            assert match.is_file
            fd.write(f"{match.absolute}\n")
def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    """Ensure misc:Depends / misc:Pre-Depends exist, defaulting them to ""."""
    for substvar_name in ("misc:Depends", "misc:Pre-Depends"):
        if substvar_name in substvars:
            # Never clobber an existing value.
            continue
        substvars[substvar_name] = ""
def compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size

    Returns the size in KiB: files and symlinks contribute their size rounded
    up to whole KiB blocks, every other path (directories, devices, ...)
    contributes a flat 1 KiB. Hard links are only counted once.
    """
    size_in_kb = 0
    # (st_dev, st_ino) pairs already counted, to de-duplicate hard links.
    hard_links = set()
    for path in fs_root.all_paths():
        if path.is_symlink or path.is_file:
            try:
                # If it is a VirtualPathBase instance, the use its `.stat()` method
                # since it might have the stat cached as a minor optimization on disk
                # access. Other than that, the `os.lstat` fallback is sufficient.
                if isinstance(path, VirtualPathBase):
                    st = path.stat()
                else:
                    st = os.lstat(path.fs_path)
                if st.st_nlink > 1:
                    hl_key = (st.st_dev, st.st_ino)
                    if hl_key in hard_links:
                        # Already accounted for via another link.
                        continue
                    hard_links.add(hl_key)
                size = st.st_size
            except PureVirtualPathError:
                # We just assume it is not a hard link when the path is purely virtual
                size = path.size
            # Round up to whole KiB blocks (dpkg counts partial blocks fully).
            path_size = (size + 1023) // 1024
        else:
            path_size = 1
        size_in_kb += path_size
    return size_in_kb
def _generate_dbgsym_control_file_if_relevant(
    binary_package: BinaryPackage,
    dbgsym_fs_root: VirtualPath,
    dbgsym_control_dir: FSControlRootDir,
    dbgsym_ids: str,
    multi_arch: str | None,
    dctrl: str,
    extra_common_params: Sequence[str],
) -> None:
    """Generate the DEBIAN/control file for the ``-dbgsym`` companion package.

    Invokes dpkg-gencontrol with the main package's stanza (or the generated
    replacement stanza in ``dctrl``), overriding/clearing fields so the result
    describes the debug-symbols package.

    :param dbgsym_ids: Space-separated build-ids for the Build-Ids field.
    :param multi_arch: The main package's Multi-Arch value (only "same" is
        propagated; see lintian's debug-package-for-multi-arch-same-pkg-...).
    :param dctrl: The debian/control (or replacement) file to read from.
    :param extra_common_params: Extra dpkg-gencontrol args shared with the
        main package's invocation.
    """
    section = binary_package.archive_section
    component = ""
    extra_params = []
    # Non-main components get their prefix carried over into the
    # "<component>/debug" section (e.g. "contrib/debug").
    if section is not None and "/" in section and not section.startswith("main/"):
        component = section.split("/", 1)[1] + "/"
    if multi_arch != "same":
        extra_params.append("-UMulti-Arch")
    else:
        extra_params.append(f"-DMulti-Arch={multi_arch}")
    # Relationship fields of the main package do not apply to the dbgsym.
    extra_params.append("-UReplaces")
    extra_params.append("-UBreaks")
    dbgsym_control_fs_path = dbgsym_control_dir.fs_path
    ensure_dir(dbgsym_control_fs_path)
    # Pass Installed-Size via cmd-line to make it more visible that we are
    # providing the value (and so it is computed from the dbgsym contents,
    # not inherited from the main package).
    total_size = compute_installed_size(dbgsym_fs_root) + compute_installed_size(
        dbgsym_control_dir
    )
    extra_params.append(f"-VInstalled-Size={total_size}")
    extra_params.extend(extra_common_params)

    package = binary_package.name
    # A generated replacement control file already contains a dedicated
    # "<pkg>-dbgsym" stanza; the stock debian/control does not.
    package_selector = (
        binary_package.name
        if dctrl == "debian/control"
        else f"{binary_package.name}-dbgsym"
    )
    dpkg_cmd = [
        "dpkg-gencontrol",
        f"-p{package_selector}",
        # FIXME: Support d/<pkg>.changelog at some point.
        "-ldebian/changelog",
        "-T/dev/null",
        f"-c{dctrl}",
        f"-O{dbgsym_control_fs_path}/control",
        # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
        "-P/non-existent",
        f"-DPackage={package}-dbgsym",
        "-DDepends=" + package + " (= ${binary:Version})",
        f"-DDescription=debug symbols for {package}",
        f"-DSection={component}debug",
        f"-DBuild-Ids={dbgsym_ids}",
        "-UPre-Depends",
        "-URecommends",
        "-USuggests",
        "-UEnhances",
        "-UProvides",
        "-UEssential",
        "-UConflicts",
        "-DPriority=optional",
        "-UHomepage",
        "-UImportant",
        "-UBuilt-Using",
        "-UStatic-Built-Using",
        "-DAuto-Built-Package=debug-symbols",
        "-UProtected",
        *extra_params,
    ]
    print_command(*dpkg_cmd)
    try:
        subprocess.check_call(dpkg_cmd)
    except subprocess.CalledProcessError:
        # Fixed: the previous message concatenation produced a double space
        # ("from  dpkg-gencontrol").
        _error(
            f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output"
            " from dpkg-gencontrol above to understand what went wrong."
        )
    os.chmod(os.path.join(dbgsym_control_fs_path, "control"), 0o644)
1426def _all_parent_directories_of(directories: Iterable[str]) -> set[str]:
1427 result = {"."}
1428 for path in directories:
1429 current = os.path.dirname(path)
1430 while current and current not in result:
1431 result.add(current)
1432 current = os.path.dirname(current)
1433 return result
def _compute_multi_arch_for_arch_all_doc(
    binary_package: BinaryPackage,
    fs_root: InMemoryVirtualPathBase,
) -> str | None:
    """Return "foreign" for arch:all pure documentation packages, else None."""
    # We limit by package name, since there are tricks involving a `Multi-Arch: no` depending on a
    # `Multi-Arch: same` to emulate `Multi-Arch: allowed`. Said `Multi-Arch: no` can have no contents.
    #
    # That case seems unrealistic for -doc/-docs packages and accordingly the limitation here.
    if not binary_package.name.endswith(("-doc", "-docs")):
        return None
    no_descend_allowlist = {
        "./usr/share/doc",
    }
    file_allowlist = {f"./usr/share/lintian/overrides/{binary_package.name}"}
    has_unexpected_content = _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_allowlist,
        acceptable_files=file_allowlist,
    )
    return None if has_unexpected_content else "foreign"
def _any_unacceptable_paths(
    fs_root: InMemoryVirtualPathBase,
    *,
    acceptable_no_descend_paths: list[str] | AbstractSet[str] = frozenset(),
    acceptable_files: list[str] | AbstractSet[str] = frozenset(),
) -> bool:
    """Check whether ``fs_root`` contains anything outside the allow-lists.

    :param acceptable_no_descend_paths: Directories whose entire subtree is
        acceptable (not descended into).
    :param acceptable_files: Individual paths that are acceptable.
    :return: True when some path is covered by neither allow-list (parent
        directories of allow-listed entries are implicitly acceptable).
    """
    allowed_intermediate_dirs = _all_parent_directories_of(
        itertools.chain(acceptable_no_descend_paths, acceptable_files)
    )
    for dir_path, children in fs_root.walk():
        current = dir_path.path
        if current in acceptable_no_descend_paths:
            # Whole subtree is acceptable; prune the walk here.
            children.clear()
        elif (
            current not in allowed_intermediate_dirs
            and current not in acceptable_files
        ):
            return True
    return False
def auto_compute_multi_arch(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    fs_root: InMemoryVirtualPathBase,
) -> str | None:
    """Heuristically derive a Multi-Arch value for the package.

    Returns "same" or "foreign" when the contents make the value safe to
    assume, and None when no automatic value should be set.
    """
    resolved_arch = binary_package.resolved_architecture
    # Any maintainer script disqualifies the package: scripts run once per
    # package, not once per architecture instance.
    for script in ALL_CONTROL_SCRIPTS:
        maintscript = control_output_dir.get(script)
        if maintscript is not None and maintscript.is_file:
            return None

    if resolved_arch == "all":
        return _compute_multi_arch_for_arch_all_doc(binary_package, fs_root)

    resolved_multiarch = binary_package.deb_multiarch
    assert resolved_arch != "all"
    no_descend_allowlist = {
        f"./usr/lib/{resolved_multiarch}",
        f"./usr/include/{resolved_multiarch}",
    }
    file_allowlist = {
        f"./usr/share/doc/{binary_package.name}/{basename}"
        for basename in (
            "copyright",
            "changelog.gz",
            "changelog.Debian.gz",
            f"changelog.Debian.{resolved_arch}.gz",
            "NEWS.Debian",
            "NEWS.Debian.gz",
            "README.Debian",
            "README.Debian.gz",
        )
    }

    # Note that the lintian-overrides file is deliberately omitted from the allow-list. We would have to know that the
    # override does not use architecture segments. With pure debputy, this is guaranteed (debputy
    # does not allow lintian-overrides with architecture segment). However, with a mixed debhelper + debputy,
    # `dh_lintian` allows it with compat 13 or older.

    if _any_unacceptable_paths(
        fs_root,
        acceptable_no_descend_paths=no_descend_allowlist,
        acceptable_files=file_allowlist,
    ):
        return None

    return "same"
@functools.lru_cache
def _has_t64_enabled() -> bool:
    """Check whether dpkg-buildflags reports the time64 ABI feature as enabled.

    Returns False when dpkg-buildflags is unavailable or fails. Cached for
    the lifetime of the process.
    """
    query = ["dpkg-buildflags", "--query-features", "abi"]
    try:
        raw_output = subprocess.check_output(query).decode()
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False
    return any(
        stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes"
        for stanza in Deb822.iter_paragraphs(raw_output)
    )
def _t64_migration_substvar(
    binary_package: BinaryPackage,
    control_output_dir: VirtualPath,
    substvars: FlushableSubstvars,
) -> None:
    """Set the time64 migration Provides substvar for t64 library packages.

    Applies only to packages that look like t64 renames (by name, or via an
    explicit ``X-Time64-Compat`` field) and that ship a symbols/shlibs file.
    On architectures where the rename is ABI-neutral (non-32-bit, or time64
    disabled), the package Provides its pre-rename name; otherwise the
    substvar is merely ensured to exist (empty).
    """
    name = binary_package.name
    compat_name = binary_package.fields.get("X-Time64-Compat")
    if compat_name is None and not _T64_REGEX.match(name):
        return

    # Only shared-library packages (those with symbols/shlibs metadata) take
    # part in the migration.
    if not any(
        p.is_file
        for n in ["symbols", "shlibs"]
        if (p := control_output_dir.get(n)) is not None
    ):
        return

    if compat_name is None:
        compat_name = name.replace("t64", "", 1)
        if compat_name == name:
            raise AssertionError(
                f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
                " As a work around, you can explicitly provide a X-Time64-Compat header in debian/control"
                " where you specify the desired compat name."
            )

    arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")

    if arch_bits != "32" or not _has_t64_enabled():
        # Fixed: the braces around the substvar must be escaped ({{...}});
        # the previous literal was parsed as an f-string replacement field
        # and raised a NameError at runtime.
        substvars.add_dependency(
            _T64_PROVIDES,
            f"{compat_name} (= ${{binary:Version}})",
        )
    elif _T64_PROVIDES not in substvars:
        substvars[_T64_PROVIDES] = ""
@functools.lru_cache
def dpkg_field_list_pkg_dep() -> Sequence[str]:
    """Return dpkg's list of inter-package relationship field names.

    Obtained by querying Dpkg::Control::Fields via perl; `_error` (which does
    not return) is invoked when the query cannot be run. Cached for the
    lifetime of the process.
    """
    perl_cmd = [
        "perl",
        "-MDpkg::Control::Fields",
        "-e",
        r'print "$_\n" for field_list_pkg_dep',
    ]
    try:
        raw_output = subprocess.check_output(perl_cmd)
    except (FileNotFoundError, subprocess.CalledProcessError):
        _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
    return raw_output.decode("utf-8").splitlines(keepends=False)
# Fields that take part in automatic substvar merging even though `dpkg`
# itself does not know them (they get an `XB-` prefix before being handed to
# dpkg-gencontrol; see `_handle_auto_substvars`).
_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG = {
    "Commands",
}
@functools.lru_cache
def all_auto_substvars() -> Sequence[str]:
    """Return all field names subject to automatic substvar merging.

    This is dpkg's own list of inter-package relationship fields plus the
    extra fields debputy supports that dpkg does not. Cached since the set
    is invariant for the lifetime of the process.
    """
    # `list(...)` instead of the previous copying comprehension (PERF402).
    result = list(dpkg_field_list_pkg_dep())
    result.extend(_SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG)
    return tuple(result)
def _handle_auto_substvars(
    source: SourcePackage,
    dctrl_file: BinaryPackage,
    substvars: FlushableSubstvars,
    has_dbgsym: bool,
) -> str | None:
    """Merge relationship substvars into the binary stanza where required.

    Scans the package's substvars for field-scoped ones (e.g. "foo:Depends")
    that dpkg-gencontrol does not merge automatically and splices references
    to them into a rewritten control stanza. When a dbgsym package exists and
    substvars use the "$=" / "!=" operators, a minimal dbgsym stanza is also
    emitted to avoid substvar warnings.

    :return: The path of a generated replacement control file to pass to
        dpkg-gencontrol, or None when the stock debian/control suffices.
    """
    auto_substvars_fields = all_auto_substvars()
    auto_substvars_fields_lc = {x.lower(): x for x in auto_substvars_fields}
    # lowercase field name -> set of "${substvar}" references to splice in.
    substvar_fields = collections.defaultdict(set)
    needs_dbgsym_stanza = False
    for substvar_name, substvar in substvars.as_substvar.items():
        if ":" not in substvar_name:
            # Not field-scoped; nothing to merge.
            continue
        if substvar.assignment_operator in ("$=", "!="):
            # Will create incorrect results if there is a dbgsym and we do nothing
            needs_dbgsym_stanza = True

        if substvar.assignment_operator == "$=":
            # Automatically handled; no need for manual merging.
            continue
        _, field = substvar_name.rsplit(":", 1)
        field_lc = field.lower()
        if field_lc not in auto_substvars_fields_lc:
            continue
        substvar_fields[field_lc].add("${" + substvar_name + "}")

    if not has_dbgsym:
        needs_dbgsym_stanza = False

    if not substvar_fields and not needs_dbgsym_stanza:
        # Nothing to rewrite; caller falls back to debian/control.
        return None

    replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)

    for field_name in auto_substvars_fields:
        field_name_lc = field_name.lower()
        addendum = substvar_fields.get(field_name_lc)
        if addendum is None:
            # No merging required
            continue
        # Sorted for deterministic (reproducible) output.
        substvars_part = ", ".join(sorted(addendum))
        existing_value = replacement_stanza.get(field_name)

        if existing_value is None or existing_value.isspace():
            final_value = substvars_part
        else:
            # Strip a trailing comma before appending to avoid ", ,".
            existing_value = existing_value.rstrip().rstrip(",")
            final_value = f"{existing_value}, {substvars_part}"
        replacement_stanza[field_name] = final_value
        canonical_field_name = auto_substvars_fields_lc.get(field_name_lc)
        # If `dpkg` does not know the field, we need to inject `XB-` in front
        # of it.
        if (
            canonical_field_name
            and canonical_field_name in _SUBSTVARS_FIELDS_NOT_SUPPORTED_BY_DPKG
        ):
            replacement_stanza[f"XB-{canonical_field_name}"] = replacement_stanza[
                field_name
            ]
            del replacement_stanza[field_name]

    # Keep Description last, matching conventional stanza layout.
    with suppress(KeyError):
        replacement_stanza.order_last("Description")

    tmpdir = generated_content_dir(package=dctrl_file)
    # delete=False: the file must outlive this function (dpkg-gencontrol
    # reads it later); its name is the return value.
    with tempfile.NamedTemporaryFile(
        mode="wb",
        dir=tmpdir,
        suffix="__DEBIAN_control",
        delete=False,
    ) as fd:
        try:
            # Prefer the field object's own dump when it has one.
            cast("Any", source.fields).dump(fd)
        except AttributeError:
            debian.deb822.Deb822(source.fields).dump(fd)
        fd.write(b"\n")
        replacement_stanza.dump(fd)

        if has_dbgsym:
            # Minimal stanza to avoid substvars warnings. Most fields are still set
            # via -D.
            dbgsym_stanza = Deb822()
            dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
            dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
            dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
            fd.write(b"\n")
            dbgsym_stanza.dump(fd)

    return fd.name
def _generate_control_files(
    binary_package_data: "BinaryPackageData",
    package_state: "PackageTransformationDefinition",
    control_output_dir: FSControlRootDir,
    fs_root: InMemoryVirtualPathBase,
    substvars: FlushableSubstvars,
    dbgsym_root_fs: VirtualPath | None,
    dbgsym_build_ids: list[str] | None,
) -> None:
    """Run dpkg-gencontrol to produce DEBIAN/control (and the dbgsym variant).

    Computes Installed-Size and (when possible) an automatic Multi-Arch value,
    flushes the substvars, generates the dbgsym package's control/md5sums when
    there is dbgsym content, and finally invokes dpkg-gencontrol for the main
    package. ``dbgsym_root_fs``/``dbgsym_build_ids`` are None for udebs.
    """
    binary_package = binary_package_data.binary_package
    source_package = binary_package_data.source_package
    package_name = binary_package.name
    extra_common_params = []
    extra_params_specific = []
    _ensure_base_substvars_defined(substvars)
    if "Installed-Size" not in substvars:
        # Pass it via cmd-line to make it more visible that we are providing the
        # value. It also prevents the dbgsym package from picking up this value.
        total_size = compute_installed_size(fs_root) + compute_installed_size(
            control_output_dir
        )
        extra_params_specific.append(f"-VInstalled-Size={total_size}")

    ma_value = binary_package.fields.get("Multi-Arch")
    if not binary_package.is_udeb and ma_value is None:
        # No explicit Multi-Arch field; try deriving one from the contents.
        ma_value = auto_compute_multi_arch(binary_package, control_output_dir, fs_root)
        if ma_value is not None:
            _info(
                f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based'
                ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field'
                ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the'
                ' relevant part of "debian/control" to disable this feature.'
            )
            # We want this to apply to the `-dbgsym` package as well to avoid
            # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable`
            extra_common_params.append(f"-DMulti-Arch={ma_value}")
    elif ma_value == "no":
        # Explicit "Multi-Arch: no": drop the field entirely ("no" is implied).
        extra_common_params.append("-UMulti-Arch")

    dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else ""
    if package_state.binary_version is not None:
        extra_common_params.append(f"-v{package_state.binary_version}")

    _t64_migration_substvar(binary_package, control_output_dir, substvars)

    with substvars.flush() as flushed_substvars:
        # dbgsym generation is only relevant when there is actual content.
        has_dbgsym = dbgsym_root_fs is not None and any(
            f for f in dbgsym_root_fs.all_paths() if f.is_file
        )
        dctrl_file = _handle_auto_substvars(
            source_package,
            binary_package,
            substvars,
            has_dbgsym,
        )
        if dctrl_file is None:
            dctrl_file = "debian/control"

        if has_dbgsym:
            assert dbgsym_root_fs is not None  # mypy hint
            dbgsym_ctrl_dir = binary_package_data.dbgsym_info.dbgsym_ctrl_dir
            _generate_dbgsym_control_file_if_relevant(
                binary_package,
                dbgsym_root_fs,
                dbgsym_ctrl_dir,
                dbgsym_ids,
                ma_value,
                dctrl_file,
                extra_common_params,
            )
            generate_md5sums_file(
                dbgsym_ctrl_dir,
                dbgsym_root_fs,
            )
        elif dbgsym_ids:
            # Build-ids exist but the dbgsym content lives elsewhere; record
            # them on the main package instead.
            extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}")

        ctrl_file = os.path.join(control_output_dir.fs_path, "control")
        dpkg_cmd = [
            "dpkg-gencontrol",
            f"-p{package_name}",
            # FIXME: Support d/<pkg>.changelog at some point.
            "-ldebian/changelog",
            f"-c{dctrl_file}",
            f"-T{flushed_substvars}",
            f"-O{ctrl_file}",
            # Use a placeholder for -P to ensure failure if we forgot to override a path parameter
            "-P/non-existent",
            *extra_common_params,
            *extra_params_specific,
        ]
        print_command(*dpkg_cmd)
        try:
            subprocess.check_call(dpkg_cmd)
        except subprocess.CalledProcessError:
            # NOTE(review): the concatenation below yields a double space
            # ("from  dpkg-gencontrol") — cosmetic only.
            _error(
                f"Attempting to generate DEBIAN/control file for {package_name} failed. Please review the output from "
                " dpkg-gencontrol above to understand what went wrong."
            )
        os.chmod(ctrl_file, 0o644)

        if not binary_package.is_udeb:
            generate_md5sums_file(control_output_dir, fs_root)