tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

sysroot_creator.py (27403B)


      1 #!/usr/bin/env python3
      2 # Copyright 2023 The Chromium Authors
      3 # Use of this source code is governed by a BSD-style license that can be
      4 # found in the LICENSE file.
      5 """
      6 This script is used to build Debian sysroot images for building Chromium.
      7 """
      8 
      9 import argparse
     10 import collections
     11 import hashlib
     12 import lzma
     13 import os
     14 import re
     15 import shutil
     16 import subprocess
     17 import tempfile
     18 import time
     19 
     20 import requests
     21 import reversion_glibc
     22 
# Target distribution and release the sysroot is built from.
DISTRO = "debian"
RELEASE = "bullseye"

# This number is appended to the sysroot key to cause full rebuilds.  It
# should be incremented when removing packages or patching existing packages.
# It should not be incremented when adding packages.
SYSROOT_RELEASE = 1

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))

# Assumes this script lives three directories below the Chromium source root.
CHROME_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "..", "..", ".."))
BUILD_DIR = os.path.join(CHROME_DIR, "out", "sysroot-build", RELEASE)

# gpg keyring file generated using generate_keyring.sh
KEYRING_FILE = os.path.join(SCRIPT_DIR, "keyring.gpg")

# Pinned snapshot.debian.org timestamp; makes the downloaded package set
# reproducible and is also used as the default file timestamp in the tarball.
ARCHIVE_TIMESTAMP = "20250129T203412Z"

ARCHIVE_URL = f"https://snapshot.debian.org/archive/debian/{ARCHIVE_TIMESTAMP}/"
# (dist, [components]) pairs.  In generate_package_list(), entries later in
# this list overwrite earlier ones for the same package name.
APT_SOURCES_LIST = [
    # Debian 12 (Bookworm) is needed for GTK4.  It should be kept before
    # bullseye so that bullseye takes precedence.
    ("bookworm", ["main"]),
    ("bookworm-updates", ["main"]),
    # This mimics a sources.list from bullseye.
    ("bullseye", ["main", "contrib", "non-free"]),
    ("bullseye-updates", ["main", "contrib", "non-free"]),
    ("bullseye-backports", ["main", "contrib", "non-free"]),
]
     52 
# Debian architecture name -> GNU target triple.
TRIPLES = {
    "amd64": "x86_64-linux-gnu",
    "i386": "i386-linux-gnu",
    "armhf": "arm-linux-gnueabihf",
    "arm64": "aarch64-linux-gnu",
    "mipsel": "mipsel-linux-gnu",
    "mips64el": "mips64el-linux-gnuabi64",
    "ppc64el": "powerpc64le-linux-gnu",
}

# Host tools that must be present on PATH (verified by sanity_check()).
REQUIRED_TOOLS = [
    "dpkg-deb",
    "file",
    "gpgv",
    "readelf",
    "tar",
    "xz",
]
     71 
# Package configuration
PACKAGES_EXT = "xz"  # compression suffix of the downloaded Packages indices
RELEASE_FILE = "Release"
RELEASE_FILE_GPG = "Release.gpg"

# List of development packages. Dependencies are automatically included.
DEBIAN_PACKAGES = [
    "libasound2-dev",
    "libavformat-dev",
    "libbluetooth-dev",
    "libc6-dev",
    "libcap-dev",
    "libcolord-dev",
    "libcups2-dev",
    "libcupsimage2-dev",
    "libcurl4-gnutls-dev",
    "libdbusmenu-glib-dev",
    "libdeflate-dev",
    "libelf-dev",
    "libflac-dev",
    "libgbm-dev",
    "libgcrypt20-dev",
    "libgnutls28-dev",
    "libgtk-3-dev",
    "libgtk-4-dev",
    "libinput-dev",
    "libjbig-dev",
    "libjpeg-dev",
    "libjsoncpp-dev",
    "libkrb5-dev",
    "liblcms2-dev",
    "liblzma-dev",
    "libminizip-dev",
    "libmtdev-dev",
    "libncurses-dev",
    "libnss3-dev",
    "libopus-dev",
    "libpam0g-dev",
    "libpci-dev",
    "libpipewire-0.3-dev",
    "libpulse-dev",
    "libre2-dev",
    "libsnappy-dev",
    "libspeechd-dev",
    "libssl-dev",
    "libsystemd-dev",
    "libtiff-dev",
    "libutempter-dev",
    "libva-dev",
    "libvpx-dev",
    "libwayland-egl-backend-dev",
    "libwebp-dev",
    "libx11-xcb-dev",
    "libxcb-dri2-0-dev",
    "libxcb-dri3-dev",
    "libxcb-glx0-dev",
    "libxcb-image0-dev",
    "libxcb-present-dev",
    "libxcb-render-util0-dev",
    "libxcb-util-dev",
    "libxshmfence-dev",
    "libxslt1-dev",
    "libxss-dev",
    "libxt-dev",
    "libxxf86vm-dev",
    "mesa-common-dev",
    "qt6-base-dev",
    "qtbase5-dev",
    "valgrind",
]
    142 
    143 
    144 def banner(message: str) -> None:
    145    print("#" * 70)
    146    print(message)
    147    print("#" * 70)
    148 
    149 
    150 def sub_banner(message: str) -> None:
    151    print("-" * 70)
    152    print(message)
    153    print("-" * 70)
    154 
    155 
    156 def hash_file(hasher, file_name: str) -> str:
    157    with open(file_name, "rb") as f:
    158        while chunk := f.read(8192):
    159            hasher.update(chunk)
    160    return hasher.hexdigest()
    161 
    162 
    163 def atomic_copyfile(source: str, destination: str) -> None:
    164    dest_dir = os.path.dirname(destination)
    165    with tempfile.NamedTemporaryFile(mode="wb", delete=False,
    166                                     dir=dest_dir) as temp_file:
    167        temp_filename = temp_file.name
    168    shutil.copyfile(source, temp_filename)
    169    os.rename(temp_filename, destination)
    170 
    171 
    172 def download_or_copy_non_unique_filename(url: str, dest: str) -> None:
    173    """
    174    Downloads a file from a given URL to a destination with a unique filename,
    175    based on the SHA-256 hash of the URL.
    176    """
    177    hash_digest = hashlib.sha256(url.encode()).hexdigest()
    178    unique_dest = f"{dest}.{hash_digest}"
    179    download_or_copy(url, unique_dest)
    180    atomic_copyfile(unique_dest, dest)
    181 
    182 
    183 def download_or_copy(source: str, destination: str) -> None:
    184    """
    185    Downloads a file from the given URL or copies it from a local path to the
    186    specified destination.
    187    """
    188    if os.path.exists(destination):
    189        print(f"{destination} already in place")
    190        return
    191 
    192    if source.startswith(("http://", "https://")):
    193        download_file(source, destination)
    194    else:
    195        atomic_copyfile(source, destination)
    196 
    197 
    198 def download_file(url: str, dest: str, retries=5) -> None:
    199    """
    200    Downloads a file from a URL to a specified destination with retry logic,
    201    directory creation, and atomic write.
    202    """
    203    print(f"Downloading from {url} -> {dest}")
    204    # Create directories if they don't exist
    205    os.makedirs(os.path.dirname(dest), exist_ok=True)
    206 
    207    for attempt in range(retries):
    208        try:
    209            with requests.get(url, stream=True) as response:
    210                response.raise_for_status()
    211 
    212                # Use a temporary file to write data
    213                with tempfile.NamedTemporaryFile(
    214                        mode="wb", delete=False,
    215                        dir=os.path.dirname(dest)) as temp_file:
    216                    for chunk in response.iter_content(chunk_size=8192):
    217                        temp_file.write(chunk)
    218 
    219                # Rename temporary file to destination file
    220                os.rename(temp_file.name, dest)
    221                print(f"Downloaded {dest}")
    222                break
    223 
    224        except requests.RequestException as e:
    225            print(f"Attempt {attempt} failed: {e}")
    226            # Exponential back-off
    227            time.sleep(2**attempt)
    228    else:
    229        raise Exception(f"Failed to download file after {retries} attempts")
    230 
    231 
    232 def sanity_check() -> None:
    233    """
    234    Performs sanity checks to ensure the environment is correctly set up.
    235    """
    236    banner("Sanity Checks")
    237 
    238    # Determine the Chrome build directory
    239    os.makedirs(BUILD_DIR, exist_ok=True)
    240    print(f"Using build directory: {BUILD_DIR}")
    241 
    242    # Check for required tools
    243    missing = [tool for tool in REQUIRED_TOOLS if not shutil.which(tool)]
    244    if missing:
    245        raise Exception(f"Required tools not found: {', '.join(missing)}")
    246 
    247 
    248 def clear_install_dir(install_root: str) -> None:
    249    if os.path.exists(install_root):
    250        shutil.rmtree(install_root)
    251    os.makedirs(install_root)
    252 
    253 
    254 def create_tarball(install_root: str, arch: str) -> None:
    255    tarball_path = os.path.join(BUILD_DIR,
    256                                f"{DISTRO}_{RELEASE}_{arch}_sysroot.tar.xz")
    257    banner("Creating tarball " + tarball_path)
    258    command = [
    259        "tar",
    260        "--owner=0",
    261        "--group=0",
    262        "--numeric-owner",
    263        "--sort=name",
    264        "--no-xattrs",
    265        "-I",
    266        "xz -z9 -T0 --lzma2='dict=256MiB'",
    267        "-cf",
    268        tarball_path,
    269        "-C",
    270        install_root,
    271        ".",
    272    ]
    273    subprocess.run(command, check=True)
    274 
    275 
def generate_package_list_dist_repo(arch: str, dist: str,
                                    repo_name: str) -> list[dict[str, str]]:
    """Download, verify and parse one Packages.xz index.

    Returns one dict per package stanza, mapping Debian control field names
    (e.g. "Package", "Filename", "SHA256") to their values.
    """
    repo_basedir = f"{ARCHIVE_URL}/dists/{dist}"
    package_list = f"{BUILD_DIR}/Packages.{dist}_{repo_name}_{arch}"
    package_list = f"{package_list}.{PACKAGES_EXT}"
    package_file_arch = f"{repo_name}/binary-{arch}/Packages.{PACKAGES_EXT}"
    package_list_arch = f"{repo_basedir}/{package_file_arch}"

    download_or_copy_non_unique_filename(package_list_arch, package_list)
    # Check the index against the GPG-signed Release file before trusting it.
    verify_package_listing(package_file_arch, package_list, dist)

    # Stanzas are separated by blank lines.  Continuation lines (leading
    # space) are dropped, so only the first line of multi-line fields is kept.
    with lzma.open(package_list, "rt") as src:
        return [
            dict(
                line.split(": ", 1) for line in package_meta.splitlines()
                if not line.startswith(" "))
            for package_meta in src.read().split("\n\n") if package_meta
        ]
    294 
    295 
def generate_package_list(arch: str) -> dict[str, str]:
    """Compute the download set (URL -> SHA256) for *arch*.

    Resolves every entry of DEBIAN_PACKAGES plus its transitive Depends
    closure against the configured APT sources, and also writes the sorted
    URL list to generated_package_lists/<release>.<arch>.

    Raises if any requested package cannot be found.
    """
    # Workaround for some misconfigured package dependencies.
    BROKEN_DEPS = {
        "libgcc1",
        "qt6-base-abi",
    }

    # Map package name -> stanza.  Later APT_SOURCES_LIST entries overwrite
    # earlier ones; virtual names from "Provides" never overwrite an
    # existing mapping.
    package_meta = {}
    for dist, repos in APT_SOURCES_LIST:
        for repo_name in repos:
            for meta in generate_package_list_dist_repo(arch, dist, repo_name):
                package_meta[meta["Package"]] = meta
                if "Provides" not in meta:
                    continue
                for provides in meta["Provides"].split(", "):
                    if provides in package_meta:
                        continue
                    package_meta[provides] = meta

    def add_package_dependencies(package: str) -> None:
        # Recursively add *package* and its Depends closure to package_dict.
        if package in BROKEN_DEPS:
            return
        meta = package_meta[package]
        url = ARCHIVE_URL + meta["Filename"]
        if url in package_dict:
            return
        package_dict[url] = meta["SHA256"]
        if "Depends" in meta:
            for dep in meta["Depends"].split(", "):
                # Strip version constraints and ":arch" qualifiers.
                add_package_dependencies(dep.split()[0].split(":")[0])

    # Read the input file and create a dictionary mapping package names to URLs
    # and checksums.
    missing = set(DEBIAN_PACKAGES)
    package_dict: dict[str, str] = {}
    for meta in package_meta.values():
        package = meta["Package"]
        if package in missing:
            missing.remove(package)
            add_package_dependencies(package)
    if missing:
        raise Exception(f"Missing packages: {', '.join(missing)}")

    # Write the URLs and checksums of the requested packages to the output file
    output_file = os.path.join(SCRIPT_DIR, "generated_package_lists",
                               f"{RELEASE}.{arch}")
    with open(output_file, "w") as f:
        f.write("\n".join(sorted(package_dict)) + "\n")
    return package_dict
    345 
    346 
def hacks_and_patches(install_root: str, script_dir: str, arch: str) -> None:
    """Apply one-off fixups to the extracted sysroot.

    Each patch below works around a packaging quirk or relaxes a version
    requirement (glibc, pango) so the sysroot stays compatible with the
    toolchains Chromium supports.
    """
    banner("Misc Hacks & Patches")

    debian_dir = os.path.join(install_root, "debian")
    control_file = os.path.join(debian_dir, "control")
    # Create an empty control file
    open(control_file, "a").close()

    # Remove an unnecessary dependency on qtchooser.
    qtchooser_conf = os.path.join(install_root, "usr", "lib", TRIPLES[arch],
                                  "qt-default/qtchooser/default.conf")
    if os.path.exists(qtchooser_conf):
        os.remove(qtchooser_conf)

    # libxcomposite1 is missing a symbols file.
    atomic_copyfile(
        os.path.join(script_dir, "libxcomposite1-symbols"),
        os.path.join(install_root, "debian", "libxcomposite1", "DEBIAN",
                     "symbols"),
    )

    # __GLIBC_MINOR__ is used as a feature test macro. Replace it with the
    # earliest supported version of glibc (2.26).
    features_h = os.path.join(install_root, "usr", "include", "features.h")
    replace_in_file(features_h, r"(#define\s+__GLIBC_MINOR__)", r"\1 26 //")

    # fcntl64() was introduced in glibc 2.28. Make sure to use fcntl() instead.
    fcntl_h = os.path.join(install_root, "usr", "include", "fcntl.h")
    replace_in_file(
        fcntl_h,
        r"#ifndef __USE_FILE_OFFSET64(\nextern int fcntl)",
        r"#if 1\1",
    )

    # Do not use pthread_cond_clockwait as it was introduced in glibc 2.30.
    cppconfig_h = os.path.join(
        install_root,
        "usr",
        "include",
        TRIPLES[arch],
        "c++",
        "10",
        "bits",
        "c++config.h",
    )
    replace_in_file(cppconfig_h,
                    r"(#define\s+_GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT)",
                    r"// \1")

    # Include limits.h in stdlib.h to fix an ODR issue.
    stdlib_h = os.path.join(install_root, "usr", "include", "stdlib.h")
    replace_in_file(stdlib_h, r"(#include <stddef.h>)",
                    r"\1\n#include <limits.h>")

    # Move pkgconfig scripts.
    pkgconfig_dir = os.path.join(install_root, "usr", "lib", "pkgconfig")
    os.makedirs(pkgconfig_dir, exist_ok=True)
    triple_pkgconfig_dir = os.path.join(install_root, "usr", "lib",
                                        TRIPLES[arch], "pkgconfig")
    if os.path.exists(triple_pkgconfig_dir):
        for file in os.listdir(triple_pkgconfig_dir):
            shutil.move(os.path.join(triple_pkgconfig_dir, file),
                        pkgconfig_dir)

    # Avoid requiring unsupported glibc versions.
    for lib in ["libc.so.6", "libm.so.6", "libcrypt.so.1"]:
        lib_path = os.path.join(install_root, "lib", TRIPLES[arch], lib)
        reversion_glibc.reversion_glibc(lib_path)

    # GTK4 is provided by bookworm (12), but pango is provided by bullseye
    # (11).  Fix the GTK4 pkgconfig file to relax the pango version
    # requirement.
    gtk4_pc = os.path.join(pkgconfig_dir, "gtk4.pc")
    replace_in_file(gtk4_pc, r"pango [>=0-9. ]*", "pango")
    replace_in_file(gtk4_pc, r"pangocairo [>=0-9. ]*", "pangocairo")

    # Remove a cyclic symlink: /usr/bin/X11 -> /usr/bin
    os.remove(os.path.join(install_root, "usr/bin/X11"))
    425 
    426 
    427 def replace_in_file(file_path: str, search_pattern: str,
    428                    replace_pattern: str) -> None:
    429    with open(file_path, "r") as file:
    430        content = file.read()
    431    with open(file_path, "w") as file:
    432        file.write(re.sub(search_pattern, replace_pattern, content))
    433 
    434 
def install_into_sysroot(build_dir: str, install_root: str,
                         packages: dict[str, str]) -> None:
    """
    Installs libraries and headers into the sysroot environment.

    *packages* maps a .deb URL (or local path) to its expected SHA256.  Each
    package is downloaded into build_dir/debian-packages, checksum-verified,
    extracted into *install_root*, and its control metadata unpacked under
    debian/<name>/DEBIAN.  Finally /usr/share is pruned to an allowlist.
    """
    banner("Install Libs And Headers Into Jail")

    debian_packages_dir = os.path.join(build_dir, "debian-packages")
    os.makedirs(debian_packages_dir, exist_ok=True)

    debian_dir = os.path.join(install_root, "debian")
    os.makedirs(debian_dir, exist_ok=True)
    for package, sha256sum in packages.items():
        package_name = os.path.basename(package)
        package_path = os.path.join(debian_packages_dir, package_name)

        banner(f"Installing {package_name}")
        download_or_copy(package, package_path)
        # Refuse to install anything that doesn't match the verified index.
        if hash_file(hashlib.sha256(), package_path) != sha256sum:
            raise ValueError(f"SHA256 mismatch for {package_path}")

        sub_banner(f"Extracting to {install_root}")
        subprocess.run(["dpkg-deb", "-x", package_path, install_root],
                       check=True)

        base_package = get_base_package_name(package_path)
        debian_package_dir = os.path.join(debian_dir, base_package, "DEBIAN")

        # Extract the control file
        os.makedirs(debian_package_dir, exist_ok=True)
        with subprocess.Popen(
            ["dpkg-deb", "-e", package_path, debian_package_dir],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
        ) as proc:
            _, err = proc.communicate()
            if proc.returncode != 0:
                message = "Failed to extract control from"
                raise Exception(
                    f"{message} {package_path}: {err.decode('utf-8')}")

    # Prune /usr/share, leaving only allowlisted directories.
    USR_SHARE_ALLOWLIST = {
        "fontconfig",
        "pkgconfig",
        "wayland",
        "wayland-protocols",
    }
    usr_share = os.path.join(install_root, "usr", "share")
    for item in os.listdir(usr_share):
        full_path = os.path.join(usr_share, item)
        if os.path.isdir(full_path) and item not in USR_SHARE_ALLOWLIST:
            shutil.rmtree(full_path)
    488 
    489 
    490 def get_base_package_name(package_path: str) -> str:
    491    """
    492    Retrieves the base package name from a Debian package.
    493    """
    494    result = subprocess.run(["dpkg-deb", "--field", package_path, "Package"],
    495                            capture_output=True,
    496                            text=True)
    497    if result.returncode != 0:
    498        raise Exception(
    499            f"Failed to get package name from {package_path}: {result.stderr}")
    500    return result.stdout.strip()
    501 
    502 
def cleanup_jail_symlinks(install_root: str) -> None:
    """
    Cleans up jail symbolic links by converting absolute symlinks
    into relative ones.

    Also removes /dev/null "mask" symlinks and any symlink whose target
    does not exist inside the sysroot.
    """
    for root, dirs, files in os.walk(install_root):
        for name in files + dirs:
            full_path = os.path.join(root, name)
            if os.path.islink(full_path):
                target_path = os.readlink(full_path)
                if target_path == "/dev/null":
                    # Some systemd services get masked by symlinking them to
                    # /dev/null. It's safe to remove these.
                    os.remove(full_path)
                    continue

                # If the link's target does not exist, remove this broken link.
                if os.path.isabs(target_path):
                    # Absolute targets are re-rooted under install_root.
                    absolute_target = os.path.join(install_root,
                                                   target_path.strip("/"))
                else:
                    absolute_target = os.path.join(os.path.dirname(full_path),
                                                   target_path)
                if not os.path.exists(absolute_target):
                    os.remove(full_path)
                    continue

                if os.path.isabs(target_path):
                    # Compute the relative path from the symlink to the target.
                    relative_path = os.path.relpath(
                        os.path.join(install_root, target_path.strip("/")),
                        os.path.dirname(full_path),
                    )
                    # Verify that the target exists inside the install_root.
                    joined_path = os.path.join(os.path.dirname(full_path),
                                               relative_path)
                    if not os.path.exists(joined_path):
                        raise Exception(
                            f"Link target doesn't exist: {joined_path}")
                    os.remove(full_path)
                    os.symlink(relative_path, full_path)
    544 
    545 
def removing_unnecessary_files(install_root: str, arch: str) -> None:
    """
    Minimizes the sysroot by removing unnecessary files.

    Deletes executables and static libraries (except a small allowlist),
    together with any symlinks that were pointing at the deleted files.
    """
    # Preserve these files.
    gcc_triple = "i686-linux-gnu" if arch == "i386" else TRIPLES[arch]
    ALLOWLIST = {
        "usr/bin/cups-config",
        f"usr/lib/gcc/{gcc_triple}/10/libgcc.a",
        f"usr/lib/{TRIPLES[arch]}/libc_nonshared.a",
        f"usr/lib/{TRIPLES[arch]}/libffi_pic.a",
    }

    for file in ALLOWLIST:
        assert os.path.exists(os.path.join(install_root, file))

    # Remove all executables and static libraries, and any symlinks that
    # were pointing to them.
    reverse_links = collections.defaultdict(list)
    remove = []
    for root, _, files in os.walk(install_root):
        for filename in files:
            filepath = os.path.join(root, filename)
            if os.path.relpath(filepath, install_root) in ALLOWLIST:
                continue
            if os.path.islink(filepath):
                # Map resolved target -> the symlinks referencing it.
                target_path = os.readlink(filepath)
                if not os.path.isabs(target_path):
                    target_path = os.path.join(root, target_path)
                reverse_links[os.path.realpath(target_path)].append(filepath)
            elif "so" in filepath.split(".")[-3:]:
                # Keep shared objects (matches e.g. foo.so and foo.so.1.2).
                continue
            elif os.access(filepath, os.X_OK) or filepath.endswith(".a"):
                remove.append(filepath)
    for filepath in remove:
        os.remove(filepath)
        for link in reverse_links[filepath]:
            os.remove(link)
    584 
    585 
def strip_sections(install_root: str, arch: str):
    """
    Strips all sections from ELF files except for dynamic linking and
    essential sections. Skips static libraries (.a), object files (.o), and a
    few files used by other Chromium-related projects.

    Raises if any of PRESERVED_FILES was not found under lib/<triple>.
    """
    PRESERVED_FILES = (
        'libc-2.31.so',
        'libm-2.31.so',
        'ld-2.31.so',
    )

    # Sections needed for dynamic linking; everything else is removed.
    PRESERVED_SECTIONS = {
        ".dynamic",
        ".dynstr",
        ".dynsym",
        ".gnu.version",
        ".gnu.version_d",
        ".gnu.version_r",
        ".hash",
        ".note.ABI-tag",
        ".note.gnu.build-id",
    }

    preserved_files_count = 0
    lib_arch_path = os.path.join(install_root, "lib", TRIPLES[arch])
    for root, _, files in os.walk(install_root):
        for file in files:
            file_path = os.path.join(root, file)
            if file_path.startswith(lib_arch_path) and file in PRESERVED_FILES:
                preserved_files_count += 1
                continue

            # NOTE(review): os.access() is given the bare basename, not
            # file_path, so it is resolved against the CWD and is almost
            # always False.  Confirm whether file_path was intended; note a
            # "fix" would also skip stripping exec-bit shared objects.
            if (os.access(file, os.X_OK) or file.endswith((".a", ".o"))
                    or os.path.islink(file_path)):
                continue

            # Verify this is an ELF file
            with open(file_path, "rb") as f:
                magic = f.read(4)
                if magic != b"\x7fELF":
                    continue

            # Get section headers
            objdump_cmd = ["objdump", "-h", file_path]
            result = subprocess.run(objdump_cmd,
                                    check=True,
                                    text=True,
                                    capture_output=True)
            section_lines = result.stdout.splitlines()

            # Parse section names
            sections = set()
            for line in section_lines:
                parts = line.split()
                if len(parts) > 1 and parts[0].isdigit():
                    sections.add(parts[1])

            sections_to_remove = sections - PRESERVED_SECTIONS
            if sections_to_remove:
                # i386 reuses the amd64 objcopy -- presumably because an
                # x86_64 objcopy handles 32-bit x86 objects; confirm.
                objcopy_arch = "amd64" if arch == "i386" else arch
                objcopy_bin = TRIPLES[objcopy_arch] + "-objcopy"
                objcopy_cmd = ([objcopy_bin] + [
                    f"--remove-section={section}"
                    for section in sections_to_remove
                ] + [file_path])
                subprocess.run(objcopy_cmd, check=True, stderr=subprocess.PIPE)
    if preserved_files_count != len(PRESERVED_FILES):
        raise Exception("Expected file to preserve missing")
    655 
    656 
    657 def record_metadata(install_root: str) -> dict[str, tuple[float, float]]:
    658    """
    659    Recursively walk the install_root directory and record the metadata of all
    660    files. Symlinks are not followed. Returns a dictionary mapping each path
    661    (relative to install_root) to its original metadata.
    662    """
    663    metadata = {}
    664    for root, dirs, files in os.walk(install_root):
    665        for name in dirs + files:
    666            full_path = os.path.join(root, name)
    667            rel_path = os.path.relpath(full_path, install_root)
    668            st = os.lstat(full_path)
    669            metadata[rel_path] = (st.st_atime, st.st_mtime)
    670    return metadata
    671 
    672 
    673 def restore_metadata(install_root: str,
    674                     old_meta: dict[str, tuple[float, float]]) -> None:
    675    """
    676    1. Restore the metadata of any file that exists in old_meta.
    677    2. For all other files, set their timestamp to ARCHIVE_TIMESTAMP.
    678    3. For all directories (including install_root), set the timestamp
    679       to ARCHIVE_TIMESTAMP.
    680    """
    681    # Convert the timestamp to a UNIX epoch time.
    682    archive_time = time.mktime(
    683        time.strptime(ARCHIVE_TIMESTAMP, "%Y%m%dT%H%M%SZ"))
    684 
    685    # Walk through the install_root, applying old_meta where available;
    686    # otherwise set times to archive_time.
    687    for root, dirs, files in os.walk(install_root):
    688        # Directories get archive_time.
    689        os.utime(root, (archive_time, archive_time))
    690 
    691        # Files: old_meta if available, else archive_time.
    692        for file_name in files:
    693            file_path = os.path.join(root, file_name)
    694            if os.path.lexists(file_path):
    695                rel_path = os.path.relpath(file_path, install_root)
    696                if rel_path in old_meta:
    697                    restore_time = old_meta[rel_path]
    698                else:
    699                    restore_time = (archive_time, archive_time)
    700                os.utime(file_path, restore_time, follow_symlinks=False)
    701 
    702 
    703 def build_sysroot(arch: str) -> None:
    704    install_root = os.path.join(BUILD_DIR, f"{RELEASE}_{arch}_staging")
    705    clear_install_dir(install_root)
    706    packages = generate_package_list(arch)
    707    install_into_sysroot(BUILD_DIR, install_root, packages)
    708    old_metadata = record_metadata(install_root)
    709    hacks_and_patches(install_root, SCRIPT_DIR, arch)
    710    cleanup_jail_symlinks(install_root)
    711    removing_unnecessary_files(install_root, arch)
    712    strip_sections(install_root, arch)
    713    restore_metadata(install_root, old_metadata)
    714    create_tarball(install_root, arch)
    715 
    716 
    717 def upload_sysroot(arch: str) -> str:
    718    tarball_path = os.path.join(BUILD_DIR,
    719                                f"{DISTRO}_{RELEASE}_{arch}_sysroot.tar.xz")
    720    command = [
    721        "upload_to_google_storage_first_class.py",
    722        "--bucket",
    723        "chrome-linux-sysroot",
    724        tarball_path,
    725    ]
    726    return subprocess.check_output(command).decode("utf-8")
    727 
    728 
def verify_package_listing(file_path: str, output_file: str,
                           dist: str) -> None:
    """
    Verifies the downloaded Packages.xz file against its checksum and GPG keys.

    *file_path* is the index path relative to the dist directory (as listed
    in the Release file); *output_file* is the local copy being verified.
    Raises on any verification failure.
    """
    # Paths for Release and Release.gpg files
    repo_basedir = f"{ARCHIVE_URL}/dists/{dist}"
    release_list = f"{repo_basedir}/{RELEASE_FILE}"
    release_list_gpg = f"{repo_basedir}/{RELEASE_FILE_GPG}"

    release_file = os.path.join(BUILD_DIR, f"{dist}-{RELEASE_FILE}")
    release_file_gpg = os.path.join(BUILD_DIR, f"{dist}-{RELEASE_FILE_GPG}")

    if not os.path.exists(KEYRING_FILE):
        raise Exception(f"KEYRING_FILE not found: {KEYRING_FILE}")

    # Download Release and Release.gpg files
    download_or_copy_non_unique_filename(release_list, release_file)
    download_or_copy_non_unique_filename(release_list_gpg, release_file_gpg)

    # Verify Release file with GPG
    subprocess.run(
        ["gpgv", "--keyring", KEYRING_FILE, release_file_gpg, release_file],
        check=True)

    # Find the SHA256 checksum for the specific file in the Release file
    sha256sum_pattern = re.compile(r"([a-f0-9]{64})\s+\d+\s+" +
                                   re.escape(file_path) + r"$")
    sha256sum_match = None
    with open(release_file, "r") as f:
        for line in f:
            if match := sha256sum_pattern.search(line):
                sha256sum_match = match.group(1)
                break

    if not sha256sum_match:
        raise Exception(
            f"Checksum for {file_path} not found in {release_file}")

    if hash_file(hashlib.sha256(), output_file) != sha256sum_match:
        raise Exception(f"Checksum mismatch for {output_file}")
    770 
    771 
    772 def main():
    773    parser = argparse.ArgumentParser(
    774        description="Build and upload Debian sysroot images for Chromium.")
    775    parser.add_argument("command", choices=["build", "upload"])
    776    parser.add_argument("architecture", choices=list(TRIPLES))
    777    args = parser.parse_args()
    778 
    779    sanity_check()
    780 
    781    if args.command == "build":
    782        build_sysroot(args.architecture)
    783    elif args.command == "upload":
    784        upload_sysroot(args.architecture)
    785 
    786 
# Script entry point.
if __name__ == "__main__":
    main()