tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

distro_package.py (8020B)


      1 # This Source Code Form is subject to the terms of the Mozilla Public
      2 # License, v. 2.0. If a copy of the MPL was not distributed with this
      3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
      4 """
      5 Support for running spidermonkey jobs via dedicated scripts
      6 """
      7 
      8 import os
      9 import re
     10 
     11 import taskgraph
     12 from taskgraph.util.schema import Schema
     13 from voluptuous import Any, Optional, Required
     14 
     15 from gecko_taskgraph import GECKO
     16 from gecko_taskgraph.transforms.job import run_job_using
     17 from gecko_taskgraph.transforms.job.common import add_artifacts
     18 from gecko_taskgraph.util.hash import hash_path
     19 
# Package name from a .dsc file name: the longest prefix that is followed by
# an underscore, e.g. "foo_1.0-1.dsc" -> "foo".
DSC_PACKAGE_RE = re.compile(".*(?=_)")
# Package name from a tarball file name: the longest prefix followed by "-"
# or "_" and a digit, e.g. "foo-1.0.tar.gz" -> "foo".
SOURCE_PACKAGE_RE = re.compile(r".*(?=[-_]\d)")

# Schema fragment describing a remote source file: where to download it from
# and the expected SHA-256 checksum of its contents.
source_definition = {
    Required("url"): str,
    Required("sha256"): str,
}
     27 
# Options shared by the "debian-package" and "ubuntu-package" run-usings;
# extended with a distribution name by the per-distro schemas below.
common_schema = Schema({
    # URL/SHA256 of a source file to build, which can either be a source
    # control (.dsc), or a tarball.
    Required(Any("dsc", "tarball")): source_definition,
    # Package name. Normally derived from the source control or tarball file
    # name. Use in case the name doesn't match DSC_PACKAGE_RE or
    # SOURCE_PACKAGE_RE.
    Optional("name"): str,
    # Patch to apply to the extracted source.
    Optional("patch"): str,
    # Command to run before dpkg-buildpackage.
    Optional("pre-build-command"): str,
    # Architecture to build the package for.
    Optional("arch"): str,
    # List of package tasks to get build dependencies from.
    Optional("packages"): [str],
    # What resolver to use to install build dependencies. The default
    # (apt-get) is good in most cases, but in subtle cases involving
    # a *-backports archive, its solver might not be able to find a
    # solution that satisfies the build dependencies.
    Optional("resolver"): Any("apt-get", "aptitude"),
    # Base work directory used to set up the task.
    Required("workdir"): str,
})
     52 
# Schema for "run: using: debian-package" jobs.
debian_schema = common_schema.extend({
    Required("using"): "debian-package",
    # Debian distribution
    Required("dist"): str,
})
     58 
# Schema for "run: using: ubuntu-package" jobs.
ubuntu_schema = common_schema.extend({
    Required("using"): "ubuntu-package",
    # Ubuntu distribution
    Required("dist"): str,
})
     64 
     65 
def common_package(config, job, taskdesc, distro, version):
    """Fill in *taskdesc* so that it builds a Debian-style source package.

    Shared implementation behind the ``debian-package`` and
    ``ubuntu-package`` run-usings: selects the docker image, declares the
    artifact directory, and assembles a single ``sh -x -c`` command that
    downloads, verifies, unpacks, optionally patches, and builds the
    package with dpkg-buildpackage, then indexes the results as an APT
    repository.

    Arguments:
        config: the transform config; ``config.kind`` and
            ``config.params.file_url`` are used here.
        job: the job description; ``job["run"]`` matches common_schema.
        taskdesc: the task description, mutated in place.
        distro: distribution family name (e.g. "debian" or "ubuntu").
        version: numeric distribution version, embedded in the image name.
    """
    run = job["run"]

    # Task name with the kind prefix stripped (only the first occurrence).
    name = taskdesc["label"].replace(f"{config.kind}-", "", 1)

    arch = run.get("arch", "amd64")

    worker = taskdesc["worker"]
    worker.setdefault("artifacts", [])

    # Docker image name, e.g. "debian12-packages" or
    # "ubuntu2204-arm64-packages" (non-amd64 arches get a suffix).
    image = "%s%d" % (distro, version)
    if arch != "amd64":
        image += "-" + arch
    image += "-packages"
    worker["docker-image"] = {"in-tree": image}

    # Expose /tmp/artifacts as the task's artifact directory; the build
    # command below copies its output there.
    add_artifacts(config, job, taskdesc, path="/tmp/artifacts")

    # Identity used by debchange for the changelog entry added below.
    env = worker.setdefault("env", {})
    env["DEBFULLNAME"] = "Mozilla build team"
    env["DEBEMAIL"] = "dev-builds@lists.mozilla.org"

    # Pick the unpack command and package-name regex matching the source
    # type; the schema guarantees exactly one of "dsc"/"tarball" is set,
    # hence the unreachable else branch.
    if "dsc" in run:
        src = run["dsc"]
        unpack = "dpkg-source -x {src_file} {package}"
        package_re = DSC_PACKAGE_RE
    elif "tarball" in run:
        src = run["tarball"]
        unpack = (
            "mkdir {package} && tar -C {package} -axf {src_file} --strip-components=1"
        )
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError("Unreachable")
    src_url = src["url"]
    src_file = os.path.basename(src_url)
    src_sha256 = src["sha256"]
    package = run.get("name")
    if not package:
        # NOTE: raises AttributeError (match() -> None) if the file name
        # doesn't fit the regex; supply "name" in the job in that case.
        package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    # Turn the resolver choice into the actual command used by
    # mk-build-deps (-t) below.
    resolver = run.get("resolver", "apt-get")
    if resolver == "apt-get":
        resolver = "apt-get -yyq --no-install-recommends"
    elif resolver == "aptitude":
        # Hint aptitude not to "solve" dependency problems by removing the
        # generated *-build-deps metapackage itself.
        resolver = (
            "aptitude -y --without-recommends -o "
            "Aptitude::ProblemResolver::Hints::KeepBuildDeps="
            f'"reject {package}-build-deps :UNINST"'
        )
    else:
        raise RuntimeError("Unreachable")

    # Commands run inside the extracted source tree before building; each
    # fragment ends with "&& " so they chain into the command string below.
    adjust = ""
    if "patch" in run:
        # We don't use robustcheckout or run-task to get a checkout. So for
        # this one file we'd need from a checkout, download it.
        env["PATCH_URL"] = config.params.file_url(
            "build/debian-packages/{patch}".format(patch=run["patch"]),
        )
        adjust += "curl -sL $PATCH_URL | patch -p1 && "
    if "pre-build-command" in run:
        adjust += run["pre-build-command"] + " && "
    if "tarball" in run:
        # Rename the tarball to the <package>_<version>.orig.tar.gz name
        # dpkg-buildpackage expects; the version (minus the Debian revision)
        # is extracted from debian/changelog at run time.
        adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format(
            src_file=src_file,
            package=package,
            ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)",
        )
    if "patch" not in run and "pre-build-command" not in run:
        # Unmodified backport: add a changelog entry with a ".<dist>moz"
        # local version suffix so the rebuild sorts above the original.
        adjust += (
            'debchange -l ".{prefix}moz" --distribution "{dist}"'
            ' "Mozilla backport for {dist}." < /dev/null && '
        ).format(
            prefix=name.split("-", 1)[0],
            dist=run["dist"],
        )

    worker["command"] = [
        "sh",
        "-x",
        "-c",
        # Add sources for packages coming from other package tasks.
        "/usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $PACKAGES && "
        "apt-get update && "
        # Upgrade packages that might have new versions in package tasks.
        "apt-get dist-upgrade && "
        "cd /tmp && "
        # Get, validate and extract the package source.
        # (exit 100 marks download failures as retryable infra errors —
        # presumably; confirm against the worker's retry-exit-status config.)
        "(dget -d -u {src_url} || exit 100) && "
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        "{unpack} && "
        "cd {package} && "
        # Optionally apply patch and/or pre-build command.
        "{adjust}"
        # Install the necessary build dependencies.
        "(cd ..; mk-build-deps -i -r {package}/debian/control -t '{resolver}' || exit 100) && "
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage -sa && '
        # Copy the artifacts
        "mkdir -p {artifacts}/apt && "
        "dcmd cp ../{package}_*.changes {artifacts}/apt/ && "
        "cd {artifacts} && "
        # Make the artifacts directory usable as an APT repository.
        "apt-ftparchive sources apt | gzip -c9 > apt/Sources.gz && "
        "apt-ftparchive packages apt | gzip -c9 > apt/Packages.gz".format(
            package=package,
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts="/tmp/artifacts",
            resolver=resolver,
        ),
    ]

    # Wire in build-dependency package tasks: $PACKAGES (consumed by
    # setup_packages.sh above) gets the resolved task ids, and the tasks
    # become dependencies of this one.
    if run.get("packages"):
        env = worker.setdefault("env", {})
        env["PACKAGES"] = {
            "task-reference": " ".join(f"<{p}>" for p in run["packages"])
        }
        deps = taskdesc.setdefault("dependencies", {})
        for p in run["packages"]:
            deps[p] = f"packages-{p}"

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    digest_data = list(worker["command"])
    if "patch" in run:
        # The patch contents affect the output, so they are part of the
        # cache digest too.
        digest_data.append(
            hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"]))
        )

    if not taskgraph.fast:
        taskdesc["cache"] = {
            "type": "packages.v1",
            "name": name,
            "digest-data": digest_data,
        }
    207 
    208 
    209 @run_job_using("docker-worker", "debian-package", schema=debian_schema)
    210 def docker_worker_debian_package(config, job, taskdesc):
    211    run = job["run"]
    212    version = {
    213        "wheezy": 7,
    214        "jessie": 8,
    215        "stretch": 9,
    216        "buster": 10,
    217        "bullseye": 11,
    218        "bookworm": 12,
    219        "trixie": 13,
    220    }[run["dist"]]
    221    common_package(config, job, taskdesc, "debian", version)
    222 
    223 
    224 @run_job_using("docker-worker", "ubuntu-package", schema=ubuntu_schema)
    225 def docker_worker_ubuntu_package(config, job, taskdesc):
    226    run = job["run"]
    227    version = {
    228        "bionic": 1804,
    229        "focal": 2004,
    230        "jammy": 2204,
    231        "noble": 2404,
    232    }[run["dist"]]
    233    common_package(config, job, taskdesc, "ubuntu", version)