tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

repackage_partner.py (11094B)


      1 # This Source Code Form is subject to the terms of the Mozilla Public
      2 # License, v. 2.0. If a copy of the MPL was not distributed with this
      3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
      4 """
      5 Transform the repackage task into an actual task description.
      6 """
      7 
      8 import copy
      9 
     10 from taskgraph.transforms.base import TransformSequence
     11 from taskgraph.util.dependencies import get_primary_dependency
     12 from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
     13 from taskgraph.util.taskcluster import get_artifact_prefix
     14 from voluptuous import Optional, Required
     15 
     16 from gecko_taskgraph.transforms.repackage import (
     17    PACKAGE_FORMATS as PACKAGE_FORMATS_VANILLA,
     18 )
     19 from gecko_taskgraph.transforms.task import task_description_schema
     20 from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
     21 from gecko_taskgraph.util.partners import get_partner_config_by_kind
     22 from gecko_taskgraph.util.platforms import archive_format, executable_extension
     23 from gecko_taskgraph.util.workertypes import worker_type_implementation
     24 
# When repacking the stub installer we need to pass a zip file and package name to the
# repackage task. This is not needed for vanilla stub but analogous to the full installer.
# NOTE: deep copy so the vanilla PACKAGE_FORMATS mapping from
# gecko_taskgraph.transforms.repackage is not mutated for other consumers.
PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA)
PACKAGE_FORMATS["installer-stub"]["inputs"]["package"] = "target-stub{archive_format}"
PACKAGE_FORMATS["installer-stub"]["args"].extend(["--package-name", "{package-name}"])
     30 
# Voluptuous schema validated against every incoming job (see add_validate below).
packaging_description_schema = Schema({
    # unique label to describe this repackaging task
    Optional("label"): str,
    # Routes specific to this task, if defined
    Optional("routes"): [str],
    # passed through directly to the job description
    Optional("extra"): task_description_schema["extra"],
    # Shipping product and phase
    Optional("shipping-product"): task_description_schema["shipping-product"],
    Optional("shipping-phase"): task_description_schema["shipping-phase"],
    # Package formats to produce; may be keyed by build platform/type.
    Required("package-formats"): optionally_keyed_by(
        "build-platform", "build-type", [str]
    ),
    # All l10n jobs use mozharness
    Required("mozharness"): {
        # Config files passed to the mozharness script
        Required("config"): optionally_keyed_by("build-platform", [str]),
        # Additional paths to look for mozharness configs in. These should be
        # relative to the base of the source checkout
        Optional("config-paths"): [str],
        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Optional("comm-checkout"): bool,
    },
    # Override the default priority for the project
    Optional("priority"): task_description_schema["priority"],
    Optional("task-from"): task_description_schema["task-from"],
    Optional("attributes"): task_description_schema["attributes"],
    Optional("dependencies"): task_description_schema["dependencies"],
    Optional("run-on-repo-type"): task_description_schema["run-on-repo-type"],
})
     62 
# The ordered sequence of transforms applied to each job in this kind.
transforms = TransformSequence()
     64 
     65 
     66 @transforms.add
     67 def remove_name(config, jobs):
     68    for job in jobs:
     69        if "name" in job:
     70            del job["name"]
     71        yield job
     72 
     73 
# Validate only after remove_name has run, since 'name' is not in the schema.
transforms.add_validate(packaging_description_schema)
     75 
     76 
     77 @transforms.add
     78 def copy_in_useful_magic(config, jobs):
     79    """Copy attributes from upstream task to be used for keyed configuration."""
     80    for job in jobs:
     81        dep = get_primary_dependency(config, job)
     82        assert dep
     83 
     84        job["build-platform"] = dep.attributes.get("build_platform")
     85        yield job
     86 
     87 
     88 @transforms.add
     89 def handle_keyed_by(config, jobs):
     90    """Resolve fields that can be keyed by platform, etc."""
     91    fields = [
     92        "mozharness.config",
     93        "package-formats",
     94    ]
     95    for job in jobs:
     96        job = copy.deepcopy(job)  # don't overwrite dict values here
     97        for field in fields:
     98            resolve_keyed_by(item=job, field=field, item_name="?")
     99        yield job
    100 
    101 
    102 @transforms.add
    103 def make_repackage_description(config, jobs):
    104    for job in jobs:
    105        dep_job = get_primary_dependency(config, job)
    106        assert dep_job
    107 
    108        label = job.get("label", dep_job.label.replace("signing-", "repackage-"))
    109        job["label"] = label
    110 
    111        yield job
    112 
    113 
    114 @transforms.add
    115 def make_job_description(config, jobs):
    116    for job in jobs:
    117        dep_job = get_primary_dependency(config, job)
    118        assert dep_job
    119 
    120        attributes = copy_attributes_from_dependent_job(dep_job)
    121        build_platform = attributes["build_platform"]
    122 
    123        if job["build-platform"].startswith("win"):
    124            if dep_job.kind.endswith("signing"):
    125                continue
    126        if job["build-platform"].startswith("macosx"):
    127            if dep_job.kind.endswith("repack"):
    128                continue
    129        dependencies = {dep_job.attributes.get("kind"): dep_job.label}
    130        dependencies.update(dep_job.dependencies)
    131 
    132        signing_task = None
    133        for dependency in dependencies.keys():
    134            if build_platform.startswith("macosx") and dependency.endswith("signing"):
    135                signing_task = dependency
    136            elif build_platform.startswith("win") and dependency.endswith("repack"):
    137                signing_task = dependency
    138 
    139        attributes["repackage_type"] = "repackage"
    140 
    141        repack_id = job["extra"]["repack_id"]
    142 
    143        partner_config = get_partner_config_by_kind(config, config.kind)
    144        partner, subpartner, _ = repack_id.split("/")
    145        repack_stub_installer = partner_config[partner][subpartner].get(
    146            "repack_stub_installer"
    147        )
    148        if build_platform.startswith("win32") and repack_stub_installer:
    149            job["package-formats"].append("installer-stub")
    150 
    151        repackage_config = []
    152        for format in job.get("package-formats"):
    153            command = copy.deepcopy(PACKAGE_FORMATS[format])
    154            substs = {
    155                "archive_format": archive_format(build_platform),
    156                "executable_extension": executable_extension(build_platform),
    157            }
    158            command["inputs"] = {
    159                name: filename.format(**substs)
    160                for name, filename in command["inputs"].items()
    161            }
    162            repackage_config.append(command)
    163 
    164        run = job.get("mozharness", {})
    165        run.update({
    166            "using": "mozharness",
    167            "script": "mozharness/scripts/repackage.py",
    168            "job-script": "taskcluster/scripts/builder/repackage.sh",
    169            "actions": ["setup", "repackage"],
    170            "extra-config": {
    171                "repackage_config": repackage_config,
    172            },
    173        })
    174 
    175        worker = {
    176            "chain-of-trust": True,
    177            "max-run-time": 3600,
    178            "taskcluster-proxy": True if get_artifact_prefix(dep_job) else False,
    179            "env": {
    180                "REPACK_ID": repack_id,
    181            },
    182            # Don't add generic artifact directory.
    183            "skip-artifacts": True,
    184        }
    185 
    186        worker_type = "b-linux"
    187 
    188        worker["artifacts"] = _generate_task_output_files(
    189            dep_job,
    190            worker_type_implementation(config.graph_config, config.params, worker_type),
    191            repackage_config,
    192            partner=repack_id,
    193        )
    194 
    195        description = (
    196            "Repackaging for repack_id '{repack_id}' for build '"
    197            "{build_platform}/{build_type}'".format(
    198                repack_id=job["extra"]["repack_id"],
    199                build_platform=attributes.get("build_platform"),
    200                build_type=attributes.get("build_type"),
    201            )
    202        )
    203 
    204        task = {
    205            "label": job["label"],
    206            "description": description,
    207            "worker-type": worker_type,
    208            "dependencies": dependencies,
    209            "attributes": attributes,
    210            "scopes": ["queue:get-artifact:releng/partner/*"],
    211            "run-on-projects": dep_job.attributes.get("run_on_projects"),
    212            "routes": job.get("routes", []),
    213            "extra": job.get("extra", {}),
    214            "worker": worker,
    215            "run": run,
    216            "fetches": _generate_download_config(
    217                dep_job,
    218                build_platform,
    219                signing_task,
    220                partner=repack_id,
    221                project=config.params["project"],
    222                repack_stub_installer=repack_stub_installer,
    223            ),
    224        }
    225 
    226        # we may have reduced the priority for partner jobs, otherwise task.py will set it
    227        if job.get("priority"):
    228            task["priority"] = job["priority"]
    229        if build_platform.startswith("macosx"):
    230            task.setdefault("fetches", {}).setdefault("toolchain", []).extend([
    231                "linux64-libdmg",
    232                "linux64-hfsplus",
    233                "linux64-node",
    234            ])
    235        yield task
    236 
    237 
    238 def _generate_download_config(
    239    task,
    240    build_platform,
    241    signing_task,
    242    partner=None,
    243    project=None,
    244    repack_stub_installer=False,
    245 ):
    246    locale_path = f"{partner}/" if partner else ""
    247 
    248    if build_platform.startswith("macosx"):
    249        return {
    250            signing_task: [
    251                {
    252                    "artifact": f"{locale_path}target.tar.gz",
    253                    "extract": False,
    254                },
    255            ],
    256        }
    257    if build_platform.startswith("win"):
    258        download_config = [
    259            {
    260                "artifact": f"{locale_path}target.zip",
    261                "extract": False,
    262            },
    263            f"{locale_path}setup.exe",
    264        ]
    265        if build_platform.startswith("win32") and repack_stub_installer:
    266            download_config.extend([
    267                {
    268                    "artifact": f"{locale_path}target-stub.zip",
    269                    "extract": False,
    270                },
    271                f"{locale_path}setup-stub.exe",
    272            ])
    273        return {signing_task: download_config}
    274 
    275    raise NotImplementedError(f'Unsupported build_platform: "{build_platform}"')
    276 
    277 
    278 def _generate_task_output_files(task, worker_implementation, repackage_config, partner):
    279    """We carefully generate an explicit list here, but there's an artifacts directory
    280    too, courtesy of generic_worker_add_artifacts() (windows) or docker_worker_add_artifacts().
    281    Any errors here are likely masked by that.
    282    """
    283    partner_output_path = f"{partner}/"
    284    artifact_prefix = get_artifact_prefix(task)
    285 
    286    if worker_implementation == ("docker-worker", "linux"):
    287        local_prefix = "/builds/worker/workspace/"
    288    elif worker_implementation == ("generic-worker", "windows"):
    289        local_prefix = "workspace/"
    290    else:
    291        raise NotImplementedError(
    292            f'Unsupported worker implementation: "{worker_implementation}"'
    293        )
    294 
    295    output_files = []
    296    for config in repackage_config:
    297        output_files.append({
    298            "type": "file",
    299            "path": "{}outputs/{}{}".format(
    300                local_prefix, partner_output_path, config["output"]
    301            ),
    302            "name": "{}/{}{}".format(
    303                artifact_prefix, partner_output_path, config["output"]
    304            ),
    305        })
    306    return output_files