tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

common.py (7926B)


      1 # This Source Code Form is subject to the terms of the Mozilla Public
      2 # License, v. 2.0. If a copy of the MPL was not distributed with this
      3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
      4 """
      5 Common support for various job types.  These functions are all named after the
      6 worker implementation they operate on, and take the same three parameters, for
      7 consistency.
      8 """
      9 
     10 from taskgraph.transforms.run.common import CACHES, add_cache
     11 from taskgraph.util import json
     12 from taskgraph.util.keyed_by import evaluate_keyed_by
     13 from taskgraph.util.taskcluster import get_artifact_prefix
     14 
# Scope template granting read access to a releng project secret; filled in by
# setup_secrets() with the graph's trust domain, the job kind, the run level,
# and the individual secret name (or "*" for all secrets).
SECRET_SCOPE = "secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}"
     16 
     17 
     18 def add_artifacts(config, job, taskdesc, path):
     19    taskdesc["worker"].setdefault("artifacts", []).append({
     20        "name": get_artifact_prefix(taskdesc),
     21        "path": path,
     22        "type": "directory",
     23    })
     24 
     25 
     26 def docker_worker_add_artifacts(config, job, taskdesc):
     27    """Adds an artifact directory to the task"""
     28    path = "{workdir}/artifacts/".format(**job["run"])
     29    taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
     30    add_artifacts(config, job, taskdesc, path)
     31 
     32 
     33 def generic_worker_add_artifacts(config, job, taskdesc):
     34    """Adds an artifact directory to the task"""
     35    # The path is the location on disk; it doesn't necessarily
     36    # mean the artifacts will be public or private; that is set via the name
     37    # attribute in add_artifacts.
     38    path = get_artifact_prefix(taskdesc)
     39    taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
     40    add_artifacts(config, job, taskdesc, path=path)
     41 
     42 
     43 def get_cache_name(config, job):
     44    cache_name = "checkouts"
     45 
     46    if config.params["repository_type"] == "git":
     47        # Ensure tasks cloning git don't try to use an hg cache or vice versa.
     48        cache_name += "-git"
     49 
     50        # Shallow clones need their own cache because they can interfere with
     51        # tasks that aren't expecting a shallow clone.
     52        if job["run"].get("shallow-clone", True):
     53            cache_name += "-shallow"
     54 
     55    else:
     56        # Sparse checkouts need their own cache because they can interfere with
     57        # clients that aren't sparse aware.
     58        if job["run"]["sparse-profile"]:
     59            cache_name += "-sparse"
     60 
     61        # Workers using Mercurial >= 5.8 will enable revlog-compression-zstd, which
     62        # workers using older versions can't understand, so they can't share cache.
     63        # At the moment, only docker workers use the newer version.
     64        if job["worker"]["implementation"] == "docker-worker":
     65            cache_name += "-hg58"
     66 
     67    return cache_name
     68 
     69 
     70 def support_vcs_checkout(config, job, taskdesc, repo_configs):
     71    """Update a job/task with parameters to enable a VCS checkout.
     72 
     73    This can only be used with ``run-task`` tasks, as the cache name is
     74    reserved for ``run-task`` tasks.
     75    """
     76    worker = job["worker"]
     77    is_mac = worker["os"] == "macosx"
     78    is_win = worker["os"] == "windows"
     79    is_linux = worker["os"] == "linux" or "linux-bitbar" or "linux-lambda"
     80    is_docker = worker["implementation"] == "docker-worker"
     81    assert is_mac or is_win or is_linux
     82 
     83    if is_win:
     84        checkoutdir = "build"
     85        geckodir = f"{checkoutdir}/src"
     86        if "aarch64" in job["worker-type"] or "a64" in job["worker-type"]:
     87            # arm64 instances on azure don't support local ssds
     88            hgstore = f"{checkoutdir}/hg-store"
     89        else:
     90            hgstore = "y:/hg-shared"
     91    elif is_docker:
     92        checkoutdir = "{workdir}/checkouts".format(**job["run"])
     93        geckodir = f"{checkoutdir}/gecko"
     94        hgstore = f"{checkoutdir}/hg-store"
     95    else:
     96        checkoutdir = "checkouts"
     97        geckodir = f"{checkoutdir}/gecko"
     98        hgstore = f"{checkoutdir}/hg-shared"
     99 
    100    # Use some Gecko specific logic to determine cache name.
    101    CACHES["checkout"]["cache_name"] = get_cache_name
    102 
    103    env = taskdesc["worker"].setdefault("env", {})
    104    env.update({
    105        "HG_STORE_PATH": hgstore,
    106        "REPOSITORIES": json.dumps({
    107            repo.prefix: repo.name for repo in repo_configs.values()
    108        }),
    109    })
    110    for repo_config in repo_configs.values():
    111        env.update({
    112            f"{repo_config.prefix.upper()}_{key}": value
    113            for key, value in {
    114                "BASE_REPOSITORY": repo_config.base_repository,
    115                "HEAD_REPOSITORY": repo_config.head_repository,
    116                "HEAD_REV": repo_config.head_rev,
    117                "HEAD_REF": repo_config.head_ref,
    118                "REPOSITORY_TYPE": repo_config.type,
    119                "SSH_SECRET_NAME": repo_config.ssh_secret_name,
    120            }.items()
    121            if value is not None
    122        })
    123        if repo_config.ssh_secret_name:
    124            taskdesc["scopes"].append(f"secrets:get:{repo_config.ssh_secret_name}")
    125 
    126    gecko_path = env.setdefault("GECKO_PATH", geckodir)
    127 
    128    if "comm_base_repository" in config.params:
    129        taskdesc["worker"]["env"].update({
    130            "COMM_BASE_REPOSITORY": config.params["comm_base_repository"],
    131            "COMM_HEAD_REPOSITORY": config.params["comm_head_repository"],
    132            "COMM_HEAD_REV": config.params["comm_head_rev"],
    133        })
    134    elif job["run"].get("comm-checkout", False):
    135        raise Exception(
    136            "Can't checkout from comm-* repository if not given a repository."
    137        )
    138 
    139    # Give task access to hgfingerprint secret so it can pin the certificate
    140    # for hg.mozilla.org.
    141    taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgfingerprint")
    142    taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgmointernal")
    143 
    144    # only some worker platforms have taskcluster-proxy enabled
    145    if job["worker"]["implementation"] in ("docker-worker",):
    146        taskdesc["worker"]["taskcluster-proxy"] = True
    147 
    148    return gecko_path
    149 
    150 
    151 def setup_secrets(config, job, taskdesc):
    152    """Set up access to secrets via taskcluster-proxy.  The value of
    153    run['secrets'] should be a boolean or a list of secret names that
    154    can be accessed."""
    155    if not job["run"].get("secrets"):
    156        return
    157 
    158    taskdesc["worker"]["taskcluster-proxy"] = True
    159    secrets = job["run"]["secrets"]
    160    if secrets is True:
    161        secrets = ["*"]
    162    for secret in secrets:
    163        taskdesc["scopes"].append(
    164            SECRET_SCOPE.format(
    165                trust_domain=config.graph_config["trust-domain"],
    166                kind=job["treeherder"]["kind"],
    167                level=config.params["level"],
    168                secret=secret,
    169            )
    170        )
    171 
    172 
    173 def add_tooltool(config, job, taskdesc, internal=False):
    174    """Give the task access to tooltool.
    175 
    176    Enables the tooltool cache. Adds releng proxy. Configures scopes.
    177 
    178    By default, only public tooltool access will be granted. Access to internal
    179    tooltool can be enabled via ``internal=True``.
    180 
    181    This can only be used with ``run-task`` tasks, as the cache name is
    182    reserved for use with ``run-task``.
    183    """
    184 
    185    if job["worker"]["implementation"] in ("docker-worker",):
    186        add_cache(
    187            job,
    188            taskdesc,
    189            "tooltool-cache",
    190            "{workdir}/tooltool-cache".format(**job["run"]),
    191        )
    192 
    193        taskdesc["worker"].setdefault("env", {}).update({
    194            "TOOLTOOL_CACHE": "{workdir}/tooltool-cache".format(**job["run"]),
    195        })
    196    elif not internal:
    197        return
    198 
    199    taskdesc["worker"]["taskcluster-proxy"] = True
    200    taskdesc["scopes"].extend([
    201        "project:releng:services/tooltool/api/download/public",
    202    ])
    203 
    204    if internal:
    205        taskdesc["scopes"].extend([
    206            "project:releng:services/tooltool/api/download/internal",
    207        ])
    208 
    209 
    210 def get_expiration(config, policy="default"):
    211    expires = evaluate_keyed_by(
    212        config.graph_config["expiration-policy"],
    213        "artifact expiration",
    214        {"project": config.params["project"], "level": config.params["level"]},
    215    )[policy]
    216    return expires