task.py (97810B)
1 # This Source Code Form is subject to the terms of the Mozilla Public 2 # License, v. 2.0. If a copy of the MPL was not distributed with this 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. 4 """ 5 These transformations take a task description and turn it into a TaskCluster 6 task definition (along with attributes, label, etc.). The input to these 7 transformations is generic to any kind of task, but abstracts away some of the 8 complexities of worker implementations, scopes, and treeherder annotations. 9 """ 10 11 import datetime 12 import hashlib 13 import os 14 import re 15 import time 16 17 from mozbuild.util import memoize 18 from mozilla_taskgraph.util.signed_artifacts import get_signed_artifacts 19 from taskcluster.utils import fromNow 20 from taskgraph import MAX_DEPENDENCIES 21 from taskgraph.transforms.base import TransformSequence 22 from taskgraph.transforms.task import payload_builder, payload_builders 23 from taskgraph.util.copy import deepcopy 24 from taskgraph.util.keyed_by import evaluate_keyed_by 25 from taskgraph.util.schema import ( 26 Schema, 27 optionally_keyed_by, 28 resolve_keyed_by, 29 taskref_or_string, 30 validate_schema, 31 ) 32 from taskgraph.util.treeherder import split_symbol 33 from voluptuous import All, Any, Extra, Match, NotIn, Optional, Required 34 35 from gecko_taskgraph import GECKO 36 from gecko_taskgraph.optimize.schema import OptimizationSchema 37 from gecko_taskgraph.transforms.job.common import get_expiration 38 from gecko_taskgraph.util import docker as dockerutil 39 from gecko_taskgraph.util.attributes import TRUNK_PROJECTS, is_try, release_level 40 from gecko_taskgraph.util.chunking import TEST_VARIANTS 41 from gecko_taskgraph.util.hash import hash_path 42 from gecko_taskgraph.util.partners import get_partners_to_be_published 43 from gecko_taskgraph.util.scriptworker import BALROG_ACTIONS, get_release_config 44 from gecko_taskgraph.util.workertypes import get_worker_type, worker_type_implementation 45 46 
# Paths to the run-task wrapper scripts; hashed into cache names so that any
# change to run-task invalidates existing caches (see _run_task_suffix).
RUN_TASK_HG = os.path.join(GECKO, "taskcluster", "scripts", "run-task")
RUN_TASK_GIT = os.path.join(
    GECKO,
    "third_party",
    "python",
    "taskcluster_taskgraph",
    "taskgraph",
    "run-task",
    "run-task",
)

# GCS project used for sccache storage when sccache is enabled.
SCCACHE_GCS_PROJECT = "sccache-3"


@memoize
def _run_task_suffix(repo_type):
    """String to append to cache names under control of run-task.

    The suffix is the first 20 hex characters of the hash of the run-task
    script appropriate for ``repo_type`` ("hg" or anything else, treated
    as git), so caches are invalidated whenever run-task changes.
    """
    if repo_type == "hg":
        return hash_path(RUN_TASK_HG)[0:20]
    return hash_path(RUN_TASK_GIT)[0:20]


def _compute_geckoview_version(app_version, moz_build_date):
    """Geckoview version string that matches geckoview gradle configuration.

    Strips the first milestone marker (e.g. "a1") from ``app_version`` and
    combines the leading "major.minor" components with ``moz_build_date``.
    """
    # Must be synchronized with /mobile/android/geckoview/build.gradle computeVersionCode(...)
    # Only the first milestone marker is removed (count=1).
    version_without_milestone = re.sub(r"a[0-9]", "", app_version, count=1)
    parts = version_without_milestone.split(".")
    return f"{parts[0]}.{parts[1]}.{moz_build_date}"


# A task description is a general description of a TaskCluster task
task_description_schema = Schema({
    # the label for this task
    Required("label"): str,
    # description of the task (for metadata)
    Required("description"): str,
    # attributes for this task
    Optional("attributes"): {str: object},
    # relative path (from config.path) to the file task was defined in
    Optional("task-from"): str,
    # dependencies of this task, keyed by name; these are passed through
    # verbatim and subject to the interpretation of the Task's get_dependencies
    # method.
    Optional("dependencies"): {
        All(
            str,
            NotIn(
                ["self", "decision"],
                "Can't use 'self' or 'decision' as dependency names.",
            ),
        ): object,
    },
    # Soft dependencies of this task, as a list of tasks labels
    Optional("soft-dependencies"): [str],
    # Dependencies that must be scheduled in order for this task to run.
    Optional("if-dependencies"): [str],
    Optional("requires"): Any("all-completed", "all-resolved"),
    # expiration and deadline times, relative to task creation, with units
    # (e.g., "14 days"). Defaults are set based on the project.
    Optional("expires-after"): str,
    Optional("deadline-after"): str,
    Optional("expiration-policy"): str,
    # custom routes for this task; the default treeherder routes will be added
    # automatically
    Optional("routes"): [str],
    # custom scopes for this task; any scopes required for the worker will be
    # added automatically. The following parameters will be substituted in each
    # scope:
    #  {level} -- the scm level of this push
    #  {project} -- the project of this push
    Optional("scopes"): [str],
    # Tags
    Optional("tags"): {str: str},
    # custom "task.extra" content
    Optional("extra"): {str: object},
    # treeherder-related information; see
    # https://firefox-ci-tc.services.mozilla.com/schemas/taskcluster-treeherder/v1/task-treeherder-config.json
    # If not specified, no treeherder extra information or routes will be
    # added to the task
    Optional("treeherder"): {
        # either a bare symbol, or "grp(sym)".
        "symbol": str,
        # the job kind
        "kind": Any("build", "test", "other"),
        # tier for this task
        "tier": int,
        # task platform, in the form platform/collection, used to set
        # treeherder.machine.platform and treeherder.collection or
        # treeherder.labels
        "platform": Match("^[A-Za-z0-9_-]{1,50}/[A-Za-z0-9_-]{1,50}$"),
    },
    # information for indexing this build so its artifacts can be discovered;
    # if omitted, the build will not be indexed.
    Optional("index"): {
        # the name of the product this build produces
        "product": str,
        # the names to use for this job in the TaskCluster index
        "job-name": str,
        # Type of gecko v2 index to use
        "type": Any(
            "generic",
            "l10n",
            "shippable",
            "shippable-l10n",
            "android-shippable",
            "android-shippable-with-multi-l10n",
            "shippable-with-multi-l10n",
        ),
        # The rank that the task will receive in the TaskCluster
        # index. A newly completed task supersedes the currently
        # indexed task iff it has a higher rank. If unspecified,
        # 'by-tier' behavior will be used.
        "rank": Any(
            # Rank is equal the timestamp of the build_date for tier-1
            # tasks, and one for non-tier-1. This sorts tier-{2,3}
            # builds below tier-1 in the index, but above eager-index.
            "by-tier",
            # Rank is given as an integer constant (e.g. zero to make
            # sure a task is last in the index).
            int,
            # Rank is equal to the timestamp of the build_date. This
            # option can be used to override the 'by-tier' behavior
            # for non-tier-1 tasks.
            "build_date",
        ),
    },
    # The `run_on_repo_type` attribute, defaulting to "hg". This dictates
    # the types of repositories on which this task should be included in
    # the target task set. See the attributes documentation for details.
    Optional("run-on-repo-type"): [Any("git", "hg")],
    # The `run_on_projects` attribute, defaulting to "all". This dictates the
    # projects on which this task should be included in the target task set.
    # See the attributes documentation for details.
    Optional("run-on-projects"): optionally_keyed_by("build-platform", [str]),
    # Like `run_on_projects`, `run-on-hg-branches` defaults to "all".
    Optional("run-on-hg-branches"): optionally_keyed_by("project", [str]),
    # Specifies git branches for which this task should run.
    Optional("run-on-git-branches"): [str],
    # The `shipping_phase` attribute, defaulting to None. This specifies the
    # release promotion phase that this task belongs to.
    Required("shipping-phase"): Any(
        None,
        "build",
        "promote",
        "push",
        "ship",
    ),
    # The `shipping_product` attribute, defaulting to None. This specifies the
    # release promotion product that this task belongs to.
    Required("shipping-product"): Any(None, str),
    # The `always-target` attribute will cause the task to be included in the
    # target_task_graph regardless of filtering. Tasks included in this manner
    # will be candidates for optimization even when `optimize_target_tasks` is
    # False, unless the task was also explicitly chosen by the target_tasks
    # method.
    Required("always-target"): bool,
    # Optimization to perform on this task during the optimization phase.
    # Optimizations are defined in taskcluster/gecko_taskgraph/optimize.py.
    Required("optimization"): OptimizationSchema,
    # the provisioner-id/worker-type for the task. The following parameters will
    # be substituted in this string:
    #  {level} -- the scm level of this push
    "worker-type": str,
    # Whether the job should use sccache compiler caching.
    Required("use-sccache"): bool,
    # information specific to the worker implementation that will run this task
    Optional("worker"): {
        Required("implementation"): str,
        Extra: object,
    },
    # Override the default priority for the project
    Optional("priority"): str,
    # Override the default 5 retries
    Optional("retries"): int,
})

TC_TREEHERDER_SCHEMA_URL = (
    "https://github.com/taskcluster/taskcluster-treeherder/"
    "blob/master/schemas/task-treeherder-config.yml"
)


UNKNOWN_GROUP_NAME = (
    "Treeherder group {} (from {}) has no name; add it to taskcluster/config.yml"
)

# Gecko-v2 index route templates applied to every indexed task.
V2_ROUTE_TEMPLATES = [
    "index.{trust-domain}.v2.{project}.latest.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.pushdate.{build_date}.latest.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.revision.{branch_git_rev}.{product}.{job-name}",
]

# {central, inbound, autoland} write to a "trunk" index prefix. This facilitates
# walking of tasks with similar configurations.
243 V2_TRUNK_ROUTE_TEMPLATES = [ 244 "index.{trust-domain}.v2.trunk.revision.{branch_rev}.{product}.{job-name}", 245 ] 246 247 V2_SHIPPABLE_TEMPLATES = [ 248 "index.{trust-domain}.v2.{project}.shippable.latest.{product}.{job-name}", 249 "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}.{job-name}", # noqa - too long 250 "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}.{job-name}", 251 "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}.{job-name}", 252 "index.{trust-domain}.v2.{project}.shippable.revision.{branch_git_rev}.{product}.{job-name}", 253 ] 254 255 V2_SHIPPABLE_L10N_TEMPLATES = [ 256 "index.{trust-domain}.v2.{project}.shippable.latest.{product}-l10n.{job-name}.{locale}", 257 "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", # noqa - too long 258 "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}-l10n.{job-name}.{locale}", # noqa - too long 259 "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", # noqa - too long 260 ] 261 262 V2_L10N_TEMPLATES = [ 263 "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", 264 "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}-l10n.{job-name}.{locale}", # noqa - too long 265 "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}-l10n.{job-name}.{locale}", 266 "index.{trust-domain}.v2.{project}.latest.{product}-l10n.{job-name}.{locale}", 267 ] 268 269 # This index is specifically for builds that include geckoview releases, 270 # so we can hard-code the project to "geckoview" 271 V2_GECKOVIEW_RELEASE = "index.{trust-domain}.v2.{project}.geckoview-version.{geckoview-version}.{product}.{job-name}" # noqa - too long 272 273 # the roots of the treeherder routes 274 TREEHERDER_ROUTE_ROOT = "tc-treeherder" 275 276 277 def 
get_branch_rev(config): 278 return config.params[ 279 "{}head_rev".format(config.graph_config["project-repo-param-prefix"]) 280 ] 281 282 283 def get_branch_git_rev(config): 284 return config.params[ 285 "{}head_git_rev".format(config.graph_config["project-repo-param-prefix"]) 286 ] 287 288 289 def get_branch_repo(config): 290 return config.params[ 291 "{}head_repository".format( 292 config.graph_config["project-repo-param-prefix"], 293 ) 294 ] 295 296 297 def get_project_alias(config): 298 if config.params["tasks_for"].startswith("github-pull-request"): 299 return f"{config.params['project']}-pr" 300 return config.params["project"] 301 302 303 @memoize 304 def get_default_priority(graph_config, project): 305 return evaluate_keyed_by( 306 graph_config["task-priority"], "Graph Config", {"project": project} 307 ) 308 309 310 # define a collection of index builders, depending on the type implementation 311 index_builders = {} 312 313 314 def index_builder(name): 315 def wrap(func): 316 assert name not in index_builders, f"duplicate index builder name {name}" 317 index_builders[name] = func 318 return func 319 320 return wrap 321 322 323 UNSUPPORTED_INDEX_PRODUCT_ERROR = """\ 324 The gecko-v2 product {product} is not in the list of configured products in 325 `taskcluster/config.yml'. 326 """ 327 328 329 def verify_index(config, index): 330 product = index["product"] 331 if product not in config.graph_config["index"]["products"]: 332 raise Exception(UNSUPPORTED_INDEX_PRODUCT_ERROR.format(product=product)) 333 334 335 RUN_TASK_RE = re.compile(r"run-task(-(git|hg))?$") 336 337 338 def is_run_task(cmd: str) -> bool: 339 return bool(re.search(RUN_TASK_RE, cmd)) 340 341 342 @payload_builder( 343 "docker-worker", 344 schema={ 345 Required("os"): "linux", 346 # For tasks that will run in docker-worker, this is the 347 # name of the docker image or in-tree docker image to run the task in. If 348 # in-tree, then a dependency will be created automatically. 
This is 349 # generally `desktop-test`, or an image that acts an awful lot like it. 350 Required("docker-image"): Any( 351 # a raw Docker image path (repo/image:tag) 352 str, 353 # an in-tree generated docker image (from `taskcluster/docker/<name>`) 354 {"in-tree": str}, 355 # an indexed docker image 356 {"indexed": str}, 357 ), 358 # worker features that should be enabled 359 Required("chain-of-trust"): bool, 360 Required("taskcluster-proxy"): bool, 361 Required("allow-ptrace"): bool, 362 Required("loopback-video"): bool, 363 Required("loopback-audio"): bool, 364 Required("docker-in-docker"): bool, # (aka 'dind') 365 Required("privileged"): bool, 366 Optional("kvm"): bool, 367 # Paths to Docker volumes. 368 # 369 # For in-tree Docker images, volumes can be parsed from Dockerfile. 370 # This only works for the Dockerfile itself: if a volume is defined in 371 # a base image, it will need to be declared here. Out-of-tree Docker 372 # images will also require explicit volume annotation. 373 # 374 # Caches are often mounted to the same path as Docker volumes. In this 375 # case, they take precedence over a Docker volume. But a volume still 376 # needs to be declared for the path. 377 Optional("volumes"): [str], 378 Optional( 379 "required-volumes", 380 description=( 381 "Paths that are required to be volumes for performance reasons. " 382 "For in-tree images, these paths will be checked to verify that they " 383 "are defined as volumes." 384 ), 385 ): [str], 386 # caches to set up for the task 387 Optional("caches"): [ 388 { 389 # only one type is supported by any of the workers right now 390 "type": "persistent", 391 # name of the cache, allowing re-use by subsequent tasks naming the 392 # same cache 393 "name": str, 394 # location in the task image where the cache will be mounted 395 "mount-point": str, 396 # Whether the cache is not used in untrusted environments 397 # (like the Try repo). 
398 Optional("skip-untrusted"): bool, 399 } 400 ], 401 # artifacts to extract from the task image after completion 402 Optional("artifacts"): [ 403 { 404 # type of artifact -- simple file, or recursive directory 405 "type": Any("file", "directory"), 406 # task image path from which to read artifact 407 "path": str, 408 # name of the produced artifact (root of the names for 409 # type=directory) 410 "name": str, 411 "expires-after": str, 412 } 413 ], 414 # environment variables 415 Required("env"): {str: taskref_or_string}, 416 # the command to run; if not given, docker-worker will default to the 417 # command in the docker image 418 Optional("command"): [taskref_or_string], 419 # the maximum time to run, in seconds 420 Required("max-run-time"): int, 421 # the exit status code(s) that indicates the task should be retried 422 Optional("retry-exit-status"): [int], 423 # the exit status code(s) that indicates the caches used by the task 424 # should be purged 425 Optional("purge-caches-exit-status"): [int], 426 # Whether any artifacts are assigned to this worker 427 Optional("skip-artifacts"): bool, 428 }, 429 ) 430 def build_docker_worker_payload(config, task, task_def): 431 worker = task["worker"] 432 level = int(config.params["level"]) 433 434 image = worker["docker-image"] 435 if isinstance(image, dict): 436 if "in-tree" in image: 437 name = image["in-tree"] 438 docker_image_task = "docker-image-" + image["in-tree"] 439 task.setdefault("dependencies", {})["docker-image"] = docker_image_task 440 441 image = { 442 "path": "public/image.tar.zst", 443 "taskId": {"task-reference": "<docker-image>"}, 444 "type": "task-image", 445 } 446 447 # Find VOLUME in Dockerfile. 
448 volumes = dockerutil.parse_volumes(name) 449 for v in sorted(volumes): 450 if v in worker["volumes"]: 451 raise Exception( 452 "volume %s already defined; " 453 "if it is defined in a Dockerfile, " 454 "it does not need to be specified in the " 455 "worker definition" % v 456 ) 457 458 worker["volumes"].append(v) 459 460 elif "indexed" in image: 461 image = { 462 "path": "public/image.tar.zst", 463 "namespace": image["indexed"], 464 "type": "indexed-image", 465 } 466 else: 467 raise Exception("unknown docker image type") 468 469 features = {} 470 471 if worker.get("taskcluster-proxy"): 472 features["taskclusterProxy"] = True 473 474 if worker.get("allow-ptrace"): 475 features["allowPtrace"] = True 476 task_def["scopes"].append("docker-worker:feature:allowPtrace") 477 478 if worker.get("chain-of-trust"): 479 features["chainOfTrust"] = True 480 481 if worker.get("docker-in-docker"): 482 features["dind"] = True 483 484 # Never enable sccache on the toolchains repo, as there is no benefit from it 485 # because each push uses a different compiler. 486 if task.get("use-sccache") and config.params["project"] != "toolchains": 487 features["taskclusterProxy"] = True 488 task_def["scopes"].append( 489 "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format( 490 trust_domain=config.graph_config["trust-domain"], 491 level=config.params["level"], 492 ) 493 ) 494 worker["env"]["USE_SCCACHE"] = "1" 495 worker["env"]["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT 496 # Disable sccache idle shutdown. 
497 worker["env"]["SCCACHE_IDLE_TIMEOUT"] = "0" 498 else: 499 worker["env"]["SCCACHE_DISABLE"] = "1" 500 501 capabilities = {} 502 503 for lo in "audio", "video": 504 if worker.get("loopback-" + lo): 505 capitalized = "loopback" + lo.capitalize() 506 devices = capabilities.setdefault("devices", {}) 507 devices[capitalized] = True 508 task_def["scopes"].append("docker-worker:capability:device:" + capitalized) 509 510 if worker.get("kvm"): 511 devices = capabilities.setdefault("devices", {}) 512 devices["kvm"] = True 513 task_def["scopes"].append("docker-worker:capability:device:kvm") 514 515 if worker.get("privileged"): 516 capabilities["privileged"] = True 517 task_def["scopes"].append("docker-worker:capability:privileged") 518 519 task_def["payload"] = payload = { 520 "image": image, 521 "env": worker["env"], 522 } 523 if "command" in worker: 524 payload["command"] = worker["command"] 525 526 if "max-run-time" in worker: 527 payload["maxRunTime"] = worker["max-run-time"] 528 529 run_task = is_run_task(payload.get("command", [""])[0]) 530 531 # run-task exits EXIT_PURGE_CACHES if there is a problem with caches. 532 # Automatically retry the tasks and purge caches if we see this exit 533 # code. 534 # TODO move this closer to code adding run-task once bug 1469697 is 535 # addressed. 
536 if run_task: 537 worker.setdefault("retry-exit-status", []).append(72) 538 worker.setdefault("purge-caches-exit-status", []).append(72) 539 540 payload["onExitStatus"] = {} 541 if "retry-exit-status" in worker: 542 payload["onExitStatus"]["retry"] = worker["retry-exit-status"] 543 if "purge-caches-exit-status" in worker: 544 payload["onExitStatus"]["purgeCaches"] = worker["purge-caches-exit-status"] 545 546 if "artifacts" in worker: 547 artifacts = {} 548 for artifact in worker["artifacts"]: 549 artifacts[artifact["name"]] = { 550 "path": artifact["path"], 551 "type": artifact["type"], 552 "expires": {"relative-datestamp": artifact["expires-after"]}, 553 } 554 payload["artifacts"] = artifacts 555 556 if isinstance(worker.get("docker-image"), str): 557 out_of_tree_image = worker["docker-image"] 558 else: 559 out_of_tree_image = None 560 image = worker.get("docker-image", {}).get("in-tree") 561 562 if "caches" in worker: 563 caches = {} 564 565 # run-task knows how to validate caches. 566 # 567 # To help ensure new run-task features and bug fixes don't interfere 568 # with existing caches, we seed the hash of run-task into cache names. 569 # So, any time run-task changes, we should get a fresh set of caches. 570 # This means run-task can make changes to cache interaction at any time 571 # without regards for backwards or future compatibility. 572 # 573 # But this mechanism only works for in-tree Docker images that are built 574 # with the current run-task! For out-of-tree Docker images, we have no 575 # way of knowing their content of run-task. So, in addition to varying 576 # cache names by the contents of run-task, we also take the Docker image 577 # name into consideration. This means that different Docker images will 578 # never share the same cache. This is a bit unfortunate. But it is the 579 # safest thing to do. Fortunately, most images are defined in-tree. 
580 # 581 # For out-of-tree Docker images, we don't strictly need to incorporate 582 # the run-task content into the cache name. However, doing so preserves 583 # the mechanism whereby changing run-task results in new caches 584 # everywhere. 585 586 # As an additional mechanism to force the use of different caches, the 587 # string literal in the variable below can be changed. This is 588 # preferred to changing run-task because it doesn't require images 589 # to be rebuilt. 590 cache_version = "v3" 591 592 if run_task: 593 suffix = ( 594 f"{cache_version}-{_run_task_suffix(config.params['repository_type'])}" 595 ) 596 597 if out_of_tree_image: 598 name_hash = hashlib.sha256( 599 out_of_tree_image.encode("utf-8") 600 ).hexdigest() 601 suffix += name_hash[0:12] 602 603 else: 604 suffix = cache_version 605 606 skip_untrusted = is_try(config.params) or level == 1 607 608 for cache in worker["caches"]: 609 # Some caches aren't enabled in environments where we can't 610 # guarantee certain behavior. Filter those out. 611 if cache.get("skip-untrusted") and skip_untrusted: 612 continue 613 614 name = "{trust_domain}-level-{level}-{name}-{suffix}".format( 615 trust_domain=config.graph_config["trust-domain"], 616 level=config.params["level"], 617 name=cache["name"], 618 suffix=suffix, 619 ) 620 621 caches[name] = cache["mount-point"] 622 task_def["scopes"].append("docker-worker:cache:%s" % name) 623 624 # Assertion: only run-task is interested in this. 625 if run_task: 626 payload["env"]["TASKCLUSTER_CACHES"] = ";".join(sorted(caches.values())) 627 628 payload["cache"] = caches 629 630 # And send down volumes information to run-task as well. 
631 if run_task and worker.get("volumes"): 632 payload["env"]["TASKCLUSTER_VOLUMES"] = ";".join(sorted(worker["volumes"])) 633 634 if payload.get("cache") and skip_untrusted: 635 payload["env"]["TASKCLUSTER_UNTRUSTED_CACHES"] = "1" 636 637 if features: 638 payload["features"] = features 639 if capabilities: 640 payload["capabilities"] = capabilities 641 642 check_caches_are_volumes(task) 643 check_required_volumes(task) 644 645 646 @payload_builder( 647 "generic-worker", 648 schema={ 649 Required("os"): Any( 650 "windows", "macosx", "linux", "linux-bitbar", "linux-lambda" 651 ), 652 # see http://schemas.taskcluster.net/generic-worker/v1/payload.json 653 # and https://docs.taskcluster.net/reference/workers/generic-worker/payload 654 # command is a list of commands to run, sequentially 655 # on Windows, each command is a string, on OS X and Linux, each command is 656 # a string array 657 Required("command"): Any( 658 [taskref_or_string], 659 [[taskref_or_string]], # Windows # Linux / OS X 660 ), 661 # artifacts to extract from the task image after completion; note that artifacts 662 # for the generic worker cannot have names 663 Optional("artifacts"): [ 664 { 665 # type of artifact -- simple file, or recursive directory 666 "type": Any("file", "directory"), 667 # filesystem path from which to read artifact 668 "path": str, 669 # if not specified, path is used for artifact name 670 Optional("name"): str, 671 "expires-after": str, 672 } 673 ], 674 # Directories and/or files to be mounted. 675 # The actual allowed combinations are stricter than the model below, 676 # but this provides a simple starting point. 677 # See https://docs.taskcluster.net/reference/workers/generic-worker/payload 678 Optional("mounts"): [ 679 { 680 # A unique name for the cache volume, implies writable cache directory 681 # (otherwise mount is a read-only file or directory). 
682 Optional("cache-name"): str, 683 # Optional content for pre-loading cache, or mandatory content for 684 # read-only file or directory. Pre-loaded content can come from either 685 # a task artifact or from a URL. 686 Optional("content"): { 687 # *** Either (artifact and task-id) or url must be specified. *** 688 # Artifact name that contains the content. 689 Optional("artifact"): str, 690 # Task ID that has the artifact that contains the content. 691 Optional("task-id"): taskref_or_string, 692 # URL that supplies the content in response to an unauthenticated 693 # GET request. 694 Optional("url"): str, 695 }, 696 # *** Either file or directory must be specified. *** 697 # If mounting a cache or read-only directory, the filesystem location of 698 # the directory should be specified as a relative path to the task 699 # directory here. 700 Optional("directory"): str, 701 # If mounting a file, specify the relative path within the task 702 # directory to mount the file (the file will be read only). 703 Optional("file"): str, 704 # Required if and only if `content` is specified and mounting a 705 # directory (not a file). This should be the archive format of the 706 # content (either pre-loaded cache or read-only directory). 
707 Optional("format"): Any("rar", "tar.bz2", "tar.gz", "zip", "tar.xz"), 708 } 709 ], 710 # environment variables 711 Required("env"): {str: taskref_or_string}, 712 # the maximum time to run, in seconds 713 Required("max-run-time"): int, 714 # os user groups for test task workers 715 Optional("os-groups"): [str], 716 # feature for test task to run as administarotr 717 Optional("run-as-administrator"): bool, 718 # optional features 719 Required("chain-of-trust"): bool, 720 Optional("taskcluster-proxy"): bool, 721 # the exit status code(s) that indicates the task should be retried 722 Optional("retry-exit-status"): [int], 723 # Wether any artifacts are assigned to this worker 724 Optional("skip-artifacts"): bool, 725 }, 726 ) 727 def build_generic_worker_payload(config, task, task_def): 728 worker = task["worker"] 729 features = {} 730 731 task_def["payload"] = { 732 "command": worker["command"], 733 "maxRunTime": worker["max-run-time"], 734 } 735 736 if worker["os"] == "windows": 737 task_def["payload"]["onExitStatus"] = { 738 "retry": [ 739 # These codes (on windows) indicate a process interruption, 740 # rather than a task run failure. See bug 1544403. 741 1073807364, # process force-killed due to system shutdown 742 3221225786, # sigint (any interrupt) 743 ] 744 } 745 if "retry-exit-status" in worker: 746 task_def["payload"].setdefault("onExitStatus", {}).setdefault( 747 "retry", [] 748 ).extend(worker["retry-exit-status"]) 749 if worker["os"] in ["linux-bitbar", "linux-lambda"]: 750 task_def["payload"].setdefault("onExitStatus", {}).setdefault("retry", []) 751 # exit code 4 is used to indicate an intermittent android device error 752 if 4 not in task_def["payload"]["onExitStatus"]["retry"]: 753 task_def["payload"]["onExitStatus"]["retry"].extend([4]) 754 755 env = worker.get("env", {}) 756 757 # Never enable sccache on the toolchains repo, as there is no benefit from it 758 # because each push uses a different compiler. 
759 if task.get("use-sccache") and config.params["project"] != "toolchains": 760 features["taskclusterProxy"] = True 761 task_def["scopes"].append( 762 "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format( 763 trust_domain=config.graph_config["trust-domain"], 764 level=config.params["level"], 765 ) 766 ) 767 env["USE_SCCACHE"] = "1" 768 worker["env"]["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT 769 # Disable sccache idle shutdown. 770 env["SCCACHE_IDLE_TIMEOUT"] = "0" 771 else: 772 env["SCCACHE_DISABLE"] = "1" 773 774 if env: 775 task_def["payload"]["env"] = env 776 777 artifacts = [] 778 779 for artifact in worker.get("artifacts", []): 780 a = { 781 "path": artifact["path"], 782 "type": artifact["type"], 783 "expires": {"relative-datestamp": artifact["expires-after"]}, 784 } 785 if "name" in artifact: 786 a["name"] = artifact["name"] 787 artifacts.append(a) 788 789 if artifacts: 790 task_def["payload"]["artifacts"] = artifacts 791 792 # Need to copy over mounts, but rename keys to respect naming convention 793 # * 'cache-name' -> 'cacheName' 794 # * 'task-id' -> 'taskId' 795 # All other key names are already suitable, and don't need renaming. 
796 mounts = deepcopy(worker.get("mounts", [])) 797 for mount in mounts: 798 if "cache-name" in mount: 799 mount["cacheName"] = "{trust_domain}-level-{level}-{name}".format( 800 trust_domain=config.graph_config["trust-domain"], 801 level=config.params["level"], 802 name=mount.pop("cache-name"), 803 ) 804 task_def["scopes"].append( 805 "generic-worker:cache:{}".format(mount["cacheName"]) 806 ) 807 if "content" in mount: 808 if "task-id" in mount["content"]: 809 mount["content"]["taskId"] = mount["content"].pop("task-id") 810 if "artifact" in mount["content"]: 811 if not mount["content"]["artifact"].startswith("public/"): 812 task_def["scopes"].append( 813 "queue:get-artifact:{}".format(mount["content"]["artifact"]) 814 ) 815 816 if mounts: 817 task_def["payload"]["mounts"] = mounts 818 819 if worker.get("os-groups"): 820 task_def["payload"]["osGroups"] = worker["os-groups"] 821 task_def["scopes"].extend([ 822 "generic-worker:os-group:{}/{}".format(task["worker-type"], group) 823 for group in worker["os-groups"] 824 ]) 825 826 if worker.get("chain-of-trust"): 827 features["chainOfTrust"] = True 828 829 if worker.get("taskcluster-proxy"): 830 features["taskclusterProxy"] = True 831 832 if worker.get("run-as-administrator", False): 833 features["runAsAdministrator"] = True 834 task_def["scopes"].append( 835 "generic-worker:run-as-administrator:{}".format(task["worker-type"]), 836 ) 837 838 if features: 839 task_def["payload"]["features"] = features 840 841 842 @payload_builder( 843 "iscript", 844 schema={ 845 Required("signing-type"): str, 846 # the maximum time to run, in seconds 847 Required("max-run-time"): int, 848 # list of artifact URLs for the artifacts that should be signed 849 Required("upstream-artifacts"): [ 850 { 851 # taskId of the task with the artifact 852 Required("taskId"): taskref_or_string, 853 # type of signing task (for CoT) 854 Required("taskType"): str, 855 # Paths to the artifacts to sign 856 Required("paths"): [str], 857 # Signing formats to 
use on each of the paths 858 Required("formats"): [str], 859 Optional("singleFileGlobs"): [str], 860 } 861 ], 862 # behavior for mac iscript 863 Optional("mac-behavior"): Any( 864 "apple_notarization", 865 "apple_notarization_stacked", 866 "mac_sign_and_pkg", 867 "mac_sign_and_pkg_hardened", 868 "mac_geckodriver", 869 "mac_notarize_geckodriver", 870 "mac_single_file", 871 "mac_notarize_single_file", 872 ), 873 Optional("entitlements-url"): str, 874 Optional("requirements-plist-url"): str, 875 Optional("provisioning-profile-config"): [ 876 { 877 Required("profile_name"): str, 878 Required("target_path"): str, 879 } 880 ], 881 Optional("hardened-sign-config"): [ 882 { 883 Optional("deep"): bool, 884 Optional("runtime"): bool, 885 Optional("force"): bool, 886 Optional("entitlements"): str, 887 Optional("requirements"): str, 888 Required("globs"): [str], 889 } 890 ], 891 }, 892 ) 893 def build_iscript_payload(config, task, task_def): 894 worker = task["worker"] 895 896 task_def["payload"] = { 897 "maxRunTime": worker["max-run-time"], 898 "upstreamArtifacts": worker["upstream-artifacts"], 899 } 900 if worker.get("mac-behavior"): 901 task_def["payload"]["behavior"] = worker["mac-behavior"] 902 for attribute in ( 903 "entitlements-url", 904 "requirements-plist-url", 905 "hardened-sign-config", 906 "provisioning-profile-config", 907 ): 908 if worker.get(attribute): 909 task_def["payload"][attribute] = worker[attribute] 910 911 # Set scopes 912 scope_prefix = config.graph_config["scriptworker"]["scope-prefix"] 913 scopes = set(task_def.get("scopes", [])) 914 scopes.add(f"{scope_prefix}:signing:cert:{worker['signing-type']}") 915 task_def["scopes"] = sorted(scopes) 916 917 artifacts = set(task.setdefault("attributes", {}).get("release_artifacts", [])) 918 for upstream_artifact in worker["upstream-artifacts"]: 919 for path in upstream_artifact["paths"]: 920 artifacts.update( 921 get_signed_artifacts( 922 input=path, 923 formats=upstream_artifact["formats"], 924 
# NOTE(review): the original chunk began here with the tail of
# build_iscript_payload; that builder is reconstructed in full alongside its
# decorator and is not repeated in this span.


@payload_builder(
    "beetmover",
    schema={
        # the maximum time to run, in seconds
        Optional("max-run-time"): int,
        # locale key, if this is a locale beetmover job
        Optional("locale"): str,
        Required("release-properties"): {
            "app-name": str,
            "app-version": str,
            "branch": str,
            "build-id": str,
            "hash-type": str,
            "platform": str,
        },
        # list of artifact URLs for the artifacts that should be beetmoved
        Required("upstream-artifacts"): [
            {
                # taskId of the task with the artifact
                Required("taskId"): taskref_or_string,
                # type of signing task (for CoT)
                Required("taskType"): str,
                # Paths to the artifacts to sign
                Required("paths"): [str],
                # locale is used to map upload path and allow for duplicate simple names
                Required("locale"): str,
            }
        ],
        Optional("artifact-map"): object,
    },
)
def build_beetmover_payload(config, task, task_def):
    """Build the payload for a beetmover (artifact publishing) task.

    Translates the hyphenated worker release-properties into the camelCase
    keys beetmover expects and merges in the release configuration.
    """
    worker = task["worker"]
    release_config = get_release_config(config)
    release_properties = worker["release-properties"]

    task_def["payload"] = {
        "releaseProperties": {
            "appName": release_properties["app-name"],
            "appVersion": release_properties["app-version"],
            "branch": release_properties["branch"],
            "buildid": release_properties["build-id"],
            "hashType": release_properties["hash-type"],
            "platform": release_properties["platform"],
        },
        "upload_date": config.params["build_date"],
        "upstreamArtifacts": worker["upstream-artifacts"],
    }
    if worker.get("locale"):
        task_def["payload"]["locale"] = worker["locale"]
    if worker.get("artifact-map"):
        task_def["payload"]["artifactMap"] = worker["artifact-map"]
    if release_config:
        task_def["payload"].update(release_config)


@payload_builder(
    "beetmover-push-to-release",
    schema={
        # the maximum time to run, in seconds
        Optional("max-run-time"): int,
        Required("product"): str,
    },
)
def build_beetmover_push_to_release_payload(config, task, task_def):
    """Build the payload that pushes a staged release (plus partner repacks)
    to the release directory."""
    worker = task["worker"]
    release_config = get_release_config(config)
    # Partner entries are "<partner>/<sub-config>" pairs.
    partners = [f"{p}/{s}" for p, s, _ in get_partners_to_be_published(config)]

    task_def["payload"] = {
        "product": worker["product"],
        "version": release_config["version"],
        "build_number": release_config["build_number"],
        "partners": partners,
    }


@payload_builder(
    "beetmover-import-from-gcs-to-artifact-registry",
    schema={
        Optional("max-run-time"): int,
        Required("gcs-sources"): [str],
        Required("product"): str,
    },
)
def build_import_from_gcs_to_artifact_registry_payload(config, task, task_def):
    """Build the payload importing GCS objects into the artifact registry."""
    task_def["payload"] = {
        "product": task["worker"]["product"],
        "gcs_sources": task["worker"]["gcs-sources"],
    }


@payload_builder(
    "beetmover-maven",
    schema={
        Optional("max-run-time"): int,
        Required("release-properties"): {
            "app-name": str,
            "app-version": str,
            "branch": str,
            "build-id": str,
            "artifact-id": str,
            "hash-type": str,
            "platform": str,
        },
        Required("upstream-artifacts"): [
            {
                Required("taskId"): taskref_or_string,
                Required("taskType"): str,
                Required("paths"): [str],
                Optional("zipExtract"): bool,
            }
        ],
        Optional("artifact-map"): object,
    },
)
def build_beetmover_maven_payload(config, task, task_def):
    """Build the payload for publishing maven (geckoview) artifacts.

    Delegates to the plain beetmover builder, then adds the maven artifact id
    and the computed geckoview version, and strips the releaseProperties keys
    maven beetmover does not accept.
    """
    build_beetmover_payload(config, task, task_def)

    task_def["payload"]["artifact_id"] = task["worker"]["release-properties"][
        "artifact-id"
    ]
    if task["worker"].get("artifact-map"):
        task_def["payload"]["artifactMap"] = task["worker"]["artifact-map"]

    task_def["payload"]["version"] = _compute_geckoview_version(
        task["worker"]["release-properties"]["app-version"],
        task["worker"]["release-properties"]["build-id"],
    )

    # Maven beetmover rejects these two properties.
    del task_def["payload"]["releaseProperties"]["hashType"]
    del task_def["payload"]["releaseProperties"]["platform"]
# NOTE(review): the original chunk began here with the tail of
# build_beetmover_maven_payload; that builder is reconstructed in full
# alongside its decorator and is not repeated in this span.


@payload_builder(
    "balrog",
    schema={
        Required("balrog-action"): Any(*BALROG_ACTIONS),
        Optional("product"): str,
        Optional("platforms"): [str],
        Optional("release-eta"): str,
        Optional("channel-names"): optionally_keyed_by("release-type", [str]),
        Optional("require-mirrors"): bool,
        Optional("publish-rules"): optionally_keyed_by(
            "release-type", "release-level", [int]
        ),
        Optional("rules-to-update"): optionally_keyed_by(
            "release-type", "release-level", [str]
        ),
        Optional("archive-domain"): optionally_keyed_by("release-level", str),
        Optional("download-domain"): optionally_keyed_by("release-level", str),
        Optional("blob-suffix"): str,
        Optional("complete-mar-filename-pattern"): str,
        Optional("complete-mar-bouncer-product-pattern"): str,
        Optional("update-line"): object,
        Optional("suffixes"): [str],
        Optional("background-rate"): optionally_keyed_by(
            "release-type", "beta-number", Any(int, None)
        ),
        Optional("force-fallback-mapping-update"): optionally_keyed_by(
            "release-type", "beta-number", bool
        ),
        Optional("pin-channels"): optionally_keyed_by(
            "release-type", "release-level", [str]
        ),
        # list of artifact URLs for the artifacts that should be beetmoved
        Optional("upstream-artifacts"): [
            {
                # taskId of the task with the artifact
                Required("taskId"): taskref_or_string,
                # type of signing task (for CoT)
                Required("taskType"): str,
                # Paths to the artifacts to sign
                Required("paths"): [str],
            }
        ],
    },
)
def build_balrog_payload(config, task, task_def):
    """Build the payload for a balrog (update server) task.

    submit-locale actions only forward upstream artifacts; all other actions
    resolve their keyed-by options and build the action-specific payload
    (toplevel submission vs. schedule/ship).
    """
    worker = task["worker"]
    release_config = get_release_config(config)

    # e.g. "71.0b13" -> beta_number "13"; None for non-beta versions.
    beta_number = None
    if "b" in release_config["version"]:
        beta_number = release_config["version"].split("b")[-1]

    task_def["payload"] = {
        "behavior": worker["balrog-action"],
    }

    if worker["balrog-action"] in ("submit-locale", "v2-submit-locale"):
        task_def["payload"].update({
            "upstreamArtifacts": worker["upstream-artifacts"],
            "suffixes": worker["suffixes"],
        })
    else:
        # Resolve any keyed-by options in place before reading them below.
        for prop in (
            "archive-domain",
            "channel-names",
            "download-domain",
            "publish-rules",
            "rules-to-update",
            "background-rate",
            "force-fallback-mapping-update",
            "pin-channels",
        ):
            if prop in worker:
                resolve_keyed_by(
                    worker,
                    prop,
                    task["description"],
                    **{
                        "release-type": config.params["release_type"],
                        "release-level": release_level(config.params),
                        "beta-number": beta_number,
                    },
                )
        task_def["payload"].update({
            "build_number": release_config["build_number"],
            "product": worker["product"],
            "version": release_config["version"],
        })
        for prop in (
            "blob-suffix",
            "complete-mar-filename-pattern",
            "complete-mar-bouncer-product-pattern",
            "pin-channels",
        ):
            if prop in worker:
                task_def["payload"][prop.replace("-", "_")] = worker[prop]

        if worker["balrog-action"] in ("submit-toplevel", "v2-submit-toplevel"):
            task_def["payload"].update({
                "app_version": release_config["appVersion"],
                "archive_domain": worker["archive-domain"],
                "channel_names": worker["channel-names"],
                "download_domain": worker["download-domain"],
                "partial_versions": release_config.get("partial_versions", ""),
                "platforms": worker["platforms"],
                "rules_to_update": worker["rules-to-update"],
                "require_mirrors": worker["require-mirrors"],
                "update_line": worker["update-line"],
            })
        else:  # schedule / ship
            task_def["payload"].update({
                "publish_rules": worker["publish-rules"],
                "release_eta": worker.get(
                    "release-eta", config.params.get("release_eta")
                )
                or "",
            })
            if worker.get("force-fallback-mapping-update"):
                task_def["payload"]["force_fallback_mapping_update"] = worker[
                    "force-fallback-mapping-update"
                ]
            if worker.get("background-rate"):
                task_def["payload"]["background_rate"] = worker["background-rate"]


@payload_builder(
    "bouncer-aliases",
    schema={
        Required("entries"): object,
    },
)
def build_bouncer_aliases_payload(config, task, task_def):
    """Build the payload updating bouncer product aliases."""
    worker = task["worker"]

    task_def["payload"] = {"aliases_entries": worker["entries"]}


@payload_builder(
    "bouncer-locations",
    schema={
        Required("implementation"): "bouncer-locations",
        Required("bouncer-products"): [str],
    },
)
def build_bouncer_locations_payload(config, task, task_def):
    """Build the payload updating bouncer download locations."""
    worker = task["worker"]
    release_config = get_release_config(config)

    task_def["payload"] = {
        "bouncer_products": worker["bouncer-products"],
        "version": release_config["version"],
        "product": task["shipping-product"],
    }


@payload_builder(
    "bouncer-submission",
    schema={
        Required("locales"): [str],
        Required("entries"): object,
    },
)
def build_bouncer_submission_payload(config, task, task_def):
    """Build the payload submitting new bouncer entries."""
    worker = task["worker"]

    task_def["payload"] = {
        "locales": worker["locales"],
        "submission_entries": worker["entries"],
    }


@payload_builder(
    "push-flatpak",
    schema={
        Required("channel"): str,
        Required("upstream-artifacts"): [
            {
                Required("taskId"): taskref_or_string,
                Required("taskType"): str,
                Required("paths"): [str],
            }
        ],
    },
)
def build_push_flatpak_payload(config, task, task_def):
    """Build the payload pushing a flatpak to the configured channel."""
    worker = task["worker"]

    task_def["payload"] = {
        "channel": worker["channel"],
        "upstreamArtifacts": worker["upstream-artifacts"],
    }
# NOTE(review): the original chunk began here with the tail of
# build_push_flatpak_payload; that builder is reconstructed in full alongside
# its decorator and is not repeated in this span.


@payload_builder(
    "push-msix",
    schema={
        Required("channel"): str,
        Optional("publish-mode"): str,
        Required("upstream-artifacts"): [
            {
                Required("taskId"): taskref_or_string,
                Required("taskType"): str,
                Required("paths"): [str],
            }
        ],
    },
)
def build_push_msix_payload(config, task, task_def):
    """Build the payload pushing an MSIX package to the Microsoft Store."""
    worker = task["worker"]

    task_def["payload"] = {
        "channel": worker["channel"],
        "upstreamArtifacts": worker["upstream-artifacts"],
    }
    if worker.get("publish-mode"):
        task_def["payload"]["publishMode"] = worker["publish-mode"]


@payload_builder(
    "shipit-update-product-channel-version",
    schema={
        Required("product"): str,
        Required("channel"): str,
        Required("version"): str,
    },
)
def build_ship_it_update_product_channel_version_payload(config, task, task_def):
    """Build the payload updating a product/channel version in Ship It."""
    worker = task["worker"]
    task_def["payload"] = {
        "product": worker["product"],
        "version": worker["version"],
        "channel": worker["channel"],
    }


@payload_builder(
    "shipit-shipped",
    schema={
        Required("release-name"): str,
    },
)
def build_ship_it_shipped_payload(config, task, task_def):
    """Build the payload marking a release as shipped in Ship It."""
    worker = task["worker"]

    task_def["payload"] = {"release_name": worker["release-name"]}


@payload_builder(
    "shipit-maybe-release",
    schema={
        Required("phase"): str,
    },
)
def build_ship_it_maybe_release_payload(config, task, task_def):
    """Build the payload asking Ship It to (maybe) start a release phase."""
    # expect branch name, including path
    branch = config.params["head_repository"][len("https://hg.mozilla.org/") :]
    # 'version' is e.g. '71.0b13' (app_version doesn't have beta number)
    version = config.params["version"]

    task_def["payload"] = {
        "product": task["shipping-product"],
        "branch": branch,
        "phase": task["worker"]["phase"],
        "version": version,
        "cron_revision": config.params["head_rev"],
    }


@payload_builder(
    "push-addons",
    schema={
        Required("channel"): Any("listed", "unlisted"),
        Required("upstream-artifacts"): [
            {
                Required("taskId"): taskref_or_string,
                Required("taskType"): str,
                Required("paths"): [str],
            }
        ],
    },
)
def build_push_addons_payload(config, task, task_def):
    """Build the payload pushing addons to AMO (listed or unlisted)."""
    worker = task["worker"]

    task_def["payload"] = {
        "channel": worker["channel"],
        "upstreamArtifacts": worker["upstream-artifacts"],
    }


@payload_builder(
    "treescript",
    schema={
        Required("tags"): [Any("buildN", "release", None)],
        Required("bump"): bool,
        Optional("bump-files"): [str],
        Optional("repo-param-prefix"): str,
        Optional("dontbuild"): bool,
        Optional("ignore-closed-tree"): bool,
        Optional("force-dry-run"): bool,
        Optional("push"): bool,
        Optional("source-repo"): str,
        Optional("ssh-user"): str,
        Optional("l10n-bump-info"): [
            {
                Required("name"): str,
                Required("path"): str,
                Required("version-path"): str,
                Optional("l10n-repo-url"): str,
                Optional("l10n-repo-target-branch"): str,
                Optional("ignore-config"): object,
                Required("platform-configs"): [
                    {
                        Required("platforms"): [str],
                        Required("path"): str,
                        Optional("format"): str,
                    }
                ],
            }
        ],
        Optional("actions"): object,
        Optional("merge-info"): object,
        Optional("android-l10n-import-info"): {
            Required("from-repo-url"): str,
            Required("toml-info"): [
                {
                    Required("toml-path"): str,
                    Required("dest-path"): str,
                }
            ],
        },
        Optional("android-l10n-sync-info"): {
            Required("from-repo-url"): str,
            Required("toml-info"): [
                {
                    Required("toml-path"): str,
                }
            ],
        },
    },
)
def build_treescript_payload(config, task, task_def):
    """Build the payload for a treescript (VCS tagging/bumping/merging) task.

    Assembles the ordered ``actions`` list plus the per-action *_info payload
    sections (tag_info, version_bump_info, l10n_bump_info, merge_info,
    android_l10n_{import,sync}_info).
    """
    worker = task["worker"]
    release_config = get_release_config(config)

    task_def["payload"] = {"actions": []}
    actions = task_def["payload"]["actions"]
    if worker["tags"]:
        tag_names = []
        product = task["shipping-product"].upper()
        version = release_config["version"].replace(".", "_")
        buildnum = release_config["build_number"]
        if "buildN" in worker["tags"]:
            tag_names.extend([
                f"{product}_{version}_BUILD{buildnum}",
            ])
        if "release" in worker["tags"]:
            tag_names.extend([f"{product}_{version}_RELEASE"])
        tag_info = {
            "tags": tag_names,
            "revision": config.params[
                "{}head_rev".format(worker.get("repo-param-prefix", ""))
            ],
        }
        task_def["payload"]["tag_info"] = tag_info
        actions.append("tag")

    if worker["bump"]:
        # FIX: 'bump-files' is Optional in the schema; using worker["bump-files"]
        # raised KeyError when the key was absent, making this friendly error
        # unreachable for the missing-key case.
        if not worker.get("bump-files"):
            raise Exception("Version Bump requested without bump-files")

        bump_info = {}
        bump_info["next_version"] = release_config["next_version"]
        bump_info["files"] = worker["bump-files"]
        task_def["payload"]["version_bump_info"] = bump_info
        actions.append("version_bump")

    if worker.get("l10n-bump-info"):
        l10n_bump_info = []
        l10n_repo_urls = set()
        for lbi in worker["l10n-bump-info"]:
            new_lbi = {}
            if "l10n-repo-url" in lbi:
                l10n_repo_urls.add(lbi["l10n-repo-url"])
            for k, v in lbi.items():
                new_lbi[k.replace("-", "_")] = v
            l10n_bump_info.append(new_lbi)

        task_def["payload"]["l10n_bump_info"] = l10n_bump_info
        if len(l10n_repo_urls) > 1:
            raise Exception(
                "Must use the same l10n-repo-url for all files in the same task!"
            )
        elif len(l10n_repo_urls) == 1:
            if "github.com" in l10n_repo_urls.pop():
                actions.append("l10n_bump_github")
            else:
                actions.append("l10n_bump")

    if worker.get("merge-info"):
        merge_info = {
            merge_param_name.replace("-", "_"): merge_param_value
            for merge_param_name, merge_param_value in worker["merge-info"].items()
            if merge_param_name != "version-files"
        }
        merge_info["version_files"] = [
            {
                file_param_name.replace("-", "_"): file_param_value
                for file_param_name, file_param_value in file_entry.items()
            }
            for file_entry in worker["merge-info"]["version-files"]
        ]
        task_def["payload"]["merge_info"] = merge_info
        actions.append("merge_day")

    if worker.get("android-l10n-import-info"):
        android_l10n_import_info = {}
        # Use the loop value directly instead of re-looking the key up.
        for k, v in worker["android-l10n-import-info"].items():
            android_l10n_import_info[k.replace("-", "_")] = v
        android_l10n_import_info["toml_info"] = [
            {
                param_name.replace("-", "_"): param_value
                for param_name, param_value in entry.items()
            }
            for entry in worker["android-l10n-import-info"]["toml-info"]
        ]
        task_def["payload"]["android_l10n_import_info"] = android_l10n_import_info
        actions.append("android_l10n_import")

    if worker.get("android-l10n-sync-info"):
        android_l10n_sync_info = {}
        for k, v in worker["android-l10n-sync-info"].items():
            android_l10n_sync_info[k.replace("-", "_")] = v
        android_l10n_sync_info["toml_info"] = [
            {
                param_name.replace("-", "_"): param_value
                for param_name, param_value in entry.items()
            }
            for entry in worker["android-l10n-sync-info"]["toml-info"]
        ]
        task_def["payload"]["android_l10n_sync_info"] = android_l10n_sync_info
        actions.append("android_l10n_sync")

    if worker["push"]:
        actions.append("push")

    if worker.get("force-dry-run"):
        task_def["payload"]["dry_run"] = True

    if worker.get("dontbuild"):
        task_def["payload"]["dontbuild"] = True

    if worker.get("ignore-closed-tree") is not None:
        task_def["payload"]["ignore_closed_tree"] = worker["ignore-closed-tree"]

    if worker.get("source-repo"):
        task_def["payload"]["source_repo"] = worker["source-repo"]

    if worker.get("ssh-user"):
        task_def["payload"]["ssh_user"] = worker["ssh-user"]
# NOTE(review): the original chunk began here with the tail of
# build_treescript_payload; that builder is reconstructed in full alongside
# its decorator and is not repeated in this span.


@payload_builder(
    "landoscript",
    schema={
        Required("lando-repo"): str,
        Optional("hg-repo-url"): str,
        Optional("ignore-closed-tree"): bool,
        Optional("dontbuild"): bool,
        Optional("tags"): [Any("buildN", "release", None)],
        Optional("force-dry-run"): bool,
        Optional("push"): bool,
        Optional("android-l10n-import-info"): {
            Required("from-repo-url"): str,
            Required("toml-info"): [
                {
                    Required("toml-path"): str,
                    Required("dest-path"): str,
                }
            ],
        },
        Optional("android-l10n-sync-info"): {
            Required("from-branch"): str,
            Required("toml-info"): [
                {
                    Required("toml-path"): str,
                }
            ],
        },
        Optional("l10n-bump-info"): [
            {
                Required("name"): str,
                Required("path"): str,
                Optional("l10n-repo-url"): str,
                Optional("l10n-repo-target-branch"): str,
                Optional("ignore-config"): object,
                Required("platform-configs"): [
                    {
                        Required("platforms"): [str],
                        Required("path"): str,
                        Optional("format"): str,
                    }
                ],
            }
        ],
        Optional("bump-files"): [str],
        Optional("merge-info"): object,
    },
)
def build_landoscript_payload(config, task, task_def):
    """Build the payload for a landoscript task (Lando-based tree changes).

    Mirrors the treescript builder but targets a Lando repo, and derives the
    required lando scopes from the accumulated actions.
    """
    worker = task["worker"]
    release_config = get_release_config(config)
    task_def["payload"] = {"actions": [], "lando_repo": worker["lando-repo"]}
    actions = task_def["payload"]["actions"]

    if worker.get("ignore-closed-tree") is not None:
        task_def["payload"]["ignore_closed_tree"] = worker["ignore-closed-tree"]

    if worker.get("dontbuild"):
        task_def["payload"]["dontbuild"] = True

    if worker.get("force-dry-run"):
        task_def["payload"]["dry_run"] = True

    if worker.get("android-l10n-import-info"):
        android_l10n_import_info = {}
        for k, v in worker["android-l10n-import-info"].items():
            android_l10n_import_info[k.replace("-", "_")] = v
        android_l10n_import_info["toml_info"] = [
            {
                param_name.replace("-", "_"): param_value
                for param_name, param_value in entry.items()
            }
            for entry in worker["android-l10n-import-info"]["toml-info"]
        ]
        task_def["payload"]["android_l10n_import_info"] = android_l10n_import_info
        actions.append("android_l10n_import")

    if worker.get("android-l10n-sync-info"):
        android_l10n_sync_info = {}
        for k, v in worker["android-l10n-sync-info"].items():
            android_l10n_sync_info[k.replace("-", "_")] = v
        android_l10n_sync_info["toml_info"] = [
            {
                param_name.replace("-", "_"): param_value
                for param_name, param_value in entry.items()
            }
            for entry in worker["android-l10n-sync-info"]["toml-info"]
        ]
        task_def["payload"]["android_l10n_sync_info"] = android_l10n_sync_info
        actions.append("android_l10n_sync")

    if worker.get("l10n-bump-info"):
        l10n_bump_info = []
        l10n_repo_urls = set()
        for lbi in worker["l10n-bump-info"]:
            new_lbi = {}
            if "l10n-repo-url" in lbi:
                l10n_repo_urls.add(lbi["l10n-repo-url"])
            for k, v in lbi.items():
                new_lbi[k.replace("-", "_")] = v
            l10n_bump_info.append(new_lbi)

        task_def["payload"]["l10n_bump_info"] = l10n_bump_info
        if len(l10n_repo_urls) > 1:
            raise Exception(
                "Must use the same l10n-repo-url for all files in the same task!"
            )
        elif len(l10n_repo_urls) == 1:
            actions.append("l10n_bump")

    if worker.get("tags"):
        tag_names = []
        product = task["shipping-product"].upper()
        version = release_config["version"].replace(".", "_")
        buildnum = release_config["build_number"]
        if "buildN" in worker["tags"]:
            tag_names.extend([
                f"{product}_{version}_BUILD{buildnum}",
            ])
        if "release" in worker["tags"]:
            tag_names.extend([f"{product}_{version}_RELEASE"])
        tag_info = {
            "tags": tag_names,
            # NOTE(review): 'hg-repo-url' is Optional in the schema but read
            # unconditionally here — a task with tags and no hg-repo-url would
            # raise KeyError; confirm whether that combination is allowed.
            "hg_repo_url": worker["hg-repo-url"],
            "revision": config.params[
                "{}head_rev".format(worker.get("repo-param-prefix", ""))
            ],
        }
        task_def["payload"]["tag_info"] = tag_info
        actions.append("tag")

    if worker.get("bump-files"):
        bump_info = {}
        bump_info["next_version"] = release_config["next_version"]
        bump_info["files"] = worker["bump-files"]
        task_def["payload"]["version_bump_info"] = bump_info
        actions.append("version_bump")

    if worker.get("merge-info"):
        merge_info = {
            merge_param_name.replace("-", "_"): merge_param_value
            for merge_param_name, merge_param_value in worker["merge-info"].items()
            if merge_param_name != "version-files"
        }
        merge_info["version_files"] = [
            {
                file_param_name.replace("-", "_"): file_param_value
                for file_param_name, file_param_value in file_entry.items()
            }
            for file_entry in worker["merge-info"]["version-files"]
        ]
        # hack alert: co-opt the l10n_bump_info into the merge_info section
        # this should be cleaned up to avoid l10n_bump_info ever existing
        # in the payload
        if task_def["payload"].get("l10n_bump_info"):
            actions.remove("l10n_bump")
            merge_info["l10n_bump_info"] = task_def["payload"].pop("l10n_bump_info")

        task_def["payload"]["merge_info"] = merge_info
        actions.append("merge_day")

    scopes = set(task_def.get("scopes", []))
    scopes.add(f"project:releng:lando:repo:{worker['lando-repo']}")
    scopes.update([f"project:releng:lando:action:{action}" for action in actions])
    task_def["scopes"] = sorted(scopes)
# NOTE(review): the original chunk began here with the final scope lines of
# build_landoscript_payload; that builder is reconstructed in full and those
# lines are not repeated in this span.


transforms = TransformSequence()


@transforms.add
def set_implementation(config, tasks):
    """
    Set the worker implementation based on the worker-type alias.
    """
    for task in tasks:
        default_worker_implementation, default_os = worker_type_implementation(
            config.graph_config, config.params, task["worker-type"]
        )

        worker = task.setdefault("worker", {})
        tags = task.setdefault("tags", {})

        # Explicit worker settings win over the worker-type defaults.
        worker_implementation = worker.get(
            "implementation", default_worker_implementation
        )
        tag_worker_implementation = _get_worker_implementation_tag(
            config, task["worker-type"], worker_implementation
        )
        if worker_implementation:
            worker["implementation"] = worker_implementation
            tags["worker-implementation"] = tag_worker_implementation

        os = worker.get("os", default_os)
        if os:
            tags["os"] = os
            worker["os"] = os

        yield task


def _get_worker_implementation_tag(config, task_worker_type, worker_implementation):
    # Scriptworkers have different types of payload and each sets its own
    # worker-implementation. Per bug 1955941, we want to bundle them all in one
    # category through their tags.
    provisioner_id, _ = get_worker_type(
        config.graph_config,
        config.params,
        task_worker_type,
    )
    if provisioner_id in ("scriptworker-k8s", "scriptworker-prov-v1"):
        return "scriptworker"

    return worker_implementation


@transforms.add
def set_defaults(config, tasks):
    """Fill in default values for optional task and worker fields,
    per worker implementation."""
    for task in tasks:
        task.setdefault("shipping-phase", None)
        task.setdefault("shipping-product", None)
        task.setdefault("always-target", False)
        task.setdefault("optimization", None)
        task.setdefault("use-sccache", False)

        worker = task["worker"]
        if worker["implementation"] in ("docker-worker",):
            worker.setdefault("chain-of-trust", False)
            worker.setdefault("taskcluster-proxy", False)
            worker.setdefault("allow-ptrace", True)
            worker.setdefault("loopback-video", False)
            worker.setdefault("loopback-audio", False)
            worker.setdefault("docker-in-docker", False)
            worker.setdefault("privileged", False)
            worker.setdefault("volumes", [])
            worker.setdefault("env", {})
            if "caches" in worker:
                for c in worker["caches"]:
                    c.setdefault("skip-untrusted", False)
        elif worker["implementation"] == "generic-worker":
            worker.setdefault("env", {})
            worker.setdefault("os-groups", [])
            if worker["os-groups"] and worker["os"] not in (
                "windows",
                "linux",
            ):
                raise Exception(
                    "os-groups feature of generic-worker is only supported on "
                    "Windows and Linux, not on {}".format(worker["os"])
                )
            worker.setdefault("chain-of-trust", False)
        elif worker["implementation"] in ("iscript",):
            worker.setdefault("max-run-time", 600)
        elif worker["implementation"] == "push-apk":
            worker.setdefault("commit", False)

        yield task


@transforms.add
def setup_raptor(config, tasks):
    """Add options that are specific to raptor jobs (identified by suite=raptor).

    This variant uses a separate set of transforms for manipulating the tests at the
    task-level. Currently only used for setting the taskcluster proxy setting and
    the scopes required for perftest secrets.
    """
    # Imported lazily to avoid a module-level import cycle.
    from gecko_taskgraph.transforms.test.raptor import (
        task_transforms as raptor_transforms,
    )

    for task in tasks:
        if task.get("extra", {}).get("suite", "") != "raptor":
            yield task
            continue

        yield from raptor_transforms(config, [task])
1799 1800 This variant uses a separate set of transforms for manipulating the tests at the 1801 task-level. Currently only used for setting the taskcluster proxy setting and 1802 the scopes required for perftest secrets. 1803 """ 1804 from gecko_taskgraph.transforms.test.raptor import ( 1805 task_transforms as raptor_transforms, 1806 ) 1807 1808 for task in tasks: 1809 if task.get("extra", {}).get("suite", "") != "raptor": 1810 yield task 1811 continue 1812 1813 yield from raptor_transforms(config, [task]) 1814 1815 1816 @transforms.add 1817 def task_name_from_label(config, tasks): 1818 for task in tasks: 1819 taskname = task.pop("name", None) 1820 if "label" not in task: 1821 if taskname is None: 1822 raise Exception("task has neither a name nor a label") 1823 task["label"] = f"{config.kind}-{taskname}" 1824 yield task 1825 1826 1827 UNSUPPORTED_SHIPPING_PRODUCT_ERROR = """\ 1828 The shipping product {product} is not in the list of configured products in 1829 `taskcluster/config.yml'. 1830 """ 1831 1832 1833 def validate_shipping_product(config, product): 1834 if product not in config.graph_config["release-promotion"]["products"]: 1835 raise Exception(UNSUPPORTED_SHIPPING_PRODUCT_ERROR.format(product=product)) 1836 1837 1838 @transforms.add 1839 def validate(config, tasks): 1840 for task in tasks: 1841 validate_schema( 1842 task_description_schema, 1843 task, 1844 "In task {!r}:".format(task.get("label", "?no-label?")), 1845 ) 1846 validate_schema( 1847 payload_builders[task["worker"]["implementation"]].schema, 1848 task["worker"], 1849 "In task.run {!r}:".format(task.get("label", "?no-label?")), 1850 ) 1851 if task["shipping-product"] is not None: 1852 validate_shipping_product(config, task["shipping-product"]) 1853 yield task 1854 1855 1856 @index_builder("generic") 1857 def add_generic_index_routes(config, task): 1858 index = task.get("index") 1859 routes = task.setdefault("routes", []) 1860 1861 verify_index(config, index) 1862 1863 subs = config.params.copy() 
1864 subs["job-name"] = index["job-name"] 1865 subs["build_date_long"] = time.strftime( 1866 "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) 1867 ) 1868 subs["build_date"] = time.strftime( 1869 "%Y.%m.%d", time.gmtime(config.params["build_date"]) 1870 ) 1871 subs["product"] = index["product"] 1872 subs["trust-domain"] = config.graph_config["trust-domain"] 1873 subs["branch_rev"] = get_branch_rev(config) 1874 try: 1875 subs["branch_git_rev"] = get_branch_git_rev(config) 1876 except KeyError: 1877 pass 1878 1879 subs["project"] = get_project_alias(config) 1880 1881 project = config.params.get("project") 1882 1883 for tpl in V2_ROUTE_TEMPLATES: 1884 try: 1885 routes.append(tpl.format(**subs)) 1886 except KeyError: 1887 # Ignore errors that arise from branch_git_rev not being set. 1888 pass 1889 1890 # Additionally alias all tasks for "trunk" repos into a common 1891 # namespace. 1892 if project and project in TRUNK_PROJECTS: 1893 for tpl in V2_TRUNK_ROUTE_TEMPLATES: 1894 routes.append(tpl.format(**subs)) 1895 1896 return task 1897 1898 1899 @index_builder("shippable") 1900 def add_shippable_index_routes(config, task): 1901 index = task.get("index") 1902 routes = task.setdefault("routes", []) 1903 1904 verify_index(config, index) 1905 1906 subs = config.params.copy() 1907 subs["job-name"] = index["job-name"] 1908 subs["build_date_long"] = time.strftime( 1909 "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) 1910 ) 1911 subs["build_date"] = time.strftime( 1912 "%Y.%m.%d", time.gmtime(config.params["build_date"]) 1913 ) 1914 subs["product"] = index["product"] 1915 subs["trust-domain"] = config.graph_config["trust-domain"] 1916 subs["branch_rev"] = get_branch_rev(config) 1917 try: 1918 subs["branch_git_rev"] = get_branch_git_rev(config) 1919 except KeyError: 1920 pass 1921 subs["project"] = get_project_alias(config) 1922 1923 for tpl in V2_SHIPPABLE_TEMPLATES: 1924 try: 1925 routes.append(tpl.format(**subs)) 1926 except KeyError: 1927 # 
@index_builder("shippable")
def add_shippable_index_routes(config, task):
    """Add v2 "shippable" index routes, plus the en-US shippable-l10n routes."""
    index = task.get("index")
    routes = task.setdefault("routes", [])

    verify_index(config, index)

    subs = config.params.copy()
    subs["job-name"] = index["job-name"]
    subs["build_date_long"] = time.strftime(
        "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
    )
    subs["build_date"] = time.strftime(
        "%Y.%m.%d", time.gmtime(config.params["build_date"])
    )
    subs["product"] = index["product"]
    subs["trust-domain"] = config.graph_config["trust-domain"]
    subs["branch_rev"] = get_branch_rev(config)
    try:
        # Not every repository has a git revision; KeyError is expected then.
        subs["branch_git_rev"] = get_branch_git_rev(config)
    except KeyError:
        pass
    subs["project"] = get_project_alias(config)

    for tpl in V2_SHIPPABLE_TEMPLATES:
        try:
            routes.append(tpl.format(**subs))
        except KeyError:
            # Ignore errors that arise from branch_git_rev not being set.
            pass

    # Also add routes for en-US
    task = add_shippable_l10n_index_routes(config, task, force_locale="en-US")

    return task


@index_builder("shippable-with-multi-l10n")
def add_shippable_multi_index_routes(config, task):
    """Combine the shippable routes with the "multi" locale l10n routes."""
    task = add_shippable_index_routes(config, task)
    task = add_l10n_index_routes(config, task, force_locale="multi")
    return task


@index_builder("l10n")
def add_l10n_index_routes(config, task, force_locale=None):
    """Add per-locale v2 l10n index routes.

    Locales come from the task's ``chunk_locales``/``all_locales``/``locale``
    attributes, unless *force_locale* overrides them (used for en-US and
    multi-locale builds).
    """
    index = task.get("index")
    routes = task.setdefault("routes", [])

    verify_index(config, index)

    subs = config.params.copy()
    subs["job-name"] = index["job-name"]
    subs["build_date_long"] = time.strftime(
        "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
    )
    subs["product"] = index["product"]
    subs["trust-domain"] = config.graph_config["trust-domain"]
    subs["branch_rev"] = get_branch_rev(config)

    locales = task["attributes"].get(
        "chunk_locales", task["attributes"].get("all_locales")
    )
    # Some tasks have only one locale set
    if task["attributes"].get("locale"):
        locales = [task["attributes"]["locale"]]

    if force_locale:
        # Used for en-US and multi-locale
        locales = [force_locale]

    if not locales:
        raise Exception("Error: Unable to use l10n index for tasks without locales")

    # If there are too many locales, we can't write a route for all of them
    # See Bug 1323792
    if len(locales) > 18:  # 18 * 3 = 54, max routes = 64
        return task

    for locale in locales:
        for tpl in V2_L10N_TEMPLATES:
            routes.append(tpl.format(locale=locale, **subs))

    return task
@index_builder("shippable-l10n")
def add_shippable_l10n_index_routes(config, task, force_locale=None):
    """Add per-locale v2 shippable-l10n index routes.

    The locale list is taken from the task's attributes
    (``chunk_locales``/``all_locales``/``locale``) unless *force_locale*
    overrides it (used for en-US and multi-locale builds).
    """
    index = task.get("index")
    routes = task.setdefault("routes", [])

    verify_index(config, index)

    subs = config.params.copy()
    subs.update(
        {
            "job-name": index["job-name"],
            "build_date_long": time.strftime(
                "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
            ),
            "product": index["product"],
            "trust-domain": config.graph_config["trust-domain"],
            "branch_rev": get_branch_rev(config),
            "project": get_project_alias(config),
        }
    )

    attrs = task["attributes"]
    if force_locale:
        # Used for en-US and multi-locale builds.
        locales = [force_locale]
    elif attrs.get("locale"):
        # Single-locale tasks carry exactly one locale attribute.
        locales = [attrs["locale"]]
    else:
        locales = attrs.get("chunk_locales", attrs.get("all_locales"))

    if not locales:
        raise Exception("Error: Unable to use l10n index for tasks without locales")

    # If there are too many locales, we can't write a route for all of them
    # See Bug 1323792 (18 locales * 3 templates = 54; max routes = 64).
    if len(locales) > 18:
        return task

    routes.extend(
        tpl.format(locale=locale, **subs)
        for locale in locales
        for tpl in V2_SHIPPABLE_L10N_TEMPLATES
    )

    return task


def add_geckoview_index_routes(config, task):
    """Add the geckoview release index route for *task*."""
    index = task.get("index")
    geckoview_version = _compute_geckoview_version(
        config.params["app_version"], config.params["moz_build_date"]
    )

    route = V2_GECKOVIEW_RELEASE.format(
        **{
            "geckoview-version": geckoview_version,
            "job-name": index["job-name"],
            "product": index["product"],
            "project": config.params["project"],
            "trust-domain": config.graph_config["trust-domain"],
        }
    )
    task.setdefault("routes", []).append(route)

    return task


@index_builder("android-shippable")
def add_android_shippable_index_routes(config, task):
    """Chain the shippable index routes with the geckoview release route."""
    return add_geckoview_index_routes(config, add_shippable_index_routes(config, task))
@index_builder("android-shippable-with-multi-l10n")
def add_android_shippable_multi_index_routes(config, task):
    """Combine the shippable-with-multi-l10n routes with the geckoview route."""
    task = add_shippable_multi_index_routes(config, task)
    task = add_geckoview_index_routes(config, task)

    return task


@transforms.add
def add_index_routes(config, tasks):
    """Expand each task's ``index`` section into concrete index routes.

    Always sets ``extra.index.rank``: ``by-tier`` (the default) ranks tier-1
    tasks by build date and everything else at 1; ``build_date`` always uses
    the build date; any other value is used verbatim.  Tasks without an
    ``index`` section get no routes.
    """
    for task in tasks:
        index = task.get("index", {})

        # The default behavior is to rank tasks according to their tier
        extra_index = task.setdefault("extra", {}).setdefault("index", {})
        rank = index.get("rank", "by-tier")

        if rank == "by-tier":
            # rank is one for non-tier-1 tasks and based on pushid for others;
            # this sorts tier-{2,3} builds below tier-1 in the index, but above
            # eager-index
            tier = task.get("treeherder", {}).get("tier", 3)
            extra_index["rank"] = 1 if tier > 1 else int(config.params["build_date"])
        elif rank == "build_date":
            extra_index["rank"] = int(config.params["build_date"])
        else:
            extra_index["rank"] = rank

        if not index:
            yield task
            continue

        # Dispatch to the @index_builder registered for this index type.
        index_type = index.get("type", "generic")
        task = index_builders[index_type](config, task)

        del task["index"]
        yield task
@transforms.add
def try_task_config_env(config, tasks):
    """Set environment variables in the task."""
    env = config.params["try_task_config"].get("env")
    if not env:
        yield from tasks
        return

    # Find all implementations that have an 'env' key.
    env_capable = {
        impl
        for impl, builder in payload_builders.items()
        if "env" in builder.schema.schema
    }
    for task in tasks:
        worker = task["worker"]
        if worker["implementation"] in env_capable:
            worker["env"].update(env)
        yield task


@transforms.add
def try_task_config_priority(config, tasks):
    """Change priority based on the try_task_config."""
    priority = config.params["try_task_config"].get("priority")
    for task in tasks:
        if priority:
            task["priority"] = priority
        yield task


@transforms.add
def try_task_config_routes(config, tasks):
    """Set routes in the task."""
    extra_routes = config.params["try_task_config"].get("routes")
    if not extra_routes:
        yield from tasks
        return

    for task in tasks:
        task.setdefault("routes", []).extend(extra_routes)
        yield task
@transforms.add
def set_task_and_artifact_expiry(config, jobs):
    """Set the default expiry for tasks and their artifacts.

    These values are read from ci/config.yml (via ``get_expiration``).  On
    try at level 1, both task and artifact expiries are capped at 28 days.
    """
    now = datetime.datetime.utcnow()
    # We don't want any configuration leading to anything with an expiry longer
    # than 28 days on try.
    cap = (
        "28 days"
        if is_try(config.params) and int(config.params["level"]) == 1
        else None
    )
    cap_from_now = fromNow(cap, now) if cap else None
    for job in jobs:
        expires = get_expiration(config, job.get("expiration-policy", "default"))
        job_expiry = job.setdefault("expires-after", expires)
        job_expiry_from_now = fromNow(job_expiry, now)
        if cap and job_expiry_from_now > cap_from_now:
            job_expiry, job_expiry_from_now = cap, cap_from_now
            # Bug fix: previously only the local variable was capped, so the
            # task itself (whose `expires` is built from job["expires-after"]
            # later, in build_task) could still outlive the 28-day try cap
            # while its artifacts were capped. Write the cap back so the task
            # definition honors it too.
            job["expires-after"] = job_expiry
        # If the task has no explicit expiration-policy, but has an expires-after,
        # we use that as the default artifact expiry.
        artifact_expires = expires if "expiration-policy" in job else job_expiry

        for artifact in job["worker"].get("artifacts", ()):
            artifact_expiry = artifact.setdefault("expires-after", artifact_expires)

            # By using > instead of >=, there's a chance of mismatch
            # where the artifact expires sooner than the task.
            # There is no chance, however, of mismatch where artifacts
            # expire _after_ the task.
            # Currently this leads to some build tasks having logs
            # that expire in 1 year while the task expires in 3 years.
            if fromNow(artifact_expiry, now) > job_expiry_from_now:
                artifact["expires-after"] = job_expiry

        yield job


def group_name_variant(group_names, groupSymbol):
    """Resolve a treeherder group description for a symbol that includes
    known test-variant suffixes.

    Strips every known variant suffix from *groupSymbol*, looks the stripped
    base symbol up in *group_names*, then wraps the base description with each
    matched variant's description template.  Returns "" when the base symbol
    is unknown.
    """
    # iterate through variants, allow for Base-[variant_list]
    # sorting longest->shortest allows for finding variants when
    # other variants have a suffix that is a subset
    variant_symbols = sorted(
        [
            (
                v,
                TEST_VARIANTS[v]["suffix"],
                TEST_VARIANTS[v].get("description", "{description}"),
            )
            for v in TEST_VARIANTS
            if TEST_VARIANTS[v].get("suffix", "")
        ],
        key=lambda tup: len(tup[1]),
        reverse=True,
    )

    # strip known variants
    # build a list of known variants
    base_symbol = groupSymbol
    found_variants = []
    for variant, suffix, description in variant_symbols:
        if f"-{suffix}" in base_symbol:
            base_symbol = base_symbol.replace(f"-{suffix}", "")
            found_variants.append((variant, description))

    if base_symbol not in group_names:
        return ""

    description = group_names[base_symbol]
    for variant, desc in found_variants:
        description = desc.format(description=description)

    return description
@transforms.add
def build_task(config, tasks):
    """Turn each task description into an actual TaskCluster task definition.

    Resolves the worker pool (honoring try_task_config worker-overrides),
    formats scopes, builds treeherder metadata and routes, fills in
    created/deadline/expires, invokes the worker implementation's payload
    builder, and resolves the run-on-* and shipping attributes.  Yields dicts
    of label/description/task/dependencies/attributes/optimization.
    """
    for task in tasks:
        level = str(config.params["level"])

        # try_task_config worker-overrides take precedence over the
        # worker-type resolved from the graph config.
        task_worker_type = task["worker-type"]
        worker_overrides = config.params["try_task_config"].get("worker-overrides", {})
        if task_worker_type in worker_overrides:
            worker_pool = worker_overrides[task_worker_type]
            provisioner_id, worker_type = worker_pool.split("/", 1)
        else:
            provisioner_id, worker_type = get_worker_type(
                config.graph_config,
                config.params,
                task_worker_type,
            )
        task["worker-type"] = "/".join([provisioner_id, worker_type])
        project = config.params["project"]

        routes = task.get("routes", [])
        # Scopes may contain {level}/{project} placeholders.
        scopes = [
            s.format(level=level, project=project) for s in task.get("scopes", [])
        ]

        # set up extra
        extra = task.get("extra", {})
        extra["parent"] = {"task-reference": "<decision>"}
        task_th = task.get("treeherder")
        if task_th:
            extra.setdefault("treeherder-platform", task_th["platform"])
            treeherder = extra.setdefault("treeherder", {})

            # platform is "machine-platform/collection", e.g. "linux64/opt".
            machine_platform, collection = task_th["platform"].split("/", 1)
            treeherder["machine"] = {"platform": machine_platform}
            treeherder["collection"] = {collection: True}

            group_names = config.graph_config["treeherder"]["group-names"]
            groupSymbol, symbol = split_symbol(task_th["symbol"])
            if groupSymbol != "?":
                treeherder["groupSymbol"] = groupSymbol
                # Group descriptions come from the graph config, falling back
                # to variant-suffix resolution via group_name_variant.
                description = group_names.get(
                    groupSymbol, group_name_variant(group_names, groupSymbol)
                )
                if not description:
                    path = os.path.join(config.path, task.get("task-from", ""))
                    raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol, path))
                treeherder["groupName"] = description
            treeherder["symbol"] = symbol
            if len(symbol) > 25 or len(groupSymbol) > 25:
                raise RuntimeError(
                    "Treeherder group and symbol names must not be longer than "
                    "25 characters: {} (see {})".format(
                        task_th["symbol"],
                        TC_TREEHERDER_SCHEMA_URL,
                    )
                )
            treeherder["jobKind"] = task_th["kind"]
            treeherder["tier"] = task_th["tier"]

            branch_rev = get_branch_rev(config)

            routes.append(
                f"{TREEHERDER_ROUTE_ROOT}.v2.{get_project_alias(config)}.{branch_rev}"
            )

        if "deadline-after" not in task:
            task["deadline-after"] = "1 day"

        if "priority" not in task:
            task["priority"] = get_default_priority(
                config.graph_config, config.params["project"]
            )

        tags = task.get("tags", {})
        attributes = task.get("attributes", {})

        tags.update(
            {
                "createdForUser": config.params["owner"],
                "kind": config.kind,
                "label": task["label"],
                "retrigger": "true" if attributes.get("retrigger", False) else "false",
                "project": config.params["project"],
                "trust-domain": config.graph_config["trust-domain"],
            }
        )

        task_def = {
            "provisionerId": provisioner_id,
            "workerType": worker_type,
            "routes": routes,
            "created": {"relative-datestamp": "0 seconds"},
            "deadline": {"relative-datestamp": task["deadline-after"]},
            "expires": {"relative-datestamp": task["expires-after"]},
            "scopes": scopes,
            "metadata": {
                "description": task["description"],
                "name": task["label"],
                "owner": config.params["owner"],
                "source": config.params.file_url(config.path, pretty=True),
            },
            "extra": extra,
            "tags": tags,
            "priority": task["priority"],
        }

        if task.get("requires", None):
            task_def["requires"] = task["requires"]
        if task.get("retries") is not None:
            task_def["retries"] = task["retries"]

        if task_th:
            # link back to treeherder in description
            th_job_link = (
                "https://treeherder.mozilla.org/#/jobs?repo={}&revision={}&selectedTaskRun=<self>"
            ).format(config.params["project"], branch_rev)
            task_def["metadata"]["description"] = {
                "task-reference": "{description} ([Treeherder job]({th_job_link}))".format(
                    description=task_def["metadata"]["description"],
                    th_job_link=th_job_link,
                )
            }

        # add the payload and adjust anything else as required (e.g., scopes)
        payload_builders[task["worker"]["implementation"]].builder(
            config, task, task_def
        )

        # Resolve run-on-projects
        build_platform = attributes.get("build_platform")
        resolve_keyed_by(
            task,
            "run-on-projects",
            item_name=task["label"],
            **{"build-platform": build_platform},
        )
        attributes["run_on_repo_type"] = task.get("run-on-repo-type", ["git", "hg"])
        attributes["run_on_projects"] = task.get("run-on-projects", ["all"])

        # We don't want to pollute non git repos with this attribute. Moreover, target_tasks
        # already assumes the default value is ['all']
        if task.get("run-on-git-branches"):
            attributes["run_on_git_branches"] = task["run-on-git-branches"]

        attributes["always_target"] = task["always-target"]
        # This logic is here since downstream tasks don't always match their
        # upstream dependency's shipping_phase.
        # A text_type task['shipping-phase'] takes precedence, then
        # an existing attributes['shipping_phase'], then fall back to None.
        if task.get("shipping-phase") is not None:
            attributes["shipping_phase"] = task["shipping-phase"]
        else:
            attributes.setdefault("shipping_phase", None)
        # shipping_product will always match the upstream task's
        # shipping_product, so a pre-set existing attributes['shipping_product']
        # takes precedence over task['shipping-product']. However, make sure
        # we don't have conflicting values.
        if task.get("shipping-product") and attributes.get("shipping_product") not in (
            None,
            task["shipping-product"],
        ):
            raise Exception(
                "{} shipping_product {} doesn't match task shipping-product {}!".format(
                    task["label"],
                    attributes["shipping_product"],
                    task["shipping-product"],
                )
            )
        attributes.setdefault("shipping_product", task["shipping-product"])

        # Set some MOZ_* settings on all jobs.
        if task["worker"]["implementation"] in (
            "generic-worker",
            "docker-worker",
        ):
            payload = task_def.get("payload")
            if payload:
                env = payload.setdefault("env", {})
                env.update(
                    {
                        "MOZ_AUTOMATION": "1",
                        "MOZ_BUILD_DATE": config.params["moz_build_date"],
                        "MOZ_SCM_LEVEL": config.params["level"],
                        "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
                        "MOZ_SOURCE_REPO": get_branch_repo(config),
                    }
                )

        # Replace if-dependencies labels with the corresponding dependency
        # names; every entry must also appear in `dependencies`.
        dependencies = task.get("dependencies", {})
        if_dependencies = task.get("if-dependencies", [])
        if if_dependencies:
            for i, dep in enumerate(if_dependencies):
                if dep in dependencies:
                    if_dependencies[i] = dependencies[dep]
                    continue

                raise Exception(
                    "{label} specifies '{dep}' in if-dependencies, "
                    "but {dep} is not a dependency!".format(
                        label=task["label"], dep=dep
                    )
                )

        yield {
            "label": task["label"],
            "description": task["description"],
            "task": task_def,
            "dependencies": dependencies,
            "if-dependencies": if_dependencies,
            "soft-dependencies": task.get("soft-dependencies", []),
            "attributes": attributes,
            "optimization": task.get("optimization", None),
        }


@transforms.add
def chain_of_trust(config, tasks):
    """For chain-of-trust-enabled tasks built from a docker image, record the
    image task as a chain-of-trust input in ``extra.chainOfTrust.inputs``."""
    for task in tasks:
        if task["task"].get("payload", {}).get("features", {}).get("chainOfTrust"):
            image = task.get("dependencies", {}).get("docker-image")
            if image:
                cot = (
                    task["task"].setdefault("extra", {}).setdefault("chainOfTrust", {})
                )
                cot.setdefault("inputs", {})["docker-image"] = {
                    "task-reference": "<docker-image>"
                }
        yield task
@transforms.add
def check_task_identifiers(config, tasks):
    """Ensures that all tasks have well defined identifiers:
    ``^[a-zA-Z0-9_-]{1,38}$``
    """
    ident_re = re.compile("^[a-zA-Z0-9_-]{1,38}$")
    for task in tasks:
        task_def = task["task"]
        for attrib in ("workerType", "provisionerId"):
            value = task_def[attrib]
            if ident_re.match(value):
                continue
            raise Exception(
                "task {}.{} is not a valid identifier: {}".format(
                    task["label"], attrib, value
                )
            )
        yield task


@transforms.add
def check_task_dependencies(config, tasks):
    """Ensures that tasks don't have more than MAX_DEPENDENCIES dependencies."""
    for task in tasks:
        num_deps = len(task["dependencies"])
        if num_deps > MAX_DEPENDENCIES:
            raise Exception(
                "task {}/{} has too many dependencies ({} > {})".format(
                    config.kind,
                    task["label"],
                    num_deps,
                    MAX_DEPENDENCIES,
                )
            )
        yield task


@transforms.add
def check_perf_task_fission_filtering(config, tasks):
    """Drop android chrome-m / cstm-car-m perf tasks unless they are
    "nofis" or "startup" variants."""
    for task in tasks:
        label = task["label"]
        targets_custom_browser = "chrome-m" in label or "cstm-car-m" in label
        if (
            targets_custom_browser
            and "android" in label
            and "nofis" not in label
            and "startup" not in label
        ):
            # Filter the task out by not yielding it.
            continue
        yield task
def check_caches_are_volumes(task):
    """Ensures that all cache paths are defined as volumes.

    Caches and volumes are the only filesystem locations whose content
    isn't defined by the Docker image itself. Some caches are optional
    depending on the job environment. We want paths that are potentially
    caches to have as similar behavior regardless of whether a cache is
    used. To help enforce this, we require that all paths used as caches
    to be declared as Docker volumes. This check won't catch all offenders.
    But it is better than nothing.
    """
    # Idiom fix: plain set() instead of a pass-through comprehension.
    volumes = set(task["worker"]["volumes"])
    paths = {c["mount-point"] for c in task["worker"].get("caches", [])}
    missing = paths - volumes

    if not missing:
        return

    raise Exception(
        "task %s (image %s) has caches that are not declared as "
        "Docker volumes: %s "
        "(have you added them as VOLUMEs in the Dockerfile?)"
        % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing)))
    )


def check_required_volumes(task):
    """
    Ensures that all paths that are required to be volumes are defined as volumes.

    Performance of writing to files is poor in directories not marked as
    volumes, in docker. Ensure that paths that are often written to are marked
    as volumes.
    """
    volumes = set(task["worker"]["volumes"])
    paths = set(task["worker"].get("required-volumes", []))
    missing = paths - volumes

    if not missing:
        return

    # Typo fix: "peformance" -> "performance" in the error message.
    raise Exception(
        "task %s (image %s) has paths that should be volumes for performance "
        "that are not declared as Docker volumes: %s "
        "(have you added them as VOLUMEs in the Dockerfile?)"
        % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing)))
    )
@transforms.add
def check_run_task_caches(config, tasks):
    """Audit for caches requiring run-task.

    run-task manages caches in certain ways. If a cache managed by run-task
    is used by a non run-task task, it could cause problems. So we audit for
    that and make sure certain cache names are exclusive to run-task.

    IF YOU ARE TEMPTED TO MAKE EXCLUSIONS TO THIS POLICY, YOU ARE LIKELY
    CONTRIBUTING TECHNICAL DEBT AND WILL HAVE TO SOLVE MANY OF THE PROBLEMS
    THAT RUN-TASK ALREADY SOLVES. THINK LONG AND HARD BEFORE DOING THAT.
    """
    # Cache names reserved for run-task-managed tasks.
    re_reserved_caches = re.compile(
        """^
        (checkouts|tooltool-cache)
        """,
        re.VERBOSE,
    )

    re_checkout_cache = re.compile("^checkouts")
    re_sparse_checkout_cache = re.compile("^checkouts-sparse")
    re_shallow_checkout_cache = re.compile("^checkouts-git-shallow")

    # Every cache must be namespaced by trust domain and level.
    cache_prefix = "{trust_domain}-level-{level}-".format(
        trust_domain=config.graph_config["trust-domain"],
        level=config.params["level"],
    )

    # Reserved cache names must embed a hash of run-task itself so the cache
    # is invalidated whenever run-task changes.
    suffix = _run_task_suffix(config.params["repository_type"])

    for task in tasks:
        payload = task["task"].get("payload", {})
        command = payload.get("command") or [""]

        main_command = command[0] if isinstance(command[0], str) else ""
        run_task = is_run_task(main_command)

        require_sparse_cache = False
        require_shallow_cache = False
        have_checkout_cache = False
        have_sparse_cache = False
        have_shallow_cache = False

        if run_task:
            # Scan run-task's own arguments (everything before "--") for
            # flags that change which checkout cache flavor is required.
            for arg in command[1:]:
                if not isinstance(arg, str):
                    continue

                if arg == "--":
                    break

                if arg.startswith("--gecko-sparse-profile"):
                    if "=" not in arg:
                        raise Exception(
                            "{} is specifying `--gecko-sparse-profile` to run-task "
                            "as two arguments. Unable to determine if the sparse "
                            "profile exists.".format(task["label"])
                        )
                    _, sparse_profile = arg.split("=", 1)
                    if not os.path.exists(os.path.join(GECKO, sparse_profile)):
                        # Typo fix: "non-existant" -> "non-existent".
                        raise Exception(
                            "{} is using non-existent sparse profile {}.".format(
                                task["label"], sparse_profile
                            )
                        )
                    require_sparse_cache = True
                    break

                if arg == "--gecko-shallow-clone":
                    require_shallow_cache = True
                    break

        for cache in payload.get("cache", {}):
            if not cache.startswith(cache_prefix):
                raise Exception(
                    "{} is using a cache ({}) which is not appropriate "
                    "for its trust-domain and level. It should start with {}.".format(
                        task["label"], cache, cache_prefix
                    )
                )

            cache = cache[len(cache_prefix) :]

            if re_checkout_cache.match(cache):
                have_checkout_cache = True

            if re_sparse_checkout_cache.match(cache):
                have_sparse_cache = True

            if re_shallow_checkout_cache.match(cache):
                have_shallow_cache = True

            if not re_reserved_caches.match(cache):
                continue

            if not run_task:
                raise Exception(
                    "%s is using a cache (%s) reserved for run-task "
                    "change the task to use run-task or use a different "
                    "cache name" % (task["label"], cache)
                )

            if not cache.endswith(suffix):
                raise Exception(
                    "%s is using a cache (%s) reserved for run-task "
                    "but the cache name is not dependent on the contents "
                    "of run-task; change the cache name to conform to the "
                    "naming requirements" % (task["label"], cache)
                )

        if have_checkout_cache and require_sparse_cache and not have_sparse_cache:
            raise Exception(
                "%s is using a sparse checkout but not using "
                "a sparse checkout cache; change the checkout "
                "cache name so it is sparse aware" % task["label"]
            )

        if have_checkout_cache and require_shallow_cache and not have_shallow_cache:
            raise Exception(
                "%s is using a shallow clone but not using "
                "a shallow checkout cache; change the checkout "
                "cache name so it is shallow aware" % task["label"]
            )

        yield task