l10n.py (16086B)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Do transforms specific to l10n kind
"""

from mozbuild.chunkify import chunkify
from taskgraph.transforms.base import TransformSequence
from taskgraph.util import json
from taskgraph.util.copy import deepcopy
from taskgraph.util.dependencies import get_dependencies, get_primary_dependency
from taskgraph.util.schema import (
    Schema,
    optionally_keyed_by,
    resolve_keyed_by,
    taskref_or_string,
)
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.treeherder import add_suffix
from voluptuous import Any, Optional, Required

from gecko_taskgraph.transforms.job import job_description_schema
from gecko_taskgraph.transforms.task import task_description_schema
from gecko_taskgraph.util.attributes import (
    copy_attributes_from_dependent_job,
    sorted_unique_list,
    task_name,
)


def _by_platform(arg):
    """Shorthand for a schema value that may be keyed by ``build-platform``."""
    return optionally_keyed_by("build-platform", arg)


l10n_description_schema = Schema({
    # Name for this job, inferred from the dependent job before validation
    Required("name"): str,
    # build-platform, inferred from dependent job before validation
    Required("build-platform"): str,
    # max run time of the task
    Required("run-time"): _by_platform(int),
    # Locales not to repack for
    Required("ignore-locales"): _by_platform([str]),
    # All l10n jobs use mozharness
    Required("mozharness"): {
        # Script to invoke for mozharness
        Required("script"): _by_platform(str),
        # Config files passed to the mozharness script
        Required("config"): _by_platform([str]),
        # Additional paths to look for mozharness configs in. These should be
        # relative to the base of the source checkout
        Optional("config-paths"): [str],
        # Options to pass to the mozharness script
        Optional("options"): _by_platform([str]),
        # Action commands to provide to mozharness script
        Required("actions"): _by_platform([str]),
        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Optional("comm-checkout"): bool,
    },
    # Items for the taskcluster index
    Optional("index"): {
        # Product to identify as in the taskcluster index
        Required("product"): _by_platform(str),
        # Job name to identify as in the taskcluster index
        Required("job-name"): _by_platform(str),
        # Type of index
        Optional("type"): _by_platform(str),
    },
    # Description of the localized task
    Required("description"): _by_platform(str),
    Optional("run-on-projects"): job_description_schema["run-on-projects"],
    Optional("run-on-repo-type"): job_description_schema["run-on-repo-type"],
    # worker-type to utilize
    Required("worker-type"): _by_platform(str),
    # File which contains the used locales
    Required("locales-file"): _by_platform(str),
    # Tooltool visibility required for task.
    Required("tooltool"): _by_platform(Any("internal", "public")),
    # Docker image required for task. We accept only in-tree images
    # -- generally desktop-build or android-build -- for now.
    Optional("docker-image"): _by_platform(
        # an in-tree generated docker image (from `taskcluster/docker/<name>`)
        {"in-tree": str},
    ),
    Optional("fetches"): {
        str: _by_platform([str]),
    },
    # The set of secret names to which the task has access; these are prefixed
    # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
    # this will enable any worker features required and set the task's scopes
    # appropriately. `true` here means ['*'], all secrets. Not supported on
    # Windows
    Optional("secrets"): _by_platform(Any(bool, [str])),
    # Information for treeherder
    Required("treeherder"): {
        # Platform to display the task on in treeherder
        Required("platform"): _by_platform(str),
        # Symbol to use
        Required("symbol"): str,
        # Tier this task is
        Required("tier"): _by_platform(int),
    },
    # Extra environment values to pass to the worker
    Optional("env"): _by_platform({str: taskref_or_string}),
    # Max number locales per chunk
    Optional("locales-per-chunk"): _by_platform(int),
    # Task deps to chain this task with, added in transforms from primary dependency
    # if this is a shippable-style build
    Optional("dependencies"): {str: str},
    # Run the task when the listed files change (if present).
    Optional("when"): {"files-changed": [str]},
    # passed through directly to the job description
    Optional("attributes"): job_description_schema["attributes"],
    Optional("extra"): job_description_schema["extra"],
    # Shipping product and phase
    Optional("shipping-product"): task_description_schema["shipping-product"],
    Optional("shipping-phase"): task_description_schema["shipping-phase"],
    Optional("task-from"): task_description_schema["task-from"],
})

transforms = TransformSequence()


def parse_locales_file(locales_file, platform=None):
    """Parse the passed locales file for a list of locales.

    Returns a dict mapping locale -> hg revision. JSON locales files carry an
    explicit revision per locale and may restrict a locale to particular
    platforms (filtered by *platform* when given); plain-text files yield the
    special revision "default" for every listed locale.
    """
    with open(locales_file) as f:
        if locales_file.endswith("json"):
            all_locales = json.load(f)
            # XXX Only single locales are fetched
            return {
                locale: data["revision"]
                for locale, data in all_locales.items()
                if platform is None or platform in data["platforms"]
            }
        all_locales = f.read().split()
        # 'default' is the hg revision at the top of hg repo, in this context
        return {locale: "default" for locale in all_locales}


def _remove_locales(locales, to_remove=None):
    """Return a copy of *locales* without any locale listed in *to_remove*."""
    # ja-JP-mac is a mac-only locale, but there are no mac builds being repacked,
    # so just omit it unconditionally
    # Guard the None default: `locale not in None` would raise TypeError.
    to_remove = to_remove or []
    return {
        locale: revision
        for locale, revision in locales.items()
        if locale not in to_remove
    }


@transforms.add
def setup_name(config, jobs):
    """Default each job's name from its primary (dependent) task."""
    for job in jobs:
        dep = get_primary_dependency(config, job)
        assert dep
        # Set the name to the same as the dep task, without kind name.
        # Label will get set automatically with this kinds name.
        job["name"] = job.get("name", task_name(dep))
        yield job


@transforms.add
def copy_in_useful_magic(config, jobs):
    """Copy attributes from the primary dependency onto the l10n job."""
    for job in jobs:
        dep = get_primary_dependency(config, job)
        assert dep
        attributes = copy_attributes_from_dependent_job(dep)
        attributes.update(job.get("attributes", {}))
        # build-platform is needed on `job` for by-build-platform
        job["build-platform"] = attributes.get("build_platform")
        job["attributes"] = attributes
        yield job


transforms.add_validate(l10n_description_schema)


@transforms.add
def gather_required_signoffs(config, jobs):
    """Union the `required_signoffs` attributes of all dependencies."""
    for job in jobs:
        job.setdefault("attributes", {})["required_signoffs"] = sorted_unique_list(
            *(
                dep.attributes.get("required_signoffs", [])
                for dep in get_dependencies(config, job)
            )
        )
        yield job


@transforms.add
def remove_repackage_dependency(config, jobs):
    """Drop the `repackage` dependency on non-mac platforms, where it is unused."""
    for job in jobs:
        build_platform = job["attributes"]["build_platform"]
        if not build_platform.startswith("macosx"):
            del job["dependencies"]["repackage"]

        yield job


@transforms.add
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    fields = [
        "locales-file",
        "locales-per-chunk",
        "worker-type",
        "description",
        "run-time",
        "docker-image",
        "secrets",
        "fetches.toolchain",
        "fetches.fetch",
        "tooltool",
        "env",
        "ignore-locales",
        "mozharness.config",
        "mozharness.options",
        "mozharness.actions",
        "mozharness.script",
        "treeherder.tier",
        "treeherder.platform",
        "index.type",
        "index.product",
        "index.job-name",
        "when.files-changed",
    ]
    for job in jobs:
        job = deepcopy(job)  # don't overwrite dict values here
        for field in fields:
            resolve_keyed_by(item=job, field=field, item_name=job["name"])
        yield job


@transforms.add
def handle_artifact_prefix(config, jobs):
    """Resolve ``artifact_prefix`` in env vars"""
    for job in jobs:
        artifact_prefix = get_artifact_prefix(job)
        for k1, v1 in job.get("env", {}).items():
            if isinstance(v1, str):
                job["env"][k1] = v1.format(artifact_prefix=artifact_prefix)
            elif isinstance(v1, dict):
                # task-reference style values: format each nested string
                for k2, v2 in v1.items():
                    job["env"][k1][k2] = v2.format(artifact_prefix=artifact_prefix)
        yield job


@transforms.add
def all_locales_attribute(config, jobs):
    """Attach the full locale list (and changesets) as task attributes."""
    for job in jobs:
        # locales files key platforms without -shippable/-pgo suffixes
        locales_platform = job["attributes"]["build_platform"].replace("-shippable", "")
        locales_platform = locales_platform.replace("-pgo", "")
        locales_with_changesets = parse_locales_file(
            job["locales-file"], platform=locales_platform
        )
        locales_with_changesets = _remove_locales(
            locales_with_changesets, to_remove=job["ignore-locales"]
        )

        locales = sorted(locales_with_changesets.keys())
        attributes = job.setdefault("attributes", {})
        attributes["all_locales"] = locales
        attributes["all_locales_with_changesets"] = locales_with_changesets
        if job.get("shipping-product"):
            attributes["shipping_product"] = job["shipping-product"]
        yield job


@transforms.add
def chunk_locales(config, jobs):
    """Utilizes chunking for l10n stuff"""
    for job in jobs:
        locales_per_chunk = job.get("locales-per-chunk")
        locales_with_changesets = job["attributes"]["all_locales_with_changesets"]
        if locales_per_chunk:
            chunks, remainder = divmod(len(locales_with_changesets), locales_per_chunk)
            if remainder:
                chunks += 1
            for this_chunk in range(1, chunks + 1):
                chunked = deepcopy(job)
                chunked["name"] = chunked["name"].replace("/", f"-{this_chunk}/", 1)
                chunked["mozharness"]["options"] = chunked["mozharness"].get(
                    "options", []
                )
                # chunkify doesn't work with dicts
                locales_with_changesets_as_list = sorted(
                    locales_with_changesets.items()
                )
                chunked_locales = chunkify(
                    locales_with_changesets_as_list, this_chunk, chunks
                )
                chunked["mozharness"]["options"].extend([
                    f"locale={locale}:{changeset}"
                    for locale, changeset in chunked_locales
                ])
                chunked["attributes"]["l10n_chunk"] = str(this_chunk)
                # strip revision
                chunked["attributes"]["chunk_locales"] = [
                    locale for locale, _ in chunked_locales
                ]

                # add the chunk number to the TH symbol
                chunked["treeherder"]["symbol"] = add_suffix(
                    chunked["treeherder"]["symbol"], this_chunk
                )
                yield chunked
        else:
            # no chunking: one task repacks every locale
            job["mozharness"]["options"] = job["mozharness"].get("options", [])
            job["mozharness"]["options"].extend([
                f"locale={locale}:{changeset}"
                for locale, changeset in sorted(locales_with_changesets.items())
            ])
            yield job


transforms.add_validate(l10n_description_schema)


@transforms.add
def stub_installer(config, jobs):
    """Enable the stub installer via env when the build attribute requests it."""
    for job in jobs:
        job.setdefault("attributes", {})
        job.setdefault("env", {})
        if job["attributes"].get("stub-installer"):
            job["env"].update({"USE_STUB_INSTALLER": "1"})
        yield job


@transforms.add
def set_extra_config(config, jobs):
    """Pass branch (and update channel, if any) to mozharness extra-config."""
    for job in jobs:
        job["mozharness"].setdefault("extra-config", {})["branch"] = config.params[
            "project"
        ]
        if "update-channel" in job["attributes"]:
            job["mozharness"]["extra-config"]["update_channel"] = job["attributes"][
                "update-channel"
            ]
        yield job


@transforms.add
def make_job_description(config, jobs):
    """Convert the l10n description into a standard job description."""
    for job in jobs:
        job["mozharness"].update({
            "using": "mozharness",
            "job-script": "taskcluster/scripts/builder/build-l10n.sh",
            "secrets": job.get("secrets", False),
        })
        job_description = {
            "name": job["name"],
            "worker-type": job["worker-type"],
            "description": job["description"],
            "run": job["mozharness"],
            "attributes": job["attributes"],
            "treeherder": {
                "kind": "build",
                "tier": job["treeherder"]["tier"],
                "symbol": job["treeherder"]["symbol"],
                "platform": job["treeherder"]["platform"],
            },
            "run-on-projects": (
                job.get("run-on-projects") if job.get("run-on-projects") else []
            ),
            "run-on-repo-type": job.get("run-on-repo-type", ["git", "hg"]),
        }
        if job.get("extra"):
            job_description["extra"] = job["extra"]

        job_description["run"]["tooltool-downloads"] = job["tooltool"]

        job_description["worker"] = {
            "max-run-time": job["run-time"],
            "chain-of-trust": True,
        }
        if job["worker-type"] in ["b-win2012", "b-win2022"]:
            job_description["worker"]["os"] = "windows"
            job_description["run"]["use-simple-package"] = False
            job_description["run"]["use-magic-mh-args"] = False

        if job.get("docker-image"):
            job_description["worker"]["docker-image"] = job["docker-image"]

        if job.get("fetches"):
            job_description["fetches"] = job["fetches"]

        if job.get("index"):
            job_description["index"] = {
                "product": job["index"]["product"],
                "job-name": job["index"]["job-name"],
                "type": job["index"].get("type", "generic"),
            }

        if job.get("dependencies"):
            job_description["dependencies"] = job["dependencies"]
        if job.get("env"):
            job_description["worker"]["env"] = job["env"]
        if job.get("when", {}).get("files-changed"):
            # always re-run when the locales file itself changes
            job_description.setdefault("when", {})
            job_description["when"]["files-changed"] = [job["locales-file"]] + job[
                "when"
            ]["files-changed"]

        if "shipping-phase" in job:
            job_description["shipping-phase"] = job["shipping-phase"]

        if "shipping-product" in job:
            job_description["shipping-product"] = job["shipping-product"]

        yield job_description


@transforms.add
def add_macos_signing_artifacts(config, jobs):
    """Forward the macOS build's security artifacts onto the l10n task."""
    for job in jobs:
        if "macosx" not in job["name"]:
            yield job
            continue
        build_dep = None
        for dep_job in get_dependencies(config, job):
            if dep_job.kind == "build":
                build_dep = dep_job
                break
        assert build_dep, f"l10n job {job['name']} has no build dependency"
        for path, artifact in build_dep.task["payload"]["artifacts"].items():
            if path.startswith("public/build/security/"):
                job["worker"].setdefault("artifacts", []).append({
                    "name": path,
                    "path": artifact["path"],
                    "type": "file",
                })
        yield job