tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

mach_commands.py (13620B)


      1 # This Source Code Form is subject to the terms of the Mozilla Public
      2 # License, v. 2.0. If a copy of the MPL was not distributed with this
      3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
      4 
      5 
      6 import argparse
      7 import json
      8 import logging
      9 import os
     10 import pathlib
     11 import sys
     12 import time
     13 import traceback
     14 from functools import partial
     15 
     16 import gecko_taskgraph.main
     17 from gecko_taskgraph.files_changed import get_locally_changed_files
     18 from gecko_taskgraph.main import commands as taskgraph_commands
     19 from mach.decorators import Command, CommandArgument, SubCommand
     20 from mach.util import strtobool
     21 from mozsystemmonitor.resourcemonitor import SystemResourceMonitor
     22 
# Most subcommands end up needing the set of locally changed files, and
# computing it is slow; kick it off at import time so the result is
# (presumably) ready by the time a command asks for it.
get_locally_changed_files.preload(os.getcwd())
     26 
     27 
     28 def setup_logging(command_context, quiet=False, verbose=True):
     29    """
     30    Set up Python logging for all loggers, sending results to stderr (so
     31    that command output can be redirected easily) and adding the typical
     32    mach timestamp.
     33    """
     34    # remove the old terminal handler
     35    old = command_context.log_manager.replace_terminal_handler(None)
     36 
     37    # re-add it, with level and fh set appropriately
     38    if not quiet:
     39        level = logging.DEBUG if verbose else logging.INFO
     40        command_context.log_manager.add_terminal_logging(
     41            fh=sys.stderr,
     42            level=level,
     43            write_interval=old.formatter.write_interval,
     44            write_times=old.formatter.write_times,
     45        )
     46        logging.getLogger("taskcluster").setLevel(logging.INFO)
     47 
     48    # all of the taskgraph logging is unstructured logging
     49    command_context.log_manager.enable_unstructured()
     50 
     51 
     52 def get_taskgraph_command_parser(name):
     53    """Given a command name, obtain its argument parser.
     54 
     55    Args:
     56        name (str): Name of the command.
     57 
     58    Returns:
     59        ArgumentParser: An ArgumentParser instance.
     60    """
     61    command = taskgraph_commands[name]
     62    parser = argparse.ArgumentParser()
     63    for arg in command.func.args:
     64        parser.add_argument(*arg[0], **arg[1])
     65 
     66    parser.set_defaults(func=command.func, **command.defaults)
     67    return parser
     68 
     69 
     70 def get_taskgraph_decision_parser():
     71    parser = get_taskgraph_command_parser("decision")
     72 
     73    extra_args = [
     74        (
     75            ["--optimize-target-tasks"],
     76            {
     77                "type": lambda flag: bool(strtobool(flag)),
     78                "nargs": "?",
     79                "const": "true",
     80                "help": "If specified, this indicates whether the target "
     81                "tasks are eligible for optimization. Otherwise, the default "
     82                "for the project is used.",
     83            },
     84        ),
     85        (
     86            ["--include-push-tasks"],
     87            {
     88                "action": "store_true",
     89                "help": "Whether tasks from the on-push graph should be re-used "
     90                "in this graph. This allows cron graphs to avoid rebuilding "
     91                "jobs that were built on-push.",
     92            },
     93        ),
     94        (
     95            ["--rebuild-kind"],
     96            {
     97                "dest": "rebuild_kinds",
     98                "action": "append",
     99                "default": argparse.SUPPRESS,
    100                "help": "Kinds that should not be re-used from the on-push graph.",
    101            },
    102        ),
    103    ]
    104    for arg in extra_args:
    105        parser.add_argument(*arg[0], **arg[1])
    106 
    107    return parser
    108 
    109 
@Command(
    "taskgraph",
    category="ci",
    description="Manipulate TaskCluster task graphs defined in-tree",
)
def taskgraph_command(command_context):
    """The taskgraph subcommands all relate to the generation of task graphs
    for Gecko continuous integration.  A task graph is a set of tasks linked
    by dependencies: for example, a binary must be built before it is tested,
    and that build may further depend on various toolchains, libraries, etc.
    """
    # Intentionally empty: this only anchors the "taskgraph" command group;
    # the real work happens in the @SubCommand handlers below.
    121 
    122 
@SubCommand(
    "taskgraph",
    "kind-graph",
    description="Generate a graph of the relationship between taskgraph kinds",
    parser=partial(get_taskgraph_command_parser, "kind-graph"),
)
def taskgraph_kind_graph(command_context, **options):
    """Delegate to taskgraph's kind-graph command, exiting non-zero on error."""
    try:
        setup_logging(command_context)
        return taskgraph_commands["kind-graph"].func(options)
    except Exception:
        # Print the traceback ourselves so mach reports a clean failure.
        traceback.print_exc()
        sys.exit(1)
    136 
    137 
@SubCommand(
    "taskgraph",
    "tasks",
    description="Show all tasks in the taskgraph",
    parser=partial(get_taskgraph_command_parser, "tasks"),
)
def taskgraph_tasks(command_context, **options):
    """Show the "tasks" (full task set) view via run_show_taskgraph."""
    return run_show_taskgraph(command_context, **options)
    146 
    147 
@SubCommand(
    "taskgraph",
    "full",
    description="Show the full taskgraph",
    parser=partial(get_taskgraph_command_parser, "full"),
)
def taskgraph_full(command_context, **options):
    """Show the "full" taskgraph view via run_show_taskgraph."""
    return run_show_taskgraph(command_context, **options)
    156 
    157 
@SubCommand(
    "taskgraph",
    "target",
    description="Show the target task set",
    parser=partial(get_taskgraph_command_parser, "target"),
)
def taskgraph_target(command_context, **options):
    """Show the "target" task set view via run_show_taskgraph."""
    return run_show_taskgraph(command_context, **options)
    166 
    167 
@SubCommand(
    "taskgraph",
    "target-graph",
    description="Show the target taskgraph",
    parser=partial(get_taskgraph_command_parser, "target-graph"),
)
def taskgraph_target_graph(command_context, **options):
    """Show the "target-graph" view via run_show_taskgraph."""
    return run_show_taskgraph(command_context, **options)
    176 
    177 
@SubCommand(
    "taskgraph",
    "optimized",
    description="Show the optimized taskgraph",
    parser=partial(get_taskgraph_command_parser, "optimized"),
)
def taskgraph_optimized(command_context, **options):
    """Show the "optimized" taskgraph view via run_show_taskgraph."""
    return run_show_taskgraph(command_context, **options)
    186 
    187 
@SubCommand(
    "taskgraph",
    "morphed",
    description="Show the morphed taskgraph",
    parser=partial(get_taskgraph_command_parser, "morphed"),
)
def taskgraph_morphed(command_context, **options):
    """Show the "morphed" taskgraph view via run_show_taskgraph."""
    return run_show_taskgraph(command_context, **options)
    196 
    197 
    198 def run_show_taskgraph(command_context, **options):
    199    # There are cases where we don't want to set up mach logging (e.g logs
    200    # are being redirected to disk). By monkeypatching the 'setup_logging'
    201    # function we can let 'taskgraph.main' decide whether or not to log to
    202    # the terminal.
    203    gecko_taskgraph.main.setup_logging = partial(
    204        setup_logging,
    205        command_context,
    206        quiet=options["quiet"],
    207        verbose=options["verbose"],
    208    )
    209    show_taskgraph = options.pop("func")
    210    return show_taskgraph(options)
    211 
    212 
    213 @SubCommand("taskgraph", "actions", description="Write actions.json to stdout")
    214 @CommandArgument(
    215    "--root", "-r", help="root of the taskgraph definition relative to topsrcdir"
    216 )
    217 @CommandArgument(
    218    "--quiet", "-q", action="store_true", help="suppress all logging output"
    219 )
    220 @CommandArgument(
    221    "--verbose",
    222    "-v",
    223    action="store_true",
    224    help="include debug-level logging output",
    225 )
    226 @CommandArgument(
    227    "--parameters",
    228    "-p",
    229    default="project=mozilla-central",
    230    help="parameters file (.yml or .json; see `taskcluster/docs/parameters.rst`)`",
    231 )
    232 def taskgraph_actions(command_context, **options):
    233    import gecko_taskgraph
    234    import gecko_taskgraph.actions
    235    from taskgraph.generator import TaskGraphGenerator
    236    from taskgraph.parameters import parameters_loader
    237 
    238    try:
    239        setup_logging(
    240            command_context, quiet=options["quiet"], verbose=options["verbose"]
    241        )
    242        parameters = parameters_loader(options["parameters"])
    243 
    244        tgg = TaskGraphGenerator(
    245            root_dir=options.get("root"),
    246            parameters=parameters,
    247        )
    248 
    249        actions = gecko_taskgraph.actions.render_actions_json(
    250            tgg.parameters,
    251            tgg.graph_config,
    252            decision_task_id="DECISION-TASK",
    253        )
    254        print(json.dumps(actions, sort_keys=True, indent=2, separators=(",", ": ")))
    255    except Exception:
    256        traceback.print_exc()
    257        sys.exit(1)
    258 
    259 
@SubCommand(
    "taskgraph",
    "decision",
    description="Run the decision task",
    parser=get_taskgraph_decision_parser,
)
def taskgraph_decision(command_context, **options):
    """Run the decision task: generate a task graph and submit to
    TaskCluster.  This is only meant to be called within decision tasks,
    and requires a great many arguments.  Commands like `mach taskgraph
    optimized` are better suited to use on the command line, and can take
    the parameters file generated by a decision task."""
    try:
        setup_logging(command_context)

        # Resource profiling and perfherder reporting only apply in CI.
        in_automation = os.environ.get("MOZ_AUTOMATION") == "1"
        moz_upload_dir = os.environ.get("MOZ_UPLOAD_DIR")
        if in_automation and moz_upload_dir:
            # Sample system resource usage every 100ms while the decision
            # task runs.
            monitor = SystemResourceMonitor(poll_interval=0.1)
            monitor.start()
        else:
            monitor = None

        try:
            # Time the actual decision run for the perfherder report below.
            start = time.monotonic()
            ret = taskgraph_commands["decision"].func(options)
            end = time.monotonic()
        finally:
            # Always stop the monitor and write its profile, even if the
            # decision run raised.  monitor is only non-None when
            # moz_upload_dir was set above, so the path is always valid here.
            if monitor is not None:
                monitor.stop()
                upload_dir = pathlib.Path(moz_upload_dir)
                profile_path = upload_dir / "profile_build_resources.json"
                with open(profile_path, "w", encoding="utf-8", newline="\n") as f:
                    # Compact separators keep the profile artifact small.
                    to_write = json.dumps(monitor.as_profile(), separators=(",", ":"))
                    f.write(to_write)

        if in_automation:
            # Report the wall-clock duration of the decision run as a
            # perfherder "build_metrics" suite.
            perfherder_data = {
                "framework": {"name": "build_metrics"},
                "suites": [
                    {
                        "name": "decision",
                        "value": end - start,
                        "lowerIsBetter": True,
                        "shouldAlert": True,
                        "subtests": [],
                    }
                ],
            }
            # Perfherder scrapes this marker line from stderr.
            print(
                f"PERFHERDER_DATA: {json.dumps(perfherder_data)}",
                file=sys.stderr,
            )

            if moz_upload_dir:
                # Also publish the same payload as an upload-dir artifact.
                upload_dir = pathlib.Path(moz_upload_dir)
                out_path = upload_dir / "perfherder-data-decision.json"
                with out_path.open("w", encoding="utf-8") as f:
                    json.dump(perfherder_data, f)

        return ret
    except Exception:
        traceback.print_exc()
        sys.exit(1)
    324 
    325 
@SubCommand(
    "taskgraph",
    "action-callback",
    description="Run action callback used by action tasks",
    parser=partial(get_taskgraph_command_parser, "action-callback"),
)
def action_callback(command_context, **options):
    """Delegate directly to taskgraph's action-callback command."""
    setup_logging(command_context)
    taskgraph_commands["action-callback"].func(options)
    335 
    336 
    337 @SubCommand(
    338    "taskgraph",
    339    "test-action-callback",
    340    description="Run an action callback in a testing mode",
    341    parser=partial(get_taskgraph_command_parser, "test-action-callback"),
    342 )
    343 def test_action_callback(command_context, **options):
    344    setup_logging(command_context)
    345 
    346    if not options["parameters"]:
    347        options["parameters"] = "project=mozilla-central"
    348 
    349    taskgraph_commands["test-action-callback"].func(options)
    350 
    351 
@SubCommand(
    "taskgraph",
    "load-image",
    description="Load a pre-built Docker image. Note that you need to "
    "have docker installed and running for this to work.",
    parser=partial(get_taskgraph_command_parser, "load-image"),
)
def load_image(command_context, **kwargs):
    """Delegate directly to taskgraph's load-image command."""
    setup_logging(command_context)
    taskgraph_commands["load-image"].func(kwargs)
    362 
    363 
@SubCommand(
    "taskgraph",
    "build-image",
    description="Build a Docker image",
    parser=partial(get_taskgraph_command_parser, "build-image"),
)
def build_image(command_context, **kwargs):
    """Delegate to taskgraph's build-image command, exiting non-zero on error."""
    setup_logging(command_context)
    try:
        taskgraph_commands["build-image"].func(kwargs)
    except Exception:
        # Print the traceback ourselves so mach reports a clean failure.
        traceback.print_exc()
        sys.exit(1)
    377 
    378 
@SubCommand(
    "taskgraph",
    "image-digest",
    description="Print the digest of the image of this name based on the "
    "current contents of the tree.",
    # Reuses the "build-image" parser, presumably because image-digest
    # accepts the same arguments — confirm against taskgraph's commands.
    parser=partial(get_taskgraph_command_parser, "build-image"),
)
def image_digest(command_context, **kwargs):
    """Delegate directly to taskgraph's image-digest command."""
    setup_logging(command_context)
    taskgraph_commands["image-digest"].func(kwargs)
    389 
    390 
@SubCommand(
    "taskgraph",
    "load-task",
    description="Loads a pre-built Docker image and drops you into a container with "
    "the same environment variables and run-task setup as the specified task. "
    "The task's payload.command will be replaced with 'bash'. You need to have "
    "docker installed and running for this to work.",
    parser=partial(get_taskgraph_command_parser, "load-task"),
)
def load_task(command_context, **kwargs):
    """Delegate directly to taskgraph's load-task command."""
    setup_logging(command_context)
    taskgraph_commands["load-task"].func(kwargs)
    403 
    404 
    405 @Command(
    406    "release-history",
    407    category="ci",
    408    description="Query balrog for release history used by enable partials generation",
    409 )
    410 @CommandArgument(
    411    "-b",
    412    "--branch",
    413    help="The gecko project branch used in balrog, such as "
    414    "mozilla-central, release, maple",
    415 )
    416 @CommandArgument(
    417    "--product", default="Firefox", help="The product identifier, such as 'Firefox'"
    418 )
    419 def generate_partials_builds(command_context, product, branch):
    420    from gecko_taskgraph.util.partials import populate_release_history
    421 
    422    try:
    423        import yaml
    424 
    425        release_history = {"release_history": populate_release_history(product, branch)}
    426        print(
    427            yaml.safe_dump(
    428                release_history, allow_unicode=True, default_flow_style=False
    429            )
    430        )
    431    except Exception:
    432        traceback.print_exc()
    433        sys.exit(1)