tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

jobs.py (5131B)


      1 # mypy: allow-untyped-defs
      2 
      3 import argparse
      4 import json
      5 import os
      6 import re
      7 import sys
      8 from ..wpt.testfiles import branch_point, files_changed
      9 
     10 from tools import localpaths  # noqa: F401
     11 
# Absolute path of the web-platform-tests checkout root: two directory
# levels above this file (this module lives in tools/ci/).
wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
     13 
# Common exclusions between affected_tests and stability jobs.
# Files in these dirs would trigger the execution of too many tests.
# Each entry is a rule string consumed by Ruleset: a leading "!" marks
# the regex as an exclusion.
EXCLUDES = [
    "!tools/",
    "!docs/",
    "!conformance-checkers/",
    "!.*/OWNERS",
    "!.*/META.yml",
    "!.*/tools/",
    "!.*/README",
    "!css/[^/]*$"  # files sitting directly in css/, not its subdirectories
]
     26 
# Rules are just regex on the path, with a leading ! indicating a regex that must not
# match for the job. Paths should be kept in sync with scripts in update_built.py.
# Maps job name -> list of rule strings; get_jobs() compiles each list
# into a Ruleset and selects a job when any changed path matches it.
job_path_map = {
    "affected_tests": [".*/.*", "!resources/(?!idlharness.js)"] + EXCLUDES,
    "stability": [".*/.*", "!resources/.*"] + EXCLUDES,
    "lint": [".*"],
    "manifest_upload": [".*"],
    "resources_unittest": ["resources/", "tools/"],
    "tools_unittest": ["tools/"],
    "wptrunner_unittest": ["tools/"],
    "update_built": ["conformance-checkers/",
                     "css/css-images/",
                     "css/css-ui/",
                     "css/css-writing-modes/",
                     "fetch/metadata/",
                     "html/",
                     "infrastructure/",
                     "mimesniff/"],
    "wpt_integration": ["tools/"],
    "wptrunner_infrastructure": ["infrastructure/",
                                 "tools/",
                                 "resources/",
                                 "webdriver/tests/support"],
}
     51 
     52 
     53 def _path_norm(path):
     54    """normalize a path for both case and slashes (to /)"""
     55    path = os.path.normcase(path)
     56    if os.path.sep != "/":
     57        # this must be after the normcase call as that does slash normalization
     58        path = path.replace(os.path.sep, "/")
     59    return path
     60 
     61 
     62 class Ruleset:
     63    def __init__(self, rules):
     64        self.include = []
     65        self.exclude = []
     66        for rule in rules:
     67            rule = _path_norm(rule)
     68            self.add_rule(rule)
     69 
     70    def add_rule(self, rule):
     71        if rule.startswith("!"):
     72            target = self.exclude
     73            rule = rule[1:]
     74        else:
     75            target = self.include
     76 
     77        target.append(re.compile("^%s" % rule))
     78 
     79    def __call__(self, path):
     80        path = _path_norm(path)
     81        for item in self.exclude:
     82            if item.match(path):
     83                return False
     84        for item in self.include:
     85            if item.match(path):
     86                return True
     87        return False
     88 
     89    def __repr__(self):
     90        subs = tuple(",".join(item.pattern for item in target)
     91                     for target in (self.include, self.exclude))
     92        return "Rules<include:[%s] exclude:[%s]>" % subs
     93 
     94 
     95 def get_paths(**kwargs):
     96    if kwargs["revish"] is None:
     97        revish = "%s..HEAD" % branch_point()
     98    else:
     99        revish = kwargs["revish"]
    100 
    101    changed, _ = files_changed(revish, ignore_rules=[])
    102    all_changed = {os.path.relpath(item, wpt_root) for item in set(changed)}
    103    return all_changed
    104 
    105 
    106 def get_jobs(paths, **kwargs):
    107    if kwargs.get("all"):
    108        return set(job_path_map.keys())
    109 
    110    jobs = set()
    111 
    112    rules = {}
    113    includes = kwargs.get("includes")
    114    if includes is not None:
    115        includes = set(includes)
    116    for key, value in job_path_map.items():
    117        if includes is None or key in includes:
    118            rules[key] = Ruleset(value)
    119 
    120    for path in paths:
    121        for job in list(rules.keys()):
    122            ruleset = rules[job]
    123            if ruleset(path):
    124                rules.pop(job)
    125                jobs.add(job)
    126        if not rules:
    127            break
    128 
    129    # Default jobs should run even if there were no changes
    130    if not paths:
    131        for job, path_re in job_path_map.items():
    132            if ".*" in path_re:
    133                jobs.add(job)
    134 
    135    return jobs
    136 
    137 
    138 def create_parser():
    139    parser = argparse.ArgumentParser()
    140    parser.add_argument("revish", nargs="?",
    141                        help="Commits to consider. Defaults to the commits on the current branch")
    142    parser.add_argument("--all", action="store_true",
    143                        help="List all jobs unconditionally.")
    144    parser.add_argument("--includes", nargs="*",
    145                        help="Jobs to check for. Return code is 0 if all jobs are found, otherwise 1")
    146    parser.add_argument("--json", action="store_true",
    147                        help="Output jobs as JSON, instead of one per line")
    148    parser.add_argument("--json-indent", type=int,
    149                        help="Indent the JSON with this many spaces (default: no indentation, single line output)")
    150    return parser
    151 
    152 
    153 def run(**kwargs):
    154    paths = get_paths(**kwargs)
    155    jobs = get_jobs(paths, **kwargs)
    156    if not kwargs["includes"]:
    157        if kwargs["json"]:
    158            json.dump(sorted(jobs), sys.stdout, indent=kwargs["json_indent"])
    159            sys.stdout.write("\n")
    160        else:
    161            for item in sorted(jobs):
    162                print(item)
    163    else:
    164        return 0 if set(kwargs["includes"]).issubset(jobs) else 1