tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

gn_editor.py (17741B)


      1 #!/usr/bin/env python3
      2 # Copyright 2022 The Chromium Authors
      3 # Use of this source code is governed by a BSD-style license that can be
      4 # found in the LICENSE file.
      5 
      6 import argparse
      7 import dataclasses
      8 import functools
      9 import json
     10 import logging
     11 import multiprocessing
     12 import os
     13 import pathlib
     14 import subprocess
     15 import sys
     16 from typing import List, Optional, Set
     17 
     18 import json_gn_editor
     19 import utils
     20 
     21 _SRC_PATH = pathlib.Path(__file__).resolve().parents[2]
     22 
     23 _BUILD_ANDROID_PATH = _SRC_PATH / 'build/android'
     24 if str(_BUILD_ANDROID_PATH) not in sys.path:
     25    sys.path.append(str(_BUILD_ANDROID_PATH))
     26 from pylib import constants
     27 
     28 _BUILD_ANDROID_GYP_PATH = _SRC_PATH / 'build/android/gyp'
     29 if str(_BUILD_ANDROID_GYP_PATH) not in sys.path:
     30    sys.path.append(str(_BUILD_ANDROID_GYP_PATH))
     31 
     32 from util import build_utils
     33 
     34 _GIT_IGNORE_STR = '(git ignored file) '
     35 
     36 NO_VALID_GN_STR = 'No valid GN files found after filtering.'
     37 
     38 
     39 @dataclasses.dataclass
     40 class OperationResult:
     41    path: str
     42    git_ignored: bool = False
     43    dryrun: bool = False
     44    skipped: bool = False
     45    skip_reason: str = ''
     46 
     47    def __str__(self):
     48        msg = f'Skipped ' if self.skipped else 'Updated '
     49        dryrun = '[DRYRUN] ' if self.dryrun else ''
     50        ignore = _GIT_IGNORE_STR if self.git_ignored else ''
     51        skip = f' ({self.skip_reason})' if self.skipped else ''
     52        return f'{dryrun}{msg}{ignore}{self.path}{skip}'
     53 
     54 
     55 def _add_deps(target: str, deps: List[str], root: pathlib.Path, path: str):
     56    with json_gn_editor.BuildFile(path, root) as build_file:
     57        build_file.add_deps(target, deps)
     58 
     59 
     60 def _search_deps(name_query: Optional[str], path_query: Optional[str],
     61                 root: pathlib.Path, path: str):
     62    with json_gn_editor.BuildFile(path, root) as build_file:
     63        build_file.search_deps(name_query, path_query)
     64 
     65 
     66 def _split_deps(existing_dep: str, new_deps: List[str], root: pathlib.Path,
     67                path: str, dryrun: bool) -> Optional[OperationResult]:
     68    with json_gn_editor.BuildFile(path, root, dryrun=dryrun) as build_file:
     69        if build_file.split_deps(existing_dep, new_deps):
     70            return OperationResult(path=os.path.relpath(path, start=root),
     71                                   git_ignored=utils.is_git_ignored(
     72                                       root, path),
     73                                   dryrun=dryrun)
     74    return None
     75 
     76 
     77 def _remove_deps(
     78        *, deps: List[str], out_dir: str, root: pathlib.Path, path: str,
     79        dryrun: bool, targets: List[str], inline_mode: bool,
     80        target_name_filter: Optional[str]) -> Optional[OperationResult]:
     81    with json_gn_editor.BuildFile(path, root, dryrun=dryrun) as build_file:
     82        if build_file.remove_deps(deps, out_dir, targets, target_name_filter,
     83                                  inline_mode):
     84            return OperationResult(path=os.path.relpath(path, start=root),
     85                                   git_ignored=utils.is_git_ignored(
     86                                       root, path),
     87                                   dryrun=dryrun)
     88    return None
     89 
     90 
     91 def _add(args: argparse.Namespace, build_filepaths: List[str],
     92         root: pathlib.Path):
     93    deps = args.deps
     94    target = args.target
     95    with multiprocessing.Pool() as pool:
     96        pool.map(
     97            functools.partial(_add_deps, target, deps, root),
     98            build_filepaths,
     99        )
    100 
    101 
    102 def _search(args: argparse.Namespace, build_filepaths: List[str],
    103            root: pathlib.Path):
    104    name_query = args.name
    105    path_query = args.path
    106    if name_query:
    107        logging.info(f'Searching dep names using: {name_query}')
    108    if path_query:
    109        logging.info(f'Searching paths using: {path_query}')
    110    with multiprocessing.Pool() as pool:
    111        pool.map(
    112            functools.partial(_search_deps, name_query, path_query, root),
    113            build_filepaths,
    114        )
    115 
    116 
    117 def _split(args: argparse.Namespace, build_filepaths: List[str],
    118           root: pathlib.Path) -> List[OperationResult]:
    119    num_total = len(build_filepaths)
    120    results = []
    121    with multiprocessing.Pool() as pool:
    122        tasks = {
    123            filepath: pool.apply_async(
    124                _split_deps,
    125                (args.existing, args.new, root, filepath, args.dryrun))
    126            for filepath in build_filepaths
    127        }
    128        for idx, filepath in enumerate(tasks.keys()):
    129            relpath = os.path.relpath(filepath, start=root)
    130            logging.info('[%d/%d] Checking %s', idx, num_total, relpath)
    131            operation_result = tasks[filepath].get()
    132            if operation_result:
    133                logging.info(operation_result)
    134                results.append(operation_result)
    135    return results
    136 
    137 
    138 def _get_project_json_contents(out_dir: str) -> str:
    139    project_json_path = os.path.join(out_dir, 'project.json')
    140    with open(project_json_path) as f:
    141        return f.read()
    142 
    143 
    144 def _calculate_targets_for_file(relpath: str, arg_extra_targets: List[str],
    145                                all_targets: Set[str]) -> Optional[List[str]]:
    146    if os.path.basename(relpath) != 'BUILD.gn':
    147        # Build all targets when we are dealing with build files that might be
    148        # imported by other build files (e.g. config.gni or other_name.gn).
    149        return []
    150    dirpath = os.path.dirname(relpath)
    151    file_extra_targets = []
    152    for full_target_name in all_targets:
    153        target_dir, short_target_name = full_target_name.split(':', 1)
    154        # __ is used for sub-targets in GN, only focus on top-level ones. Also
    155        # skip targets using other toolchains, e.g.
    156        # base:feature_list_buildflags(//build/toolchain/linux:clang_x64)
    157        if (target_dir == dirpath and '__' not in short_target_name
    158                and '(' not in short_target_name):
    159            file_extra_targets.append(full_target_name)
    160    targets = arg_extra_targets + file_extra_targets
    161    return targets or None
    162 
    163 
def _remove(args: argparse.Namespace, build_filepaths: List[str],
            root: pathlib.Path) -> List[OperationResult]:
    """Run the 'remove' subcommand: drop deps the build no longer needs.

    Validates the output directory, runs 'gn gen --ide=json' to learn the set
    of known build files and targets, does a full build, then processes each
    build file in order, delegating the actual dep removal and rebuild checks
    to _remove_deps. Returns one OperationResult per updated or skipped file.
    Note: mutates args.dep when --all-java-deps is passed.
    """
    num_total = len(build_filepaths)

    # Resolve the output directory (guessed when -C was not passed).
    if args.output_directory:
        constants.SetOutputDirectory(args.output_directory)
    constants.CheckOutputDirectory()
    out_dir: str = constants.GetOutDirectory()

    args_gn_path = os.path.join(out_dir, 'args.gn')
    if not os.path.exists(args_gn_path):
        raise Exception(f'No args.gn in out directory {out_dir}')
    with open(args_gn_path) as f:
        # Although the target may compile fine, bytecode checks are necessary
        # for correctness at runtime.
        assert 'android_static_analysis = "on"' in f.read(), (
            'Static analysis must be on to ensure correctness.')
        # TODO: Ensure that the build server is not running.

    logging.info(f'Running "gn gen" in output directory: {out_dir}')
    # --ide=json writes project.json, parsed below.
    build_utils.CheckOutput(['gn', 'gen', '-C', out_dir, '--ide=json'])

    if args.all_java_deps:
        assert not args.dep, '--all-java-target does not support passing deps.'
        assert args.file, '--all-java-target requires passing --file.'
        logging.info(f'Finding java deps under {out_dir}.')
        all_java_deps = build_utils.CheckOutput([
            str(_SRC_PATH / 'build' / 'android' / 'list_java_targets.py'),
            '--gn-labels', '-C', out_dir
        ]).split('\n')
        logging.info(f'Found {len(all_java_deps)} java deps.')
        args.dep += all_java_deps
    else:
        assert args.dep, 'At least one explicit dep is required.'

    project_json_contents = _get_project_json_contents(out_dir)
    project_json = json.loads(project_json_contents)
    # The input file names have a // prefix. (e.g. //android_webview/BUILD.gn)
    known_build_files = set(
        name[2:] for name in project_json['build_settings']['gen_input_files'])
    # Remove the // prefix for target names so ninja can build them.
    known_target_names = set(name[2:]
                             for name in project_json['targets'].keys())

    # Fail fast if any user-supplied extra target is unknown to this out dir.
    unknown_targets = [
        t for t in args.extra_build_targets if t not in known_target_names
    ]
    assert not unknown_targets, f'Cannot build {unknown_targets} in {out_dir}.'

    logging.info('Building all targets in preparation for removing deps')
    # Avoid capturing stdout/stderr to see the progress of the full build.
    subprocess.run(['autoninja', '-C', out_dir], check=True)

    results = []
    for idx, filepath in enumerate(build_filepaths):
        # Since removal can take a long time, provide an easy way to resume the
        # command if something fails.
        try:
            # When resuming, the first build file is the one that is being
            # resumed. Avoid inline mode skipping it since it's already started
            # to be processed and the first dep may already have been removed.
            if args.resume_from and idx == 0 and args.inline_mode:
                logging.info(f'Resuming: skipping inline mode for {filepath}.')
                should_inline = False
            else:
                should_inline = args.inline_mode
            relpath = os.path.relpath(filepath, start=root)
            logging.info('[%d/%d] Checking %s', idx, num_total, relpath)
            if relpath not in known_build_files:
                operation_result = OperationResult(
                    path=relpath,
                    skipped=True,
                    skip_reason='Not in the list of known build files.')
            else:
                targets = _calculate_targets_for_file(relpath,
                                                      args.extra_build_targets,
                                                      known_target_names)
                if targets is None:
                    operation_result = OperationResult(
                        path=relpath,
                        skipped=True,
                        skip_reason='Could not find any valid targets.')
                else:
                    # _remove_deps returns None when the file was unchanged.
                    operation_result = _remove_deps(
                        deps=args.dep,
                        out_dir=out_dir,
                        root=root,
                        path=filepath,
                        dryrun=args.dryrun,
                        targets=targets,
                        inline_mode=should_inline,
                        target_name_filter=args.target_name_filter)
            if operation_result:
                logging.info(operation_result)
                results.append(operation_result)
        # Use blank except: to show this for KeyboardInterrupt as well.
        except:
            logging.error(
                f'Encountered error while processing {filepath}. Append the '
                'following args to resume from this file once the error is '
                f'fixed:\n\n--resume-from {filepath}\n')
            raise
    return results
    267 
    268 
    269 def main():
    270    parser = argparse.ArgumentParser(
    271        prog='gn_editor', description='Add or remove deps programatically.')
    272 
    273    common_args_parser = argparse.ArgumentParser(add_help=False)
    274    common_args_parser.add_argument(
    275        '-n',
    276        '--dryrun',
    277        action='store_true',
    278        help='Show which files would be updated but avoid changing them.')
    279    common_args_parser.add_argument('-v',
    280                                    '--verbose',
    281                                    action='store_true',
    282                                    help='Used to print ninjalog.')
    283    common_args_parser.add_argument('-q',
    284                                    '--quiet',
    285                                    action='store_true',
    286                                    help='Used to print less logging.')
    287    common_args_parser.add_argument('--file',
    288                                    help='Run on a specific build file.')
    289    common_args_parser.add_argument(
    290        '--resume-from',
    291        help='Skip files before this build file path (debugging).')
    292 
    293    subparsers = parser.add_subparsers(
    294        required=True, help='Use subcommand -h to see full usage.')
    295 
    296    add_parser = subparsers.add_parser(
    297        'add',
    298        parents=[common_args_parser],
    299        help='Add one or more deps to a specific target (pass the path to the '
    300        'BUILD.gn via --file for faster results). The target **must** '
    301        'have a deps variable defined, even if it is an empty [].')
    302    add_parser.add_argument('--target', help='The name of the target.')
    303    add_parser.add_argument('--deps',
    304                            nargs='+',
    305                            help='The name(s) of the new dep(s).')
    306    add_parser.set_defaults(command=_add)
    307 
    308    search_parser = subparsers.add_parser(
    309        'search',
    310        parents=[common_args_parser],
    311        help='Search for strings in build files. Each query is a regex string.'
    312    )
    313    search_parser.add_argument('--name',
    314                               help='This is checked against dep names.')
    315    search_parser.add_argument(
    316        '--path', help='This checks the relative path of the build file.')
    317    search_parser.set_defaults(command=_search)
    318 
    319    split_parser = subparsers.add_parser(
    320        'split',
    321        parents=[common_args_parser],
    322        help='Split one or more deps from an existing dep.')
    323    split_parser.add_argument('existing', help='The dep to split from.')
    324    split_parser.add_argument('new',
    325                              nargs='+',
    326                              help='One of the new deps to be added.')
    327    split_parser.set_defaults(command=_split)
    328 
    329    remove_parser = subparsers.add_parser(
    330        'remove',
    331        parents=[common_args_parser],
    332        help='Remove one or more deps if the build still succeeds. Removing '
    333        'one dep at a time is recommended.')
    334    remove_parser.add_argument(
    335        'dep',
    336        nargs='*',
    337        help='One or more deps to be removed. Zero when other options are used.'
    338    )
    339    remove_parser.add_argument(
    340        '-C',
    341        '--output-directory',
    342        metavar='OUT',
    343        help='If outdir is not provided, will attempt to guess.')
    344    remove_parser.add_argument(
    345        '--target-name-filter',
    346        help='This will cause the script to only remove deps from targets that '
    347        'match the filter provided. The filter should be a valid python regex '
    348        'string and is used in a re.search on the full GN target names, e.g. '
    349        're.search(pattern, "//base:base_java").')
    350    remove_parser.add_argument(
    351        '--all-java-deps',
    352        action='store_true',
    353        help='This will attempt to remove all known java deps. This option '
    354        'requires no explicit deps to be passed.')
    355    remove_parser.add_argument(
    356        '--extra-build-targets',
    357        metavar='T',
    358        nargs='*',
    359        default=[],
    360        help='The set of extra targets to compile after each dep removal. This '
    361        'is in addition to file-based targets that are automatically added.')
    362    remove_parser.add_argument(
    363        '--inline-mode',
    364        action='store_true',
    365        help='Skip the build file if the first dep is not found and removed. '
    366        'This is especially useful when inlining deps so that a build file '
    367        'that does not contain the dep being inlined can be skipped. This '
    368        'mode assumes that the first dep is the one being inlined.')
    369    remove_parser.set_defaults(command=_remove)
    370 
    371    args = parser.parse_args()
    372 
    373    if args.quiet:
    374        level = logging.WARNING
    375    elif args.verbose:
    376        level = logging.DEBUG
    377    else:
    378        level = logging.INFO
    379    logging.basicConfig(
    380        level=level, format='%(levelname).1s %(relativeCreated)7d %(message)s')
    381 
    382    root = _SRC_PATH
    383    if args.file:
    384        build_filepaths = [os.path.relpath(args.file, root)]
    385    else:
    386        build_filepaths = []
    387        logging.info('Finding build files under %s', root)
    388        for dirpath, _, filenames in os.walk(root):
    389            for filename in filenames:
    390                filepath = os.path.join(dirpath, filename)
    391                if filename.endswith(('.gn', '.gni')):
    392                    build_filepaths.append(filepath)
    393        build_filepaths.sort()
    394 
    395    logging.info('Found %d build files.', len(build_filepaths))
    396 
    397    if args.resume_from:
    398        resume_idx = None
    399        for idx, path in enumerate(build_filepaths):
    400            if path.endswith(args.resume_from):
    401                resume_idx = idx
    402                break
    403        assert resume_idx is not None, f'Did not find {args.resume_from}.'
    404        logging.info('Skipping %d build files with --resume-from.', resume_idx)
    405        build_filepaths = build_filepaths[resume_idx:]
    406 
    407    filtered_build_filepaths = [
    408        p for p in build_filepaths if not utils.is_bad_gn_file(p, root)
    409    ]
    410    num_total = len(filtered_build_filepaths)
    411    if num_total == 0:
    412        logging.error(NO_VALID_GN_STR)
    413        sys.exit(1)
    414    logging.info('Running on %d valid build files.', num_total)
    415 
    416    operation_results: List[OperationResult] = args.command(
    417        args, filtered_build_filepaths, root)
    418    if operation_results is None:
    419        return
    420    ignored_operation_results = [r for r in operation_results if r.git_ignored]
    421    skipped_operation_results = [r for r in operation_results if r.skipped]
    422    num_ignored = len(ignored_operation_results)
    423    num_skipped = len(skipped_operation_results)
    424    num_updated = len(operation_results) - num_skipped
    425    print(f'Checked {num_total}, updated {num_updated} ({num_ignored} of '
    426          f'which are ignored by git under {root}), and skipped {num_skipped} '
    427          'build files.')
    428    if num_ignored:
    429        print(f'\nThe following {num_ignored} files were ignored by git and '
    430              'may need separate CLs in their respective repositories:')
    431        for result in ignored_operation_results:
    432            print('  ' + result.path)
    433 
    434 
    435 if __name__ == '__main__':
    436    main()