__init__.py (27403B)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This module contains code for managing WebIDL files and bindings for
# the build system.

import errno
import hashlib
import json
import logging
import os
import sys
from multiprocessing import Pool

import mozpack.path as mozpath
from Codegen import CGThing
from mach.mixin.logging import LoggingMixin
from mozbuild.makeutil import Makefile
from mozbuild.pythonutil import iter_modules_in_path
from mozbuild.util import FileAvoidWrite, cpu_count

# There are various imports in this file in functions to avoid adding
# dependencies to config.status. See bug 949875.

# Limit the count on Windows, because of bug 1889842 and also the
# inefficiency of fork on Windows.
DEFAULT_PROCESS_COUNT = 4 if sys.platform == "win32" else cpu_count()


class WebIDLPool:
    """
    Distribute generation load across several processes, avoiding redundant state
    copies.
    """

    # Per-process copy of the WebIDLCodegenManager, installed by _init().
    GeneratorState = None

    def __init__(self, GeneratorState, *, processes=None):
        if processes is None:
            processes = DEFAULT_PROCESS_COUNT

        # As a special case, don't spawn an extra process if processes=1
        if processes == 1:
            WebIDLPool._init(GeneratorState)

            class SeqPool:
                # Minimal in-process stand-in for multiprocessing.Pool.map.
                def map(self, *args):
                    return list(map(*args))

            self.pool = SeqPool()
        else:
            self.pool = Pool(
                initializer=WebIDLPool._init,
                initargs=(GeneratorState,),
                processes=processes,
            )

    def run(self, filenames):
        """Generate build files for each WebIDL filename, in order."""
        return self.pool.map(WebIDLPool._run, filenames)

    @staticmethod
    def _init(GeneratorState):
        # Runs once per worker process; stashes the shared generator state.
        WebIDLPool.GeneratorState = GeneratorState

    @staticmethod
    def _run(filename):
        return WebIDLPool.GeneratorState._generate_build_files_for_webidl(filename)


class BuildResult:
    """Represents the result of processing WebIDL files.

    This holds a summary of output file generation during code generation.
    """

    def __init__(self):
        # The .webidl files that had their outputs regenerated.
        self.inputs = set()

        # The output files that were created.
        self.created = set()

        # The output files that changed.
        self.updated = set()

        # The output files that didn't change.
        self.unchanged = set()


class WebIDLCodegenManagerState(dict):
    """Holds state for the WebIDL code generation manager.

    State is currently just an extended dict. The internal implementation of
    state should be considered a black box to everyone except
    WebIDLCodegenManager. But we'll still document it.

    Any set stored in this dict should be copied and sorted in the `dump()`
    method.

    Fields:

    version
        The integer version of the format. This is to detect incompatible
        changes between state. It should be bumped whenever the format
        changes or semantics change.

    webidls
        A dictionary holding information about every known WebIDL input.
        Keys are the basenames of input WebIDL files. Values are dicts of
        metadata. Keys in those dicts are:

        * filename - The full path to the input filename.
        * inputs - A set of full paths to other webidl files this webidl
          depends on.
        * outputs - Set of full output paths that are created/derived from
          this file.
        * sha1 - The hexadecimal SHA-1 of the input filename from the last
          processing time.

    global_inputs
        A dictionary defining files that influence all processing. Keys
        are full filenames. Values are hexadecimal SHA-1 from the last
        processing time.

    dictionaries_convertible_to_js
        A set of names of dictionaries that are convertible to JS.

    dictionaries_convertible_from_js
        A set of names of dictionaries that are convertible from JS.
    """

    VERSION = 3

    def __init__(self, fh=None):
        self["version"] = self.VERSION
        self["webidls"] = {}
        self["global_depends"] = {}

        if not fh:
            return

        state = json.load(fh)
        if state["version"] != self.VERSION:
            raise Exception("Unknown state version: %s" % state["version"])

        self["version"] = state["version"]
        self["global_depends"] = state["global_depends"]

        for k, v in state["webidls"].items():
            self["webidls"][k] = v

            # Sets are converted to lists for serialization because JSON
            # doesn't support sets.
            self["webidls"][k]["inputs"] = set(v["inputs"])
            self["webidls"][k]["outputs"] = set(v["outputs"])

        self["dictionaries_convertible_to_js"] = set(
            state["dictionaries_convertible_to_js"]
        )

        self["dictionaries_convertible_from_js"] = set(
            state["dictionaries_convertible_from_js"]
        )

    def dump(self, fh):
        """Dump serialized state to a file handle."""
        normalized = self.copy()

        webidls = normalized["webidls"] = self["webidls"].copy()
        for k, v in self["webidls"].items():
            webidls_k = webidls[k] = v.copy()

            # Convert sets to lists because JSON doesn't support sets.
            webidls_k["outputs"] = sorted(v["outputs"])
            webidls_k["inputs"] = sorted(v["inputs"])

        normalized["dictionaries_convertible_to_js"] = sorted(
            self["dictionaries_convertible_to_js"]
        )

        normalized["dictionaries_convertible_from_js"] = sorted(
            self["dictionaries_convertible_from_js"]
        )

        json.dump(normalized, fh, sort_keys=True)


class WebIDLCodegenManager(LoggingMixin):
    """Manages all code generation around WebIDL.

    To facilitate testing, this object is meant to be generic and reusable.
    Paths, etc should be parameters and not hardcoded.
    """

    # Global parser derived declaration files.
    GLOBAL_DECLARE_FILES = {
        "BindingNames.h",
        "GeneratedAtomList.h",
        "GeneratedEventList.h",
        "PrototypeList.h",
        "RegisterBindings.h",
        "RegisterShadowRealmBindings.h",
        "RegisterWorkerBindings.h",
        "RegisterWorkerDebuggerBindings.h",
        "RegisterWorkletBindings.h",
        "UnionTypes.h",
        "WebIDLPrefs.h",
        "WebIDLSerializable.h",
    }

    # Global parser derived definition files.
    GLOBAL_DEFINE_FILES = {
        "BindingNames.cpp",
        "RegisterBindings.cpp",
        "RegisterShadowRealmBindings.cpp",
        "RegisterWorkerBindings.cpp",
        "RegisterWorkerDebuggerBindings.cpp",
        "RegisterWorkletBindings.cpp",
        "UnionTypes.cpp",
        "PrototypeList.cpp",
        "WebIDLPrefs.cpp",
        "WebIDLSerializable.cpp",
    }

    def __init__(
        self,
        config_path,
        webidl_root,
        inputs,
        exported_header_dir,
        codegen_dir,
        state_path,
        cache_dir=None,
        make_deps_path=None,
        make_deps_target=None,
    ):
        """Create an instance that manages WebIDLs in the build system.

        config_path refers to a WebIDL config file (e.g. Bindings.conf).
        inputs is a 4-tuple describing the input .webidl files and how to
        process them. Members are:
            (set(.webidl files), set(basenames of exported files),
             set(basenames of generated events files),
             set(example interface names))

        exported_header_dir and codegen_dir are directories where generated
        files will be written to.
        state_path is the path to a file that will receive JSON state from our
        actions.
        make_deps_path is the path to a make dependency file that we can
        optionally write.
        make_deps_target is the target that receives the make dependencies. It
        must be defined if using make_deps_path.
        """
        self.populate_logger()

        input_paths, exported_stems, generated_events_stems, example_interfaces = inputs

        self._config_path = config_path
        self._webidl_root = webidl_root
        self._input_paths = set(input_paths)
        self._exported_stems = set(exported_stems)
        self._generated_events_stems = set(generated_events_stems)
        self._generated_events_stems_as_array = generated_events_stems
        self._example_interfaces = set(example_interfaces)
        self._exported_header_dir = exported_header_dir
        self._codegen_dir = codegen_dir
        self._state_path = state_path
        self._cache_dir = cache_dir
        self._make_deps_path = make_deps_path
        self._make_deps_target = make_deps_target

        # Both or neither of the make-deps arguments must be supplied.
        if bool(make_deps_path) != bool(make_deps_target):
            raise Exception(
                "Must define both make_deps_path and make_deps_target "
                "if one is defined."
            )

        self._parser_results = None
        self._config = None
        self._state = WebIDLCodegenManagerState()

        if os.path.exists(state_path):
            with open(state_path) as fh:
                try:
                    self._state = WebIDLCodegenManagerState(fh=fh)
                except Exception as e:
                    # A bad state file is not fatal; we simply regenerate
                    # more than strictly necessary.
                    self.log(
                        logging.WARN,
                        "webidl_bad_state",
                        {"msg": str(e)},
                        "Bad WebIDL state: {msg}",
                    )

    @property
    def config(self):
        if not self._config:
            self._parse_webidl()

        return self._config

    def generate_build_files(self, *, processes=None):
        """Generate files required for the build.

        This function is in charge of generating all the .h/.cpp files derived
        from input .webidl files. Please note that there are build actions
        required to produce .webidl files and these build actions are
        explicitly not captured here: this function assumes all .webidl files
        are present and up to date.

        This routine is called as part of the build to ensure files that need
        to exist are present and up to date. This routine may not be called if
        the build dependencies (generated as a result of calling this the first
        time) say everything is up to date.

        Because reprocessing outputs for every .webidl on every invocation
        is expensive, we only regenerate the minimal set of files on every
        invocation. The rules for deciding what needs done are roughly as
        follows:

        1. If any .webidl changes, reparse all .webidl files and regenerate
           the global derived files. Only regenerate output files (.h/.cpp)
           impacted by the modified .webidl files.
        2. If an non-.webidl dependency (Python files, config file) changes,
           assume everything is out of date and regenerate the world. This
           is because changes in those could globally impact every output
           file.
        3. If an output file is missing, ensure it is present by performing
           necessary regeneration.

        if `processes` is set to None, run in parallel using the
        multiprocessing.Pool default. If set to 1, don't use extra processes.
        """
        # Despite #1 above, we assume the build system is smart enough to not
        # invoke us if nothing has changed. Therefore, any invocation means
        # something has changed. And, if anything has changed, we need to
        # parse the WebIDL.
        self._parse_webidl()

        result = BuildResult()

        # If we parse, we always update globals - they are cheap and it is
        # easier that way.
        created, updated, unchanged = self._write_global_derived()
        result.created |= created
        result.updated |= updated
        result.unchanged |= unchanged

        # If any of the extra dependencies changed, regenerate the world.
        global_changed, global_hashes = self._global_dependencies_changed()
        if global_changed:
            # Make a copy because we may modify.
            changed_inputs = set(self._input_paths)
        else:
            changed_inputs = self._compute_changed_inputs()

        self._state["global_depends"] = global_hashes
        self._state["dictionaries_convertible_to_js"] = {
            d.identifier.name for d in self._config.getDictionariesConvertibleToJS()
        }
        self._state["dictionaries_convertible_from_js"] = {
            d.identifier.name for d in self._config.getDictionariesConvertibleFromJS()
        }

        # Distribute the generation load across several processes. This requires
        # a) that `self' is serializable and b) that `self' is unchanged by
        # _generate_build_files_for_webidl(...)
        ordered_changed_inputs = sorted(changed_inputs)
        pool = WebIDLPool(self, processes=processes)
        generation_results = pool.run(ordered_changed_inputs)

        # Generate bindings from .webidl files.
        for filename, generation_result in zip(
            ordered_changed_inputs, generation_results
        ):
            basename = mozpath.basename(filename)
            result.inputs.add(filename)
            written, deps = generation_result
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

            self._state["webidls"][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
                sha1=self._input_hashes[filename],
            )

        # Process some special interfaces required for testing.
        for interface in self._example_interfaces:
            written = self.generate_example_files(interface)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

        # Generate a make dependency file.
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(global_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)

        self._save_state()

        return result

    def generate_example_files(self, interface):
        """Generates example files for a given interface."""
        from Codegen import CGExampleRoot

        root = CGExampleRoot(self.config, interface)

        example_paths = self._example_paths(interface)
        for path in example_paths:
            self.log(
                logging.INFO,
                "webidl_generate_example_files",
                {"filename": path},
                "Generating WebIDL example files derived from {filename}",
            )

        return self._maybe_write_codegen(root, *example_paths)

    def _parse_webidl(self):
        """Parse all input .webidl files and populate parser results/config."""
        import WebIDL
        from Configuration import Configuration

        self.log(
            logging.INFO,
            "webidl_parse",
            {"count": len(self._input_paths)},
            "Parsing {count} WebIDL files.",
        )

        hashes = {}
        parser = WebIDL.Parser(self._cache_dir, lexer=None)

        for path in sorted(self._input_paths):
            with open(path, encoding="utf-8") as fh:
                data = fh.read()
                hashes[path] = hashlib.sha1(data.encode()).hexdigest()
                parser.parse(data, path)

        # Only these directories may contain WebIDL files with interfaces
        # which are exposed to the web. WebIDL files in these roots may not
        # be changed without DOM peer review.
        #
        # Other directories may contain WebIDL files as long as they only
        # contain ChromeOnly interfaces. These are not subject to mandatory
        # DOM peer review.
        web_roots = (
            # The main WebIDL root.
            self._webidl_root,
            # The binding config root, which contains some test-only
            # interfaces.
            os.path.dirname(self._config_path),
            # The objdir sub-directory which contains generated WebIDL files.
            self._codegen_dir,
        )

        self._parser_results = parser.finish()
        self._config = Configuration(
            self._config_path,
            web_roots,
            self._parser_results,
            self._generated_events_stems_as_array,
        )
        self._input_hashes = hashes

    def _write_global_derived(self):
        """Write the global (non-per-interface) generated files."""
        from Codegen import GlobalGenRoots

        things = [("declare", f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(("define", f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == "declare":
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == "define":
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception("Unknown global gen type: %s" % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result

    def _compute_changed_inputs(self):
        """Compute the set of input files that need to be regenerated."""
        changed_inputs = set()
        expected_outputs = self.expected_build_output_files()

        # Look for missing output files.
        if any(not os.path.exists(f) for f in expected_outputs):
            # FUTURE Bug 940469 Only regenerate minimum set.
            changed_inputs |= self._input_paths

        # That's it for examining output files. We /could/ examine SHA-1's of
        # output files from a previous run to detect modifications. But that's
        # a lot of extra work and most build systems don't do that anyway.

        # Now we move on to the input files.
        old_hashes = {v["filename"]: v["sha1"] for v in self._state["webidls"].values()}

        old_filenames = set(old_hashes.keys())
        new_filenames = self._input_paths

        # If an old file has disappeared or a new file has arrived, mark
        # it.
        changed_inputs |= old_filenames ^ new_filenames

        # For the files in common between runs, compare content. If the file
        # has changed, mark it. We don't need to perform mtime comparisons
        # because content is a stronger validator.
        for filename in old_filenames & new_filenames:
            if old_hashes[filename] != self._input_hashes[filename]:
                changed_inputs.add(filename)

        # We've now populated the base set of inputs that have changed.

        # Inherit dependencies from previous run. The full set of dependencies
        # is associated with each record, so we don't need to perform any fancy
        # graph traversal.
        for v in self._state["webidls"].values():
            if any(dep for dep in v["inputs"] if dep in changed_inputs):
                changed_inputs.add(v["filename"])

        # Now check for changes to the set of dictionaries that are convertible to JS
        oldDictionariesConvertibleToJS = self._state["dictionaries_convertible_to_js"]
        newDictionariesConvertibleToJS = self._config.getDictionariesConvertibleToJS()
        newNames = {d.identifier.name for d in newDictionariesConvertibleToJS}
        changedDictionaryNames = oldDictionariesConvertibleToJS ^ newNames

        # Now check for changes to the set of dictionaries that are convertible from JS
        oldDictionariesConvertibleFromJS = self._state[
            "dictionaries_convertible_from_js"
        ]
        newDictionariesConvertibleFromJS = (
            self._config.getDictionariesConvertibleFromJS()
        )
        newNames = {d.identifier.name for d in newDictionariesConvertibleFromJS}
        changedDictionaryNames |= oldDictionariesConvertibleFromJS ^ newNames

        for name in changedDictionaryNames:
            d = self._config.getDictionaryIfExists(name)
            if d:
                changed_inputs.add(d.filename)

        # Only use paths that are known to our current state.
        # This filters out files that were deleted or changed type (e.g. from
        # static to preprocessed).
        return changed_inputs & self._input_paths

    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

          (stem, binding_stem, is_event, output_files)

        output_files is itself a tuple. The first two items are the binding
        header and C++ paths, respectively. The 2nd pair are the event header
        and C++ paths or None if this isn't an event binding.
        """
        basename = mozpath.basename(p)
        stem = mozpath.splitext(basename)[0]
        binding_stem = "%sBinding" % stem

        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            mozpath.join(header_dir, "%s.h" % binding_stem),
            mozpath.join(self._codegen_dir, "%s.cpp" % binding_stem),
            mozpath.join(header_dir, "%sFwd.h" % binding_stem),
            mozpath.join(header_dir, "%s.h" % stem) if is_event else None,
            mozpath.join(self._codegen_dir, "%s.cpp" % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files

    def _example_paths(self, interface):
        """Return the (header, cpp) output paths for an example interface."""
        return (
            mozpath.join(self._codegen_dir, "%s-example.h" % interface),
            mozpath.join(self._codegen_dir, "%s-example.cpp" % interface),
        )

    def expected_build_output_files(self):
        """Obtain the set of files generate_build_files() should write."""
        paths = set()

        # Account for global generation.
        for p in self.GLOBAL_DECLARE_FILES:
            paths.add(mozpath.join(self._exported_header_dir, p))
        for p in self.GLOBAL_DEFINE_FILES:
            paths.add(mozpath.join(self._codegen_dir, p))

        for p in self._input_paths:
            stem, binding_stem, is_event, header_dir, files = self._binding_info(p)
            paths |= {f for f in files if f}

        for interface in self._example_interfaces:
            for p in self._example_paths(interface):
                paths.add(p)

        return paths

    # Parallelization of the generation step relies on this method not changing
    # the internal state of the object
    def _generate_build_files_for_webidl(self, filename):
        """Generate the binding (and event, if any) outputs for one .webidl."""
        from Codegen import CGBindingRoot, CGEventRoot

        self.log(
            logging.INFO,
            "webidl_generate_build_for_input",
            {"filename": filename},
            "Generating WebIDL files derived from {filename}",
        )

        stem, binding_stem, is_event, header_dir, files = self._binding_info(filename)
        root = CGBindingRoot(self._config, binding_stem, filename)

        result = self._maybe_write_codegen(root, files[0], files[1], files[2])

        if is_event:
            generated_event = CGEventRoot(self._config, stem)
            result = self._maybe_write_codegen(
                generated_event, files[3], files[4], result=result
            )

        return result, root.deps()

    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed."""
        current_files = set(iter_modules_in_path(mozpath.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(iter_modules_in_path(mozpath.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with open(f, "rb") as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state["global_depends"].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state["global_depends"][f]:
                return True, current_hashes

        return False, current_hashes

    def _save_state(self):
        """Serialize the current manager state to the state file."""
        with open(self._state_path, "w", newline="\n") as fh:
            self._state.dump(fh)

    def _maybe_write_codegen(
        self,
        obj: CGThing,
        declare_path,
        define_path,
        forward_declare_path=None,
        result=None,
    ):
        """Write declare/define (and optional forward-declare) outputs for obj.

        Returns the (created, updated, unchanged) result tuple, accumulating
        into `result` if one is passed in.
        """
        assert declare_path and define_path
        if not result:
            result = (set(), set(), set())

        self._maybe_write_file(declare_path, obj.declare(), result)
        self._maybe_write_file(define_path, obj.define(), result)
        if forward_declare_path is not None:
            self._maybe_write_file(forward_declare_path, obj.forward_declare(), result)

        return result

    def _maybe_write_file(self, path, content, result):
        """Write content to path only if it changed, recording the outcome."""
        fh = FileAvoidWrite(path)
        fh.write(content)
        existed, updated = fh.close()

        if not existed:
            result[0].add(path)
        elif updated:
            result[1].add(path)
        else:
            result[2].add(path)


def create_build_system_manager(topsrcdir=None, topobjdir=None, dist_dir=None):
    """Create a WebIDLCodegenManager for use by the build system."""
    if topsrcdir is None:
        assert topobjdir is None and dist_dir is None
        import buildconfig

        topsrcdir = buildconfig.topsrcdir
        topobjdir = buildconfig.topobjdir
        dist_dir = buildconfig.substs["DIST"]

    src_dir = os.path.join(topsrcdir, "dom", "bindings")
    obj_dir = os.path.join(topobjdir, "dom", "bindings")
    webidl_root = os.path.join(topsrcdir, "dom", "webidl")

    with open(os.path.join(obj_dir, "file-lists.json")) as fh:
        files = json.load(fh)

    inputs = (
        files["webidls"],
        files["exported_stems"],
        files["generated_events_stems"],
        files["example_interfaces"],
    )

    cache_dir = os.path.join(obj_dir, "_cache")
    try:
        os.makedirs(cache_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    return WebIDLCodegenManager(
        os.path.join(src_dir, "Bindings.conf"),
        webidl_root,
        inputs,
        os.path.join(dist_dir, "include", "mozilla", "dom"),
        obj_dir,
        os.path.join(obj_dir, "codegen.json"),
        cache_dir=cache_dir,
        # The make rules include a codegen.pp file containing dependencies.
        make_deps_path=os.path.join(obj_dir, "codegen.pp"),
        make_deps_target="webidl.stub",
    )