BUILDRULES.py (24060B)
# Copyright (C) 2018 and later: Unicode, Inc. and others.
# License & terms of use: http://www.unicode.org/copyright.html

"""Build rules consumed by icutools.databuilder.

Each generate_* function returns a list of request objects (defined in
icutools.databuilder.request_types) describing how to turn ICU source data
files into binary data files.  generate() is the entry point called by the
data builder.
"""

# Python 2/3 Compatibility (ICU-20299)
# TODO(ICU-20301): Remove this.
from __future__ import print_function

from icutools.databuilder import *
from icutools.databuilder import utils
from icutools.databuilder.request_types import *

import os
import sys


def generate(config, io, common_vars):
    """Return the full list of build requests for the ICU data package."""
    requests = []

    # By default, exclude collation data that mimics the order of some large legacy charsets.
    # We do this in "subtractive" strategy by inserting a resourceFilter.
    # Later rules from an explicit filter file may override this default behavior.
    # (In "additive" strategy this is unnecessary.)
    if config.strategy == "subtractive":
        filters = config.filters_json_data.setdefault("resourceFilters", [])
        omit_charset_collations = {
            "categories": [
                "coll_tree"
            ],
            "rules": [
                "-/collations/big5han",
                "-/collations/gb2312han"
            ]
        }
        # Insert at the front so explicit user rules (appended later) win.
        filters.insert(0, omit_charset_collations)

    if len(io.glob("misc/*")) == 0:
        print("Error: Cannot find data directory; please specify --src_dir", file=sys.stderr)
        # Use sys.exit rather than the site-provided exit() builtin,
        # which is not guaranteed to exist in all interpreters.
        sys.exit(1)

    requests += generate_cnvalias(config, io, common_vars)
    requests += generate_ulayout(config, io, common_vars)
    requests += generate_uemoji(config, io, common_vars)
    requests += generate_confusables(config, io, common_vars)
    requests += generate_conversion_mappings(config, io, common_vars)
    requests += generate_brkitr_brk(config, io, common_vars)
    requests += generate_brkitr_lstm(config, io, common_vars)
    requests += generate_brkitr_adaboost(config, io, common_vars)
    requests += generate_stringprep(config, io, common_vars)
    requests += generate_brkitr_dictionaries(config, io, common_vars)
    requests += generate_normalization(config, io, common_vars)
    requests += generate_coll_ucadata(config, io, common_vars)
    requests += generate_full_unicore_data(config, io, common_vars)
    requests += generate_unames(config, io, common_vars)
    requests += generate_misc(config, io, common_vars)
    requests += generate_curr_supplemental(config, io, common_vars)
    requests += generate_zone_supplemental(config, io, common_vars)
    requests += generate_translit(config, io, common_vars)

    # Res Tree Files
    # (input dirname, output dirname, use pool file, dep files)
    requests += generate_tree(config, io, common_vars,
        "locales",
        None,
        config.use_pool_bundle,
        [DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "curr",
        "curr",
        config.use_pool_bundle,
        [DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "lang",
        "lang",
        config.use_pool_bundle,
        [DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "region",
        "region",
        config.use_pool_bundle,
        [DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "zone",
        "zone",
        config.use_pool_bundle,
        [DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "unit",
        "unit",
        config.use_pool_bundle,
        [DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "coll",
        "coll",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        # Depends on timezoneTypes.res and keyTypeData.res.
        # TODO: We should not need this dependency to build collation.
        # TODO: Bake keyTypeData.res into the common library?
        [DepTarget("coll_ucadata"), DepTarget("misc_res"), InFile("unidata/UCARules.txt"), DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "brkitr",
        "brkitr",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [DepTarget("brkitr_brk"), DepTarget("dictionaries"), DepTarget("cnvalias")])

    requests += generate_tree(config, io, common_vars,
        "rbnf",
        "rbnf",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [DepTarget("cnvalias")])

    requests += [
        ListRequest(
            name = "icudata_list",
            variable_name = "icudata_all_output_files",
            output_file = TmpFile("icudata.lst"),
            include_tmp = False
        )
    ]

    return requests


def generate_cnvalias(config, io, common_vars):
    # UConv Name Aliases
    input_file = InFile("mappings/convrtrs.txt")
    output_file = OutFile("cnvalias.icu")
    return [
        SingleExecutionRequest(
            name = "cnvalias",
            category = "cnvalias",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("gencnval"),
            args = "-s {IN_DIR} -d {OUT_DIR} "
                "{INPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_confusables(config, io, common_vars):
    # CONFUSABLES
    txt1 = InFile("unidata/confusables.txt")
    txt2 = InFile("unidata/confusablesWholeScript.txt")
    cfu = OutFile("confusables.cfu")
    return [
        SingleExecutionRequest(
            name = "confusables",
            category = "confusables",
            dep_targets = [],
            input_files = [txt1, txt2],
            output_files = [cfu],
            tool = IcuTool("gencfu"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILES[0]} -w {IN_DIR}/{INPUT_FILES[1]} "
                "-o {OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_conversion_mappings(config, io, common_vars):
    # UConv Conversion Table Files
    input_files = [InFile(filename) for filename in io.glob("mappings/*.ucm")]
    # [9:-4] strips the "mappings/" prefix and the ".ucm" suffix.
    output_files = [OutFile("%s.cnv" % v.filename[9:-4]) for v in input_files]
    # TODO: handle BUILD_SPECIAL_CNV_FILES? Means to add --ignore-siso-check flag to makeconv
    return [
        RepeatedOrSingleExecutionRequest(
            name = "conversion_mappings",
            category = "conversion_mappings",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("makeconv"),
            args = "-s {IN_DIR} -d {OUT_DIR} -c {INPUT_FILE_PLACEHOLDER}",
            format_with = {},
            repeat_with = {
                # "f" rather than "file": avoid shadowing the builtin.
                "INPUT_FILE_PLACEHOLDER": utils.SpaceSeparatedList(f.filename for f in input_files)
            }
        )
    ]


def generate_brkitr_brk(config, io, common_vars):
    # BRK Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/rules/*.txt")]
    # [13:-4] strips the "brkitr/rules/" prefix and the ".txt" suffix.
    output_files = [OutFile("brkitr/%s.brk" % v.filename[13:-4]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "brkitr_brk",
            category = "brkitr_rules",
            dep_targets =
                [DepTarget("cnvalias"),
                 DepTarget("ulayout"), DepTarget("uemoji"), DepTarget("lstm_res"), DepTarget("adaboost_res")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genbrk"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILE} "
                "-o {OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]


def generate_stringprep(config, io, common_vars):
    # SPP FILES
    input_files = [InFile(filename) for filename in io.glob("sprep/*.txt")]
    # [6:-4] strips the "sprep/" prefix and the ".txt" suffix.
    output_files = [OutFile("%s.spp" % v.filename[6:-4]) for v in input_files]
    bundle_names = [v.filename[6:-4] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "stringprep",
            category = "stringprep",
            dep_targets = [InFile("unidata/NormalizationCorrections.txt")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gensprep"),
            args = "-s {IN_DIR}/sprep -d {OUT_DIR} -i {OUT_DIR} "
                "-b {BUNDLE_NAME} -m {IN_DIR}/unidata -u 3.2.0 {BUNDLE_NAME}.txt",
            format_with = {},
            repeat_with = {
                "BUNDLE_NAME": bundle_names
            }
        )
    ]


def generate_brkitr_dictionaries(config, io, common_vars):
    # Dict Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/dictionaries/*.txt")]
    # [20:-4] strips the "brkitr/dictionaries/" prefix and the ".txt" suffix.
    output_files = [OutFile("brkitr/%s.dict" % v.filename[20:-4]) for v in input_files]
    # Per-dictionary gendict options (storage format and code point transform).
    extra_options_map = {
        "brkitr/dictionaries/burmesedict.txt": "--bytes --transform offset-0x1000",
        "brkitr/dictionaries/cjdict.txt": "--uchars",
        "brkitr/dictionaries/khmerdict.txt": "--bytes --transform offset-0x1780",
        "brkitr/dictionaries/laodict.txt": "--bytes --transform offset-0x0e80",
        "brkitr/dictionaries/thaidict.txt": "--bytes --transform offset-0x0e00"
    }
    extra_optionses = [extra_options_map[v.filename] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "dictionaries",
            category = "brkitr_dictionaries",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gendict"),
            args = "-i {OUT_DIR} "
                "-c {EXTRA_OPTIONS} "
                "{IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {
                "EXTRA_OPTIONS": extra_optionses
            }
        )
    ]


def generate_normalization(config, io, common_vars):
    # NRM Files
    input_files = [InFile(filename) for filename in io.glob("in/*.nrm")]
    # nfc.nrm is pre-compiled into C++; see generate_full_unicore_data
    input_files.remove(InFile("in/nfc.nrm"))
    # [3:] strips the "in/" prefix.
    output_files = [OutFile(v.filename[3:]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "normalization",
            category = "normalization",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]


def generate_coll_ucadata(config, io, common_vars):
    # Collation Dependency File (ucadata.icu)
    input_file = InFile("in/coll/ucadata-%s.icu" % config.coll_han_type)
    output_file = OutFile("coll/ucadata.icu")
    return [
        SingleExecutionRequest(
            name = "coll_ucadata",
            category = "coll_ucadata",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_full_unicore_data(config, io, common_vars):
    # The core Unicode properties files (pnames.icu, uprops.icu, ucase.icu, ubidi.icu)
    # are hardcoded in the common DLL and therefore not included in the data package any more.
    # They are not built by default but need to be built for ICU4J data,
    # both in the .jar and in the .dat file (if ICU4J uses the .dat file).
    # See ICU-4497.
    if not config.include_uni_core_data:
        return []

    basenames = [
        "pnames.icu",
        "uprops.icu",
        "ucase.icu",
        "ubidi.icu",
        "nfc.nrm"
    ]
    input_files = [InFile("in/%s" % bn) for bn in basenames]
    output_files = [OutFile(bn) for bn in basenames]
    return [
        RepeatedExecutionRequest(
            name = "unicore",
            category = "unicore",
            # Empty dep/format/repeat mappings passed explicitly for
            # consistency with the other RepeatedExecutionRequests.
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]


def generate_unames(config, io, common_vars):
    # Unicode Character Names
    input_file = InFile("in/unames.icu")
    output_file = OutFile("unames.icu")
    return [
        SingleExecutionRequest(
            name = "unames",
            category = "unames",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_ulayout(config, io, common_vars):
    # Unicode text layout properties
    basename = "ulayout"
    input_file = InFile("in/%s.icu" % basename)
    output_file = OutFile("%s.icu" % basename)
    return [
        SingleExecutionRequest(
            name = basename,
            category = basename,
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_uemoji(config, io, common_vars):
    # Unicode emoji properties
    basename = "uemoji"
    input_file = InFile("in/%s.icu" % basename)
    output_file = OutFile("%s.icu" % basename)
    return [
        SingleExecutionRequest(
            name = basename,
            category = basename,
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_misc(config, io, common_vars):
    # Misc Data Res Files
    input_files = [InFile(filename) for filename in io.glob("misc/*.txt")]
    # [5:] strips the "misc/" prefix; [:-4] below strips ".txt".
    input_basenames = [v.filename[5:] for v in input_files]
    output_files = [OutFile("%s.res" % v[:-4]) for v in input_basenames]
    return [
        RepeatedExecutionRequest(
            name = "misc_res",
            category = "misc",
            dep_targets = [DepTarget("cnvalias")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/misc -d {OUT_DIR} -i {OUT_DIR} "
                "-k -q "
                "{INPUT_BASENAME}",
            format_with = {},
            repeat_with = {
                "INPUT_BASENAME": input_basenames
            }
        )
    ]


def generate_curr_supplemental(config, io, common_vars):
    # Currency Supplemental Res File
    input_file = InFile("curr/supplementalData.txt")
    input_basename = "supplementalData.txt"
    output_file = OutFile("curr/supplementalData.res")
    return [
        SingleExecutionRequest(
            name = "curr_supplemental_res",
            category = "curr_supplemental",
            dep_targets = [DepTarget("cnvalias")],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/curr -d {OUT_DIR}/curr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]


def generate_zone_supplemental(config, io, common_vars):
    # tzdbNames Res File
    input_file = InFile("zone/tzdbNames.txt")
    input_basename = "tzdbNames.txt"
    output_file = OutFile("zone/tzdbNames.res")
    return [
        SingleExecutionRequest(
            name = "zone_supplemental_res",
            category = "zone_supplemental",
            dep_targets = [DepTarget("cnvalias")],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/zone -d {OUT_DIR}/zone -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]


def generate_translit(config, io, common_vars):
    # Only these three bundles are compiled; the rest of translit/*.txt are
    # tracked as dependencies because the compiled bundles may include them.
    input_files = [
        InFile("translit/root.txt"),
        InFile("translit/en.txt"),
        InFile("translit/el.txt")
    ]
    dep_files = set(InFile(filename) for filename in io.glob("translit/*.txt"))
    dep_files -= set(input_files)
    dep_files.add(DepTarget("cnvalias"))
    # sorted() already returns a list; no list() wrapper needed.
    dep_files = sorted(dep_files)
    # [9:] strips the "translit/" prefix.
    input_basenames = [v.filename[9:] for v in input_files]
    output_files = [
        OutFile("translit/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "translit_res",
            category = "translit",
            dep_targets = dep_files,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/translit -d {OUT_DIR}/translit -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]


def generate_brkitr_lstm(config, io, common_vars):
    input_files = [InFile(filename) for filename in io.glob("brkitr/lstm/*.txt")]
    # [12:] strips the "brkitr/lstm/" prefix.
    input_basenames = [v.filename[12:] for v in input_files]
    output_files = [
        OutFile("brkitr/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "lstm_res",
            category = "brkitr_lstm",
            dep_targets = [DepTarget("cnvalias")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/brkitr/lstm -d {OUT_DIR}/brkitr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]


def generate_brkitr_adaboost(config, io, common_vars):
    input_files = [InFile(filename) for filename in io.glob("brkitr/adaboost/*.txt")]
    # [16:] strips the "brkitr/adaboost/" prefix.
    input_basenames = [v.filename[16:] for v in input_files]
    output_files = [
        OutFile("brkitr/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "adaboost_res",
            category = "brkitr_adaboost",
            dep_targets = [DepTarget("cnvalias")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/brkitr/adaboost -d {OUT_DIR}/brkitr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]


def generate_tree(
        config,
        io,
        common_vars,
        sub_dir,
        out_sub_dir,
        use_pool_bundle,
        dep_targets):
    """Build requests for one resource-bundle tree (pool bundle, .res files, res_index)."""
    requests = []
    category = "%s_tree" % sub_dir
    out_prefix = "%s/" % out_sub_dir if out_sub_dir else ""
    input_files = [InFile(filename) for filename in io.glob("%s/*.txt" % sub_dir)]
    # Supplemental files in these trees are compiled by their own dedicated rules.
    if sub_dir == "curr":
        input_files.remove(InFile("curr/supplementalData.txt"))
    if sub_dir == "zone":
        input_files.remove(InFile("zone/tzdbNames.txt"))
    input_basenames = [v.filename[len(sub_dir)+1:] for v in input_files]
    output_files = [
        OutFile("%s%s.res" % (out_prefix, v[:-4]))
        for v in input_basenames
    ]

    # Generate Pool Bundle
    if use_pool_bundle:
        input_pool_files = [OutFile("%spool.res" % out_prefix)]
        pool_target_name = "%s_pool_write" % sub_dir
        use_pool_bundle_option = "--usePoolBundle {OUT_DIR}/{OUT_PREFIX}".format(
            OUT_PREFIX = out_prefix,
            **common_vars
        )
        requests += [
            SingleExecutionRequest(
                name = pool_target_name,
                category = category,
                dep_targets = dep_targets,
                input_files = input_files,
                output_files = input_pool_files,
                tool = IcuTool("genrb"),
                args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                    "--writePoolBundle -k "
                    "{INPUT_BASENAMES_SPACED}",
                format_with = {
                    "IN_SUB_DIR": sub_dir,
                    "OUT_PREFIX": out_prefix,
                    "INPUT_BASENAMES_SPACED": utils.SpaceSeparatedList(input_basenames)
                }
            ),
        ]
        # The per-locale .res files must be built after the pool bundle.
        dep_targets = dep_targets + [DepTarget(pool_target_name)]
    else:
        use_pool_bundle_option = ""

    # Generate Res File Tree
    requests += [
        RepeatedOrSingleExecutionRequest(
            name = "%s_res" % sub_dir,
            category = category,
            dep_targets = dep_targets,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "{EXTRA_OPTION} -k "
                "{INPUT_BASENAME}",
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix,
                "EXTRA_OPTION": use_pool_bundle_option
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

    # Generate res_index file
    # Exclude the deprecated locale variants and root; see ICU-20628. This
    # could be data-driven, but we do not want to perform I/O in this script
    # (for example, we do not want to read from an XML file).
    excluded_locales = {
        "ja_JP_TRADITIONAL",
        "th_TH_TRADITIONAL",
        "de_",
        "de__PHONEBOOK",
        "es_",
        "es__TRADITIONAL",
        "root",
    }
    # Put alias locales in a separate structure; see ICU-20627
    dependency_data = io.read_locale_deps(sub_dir)
    if "aliases" in dependency_data:
        alias_locales = set(dependency_data["aliases"].keys())
    else:
        alias_locales = set()
    alias_files = []
    installed_files = []
    for f in input_files:
        file_stem = IndexRequest.locale_file_stem(f)
        if file_stem in excluded_locales:
            continue
        destination = alias_files if file_stem in alias_locales else installed_files
        destination.append(f)
    cldr_version = dependency_data["cldrVersion"] if sub_dir == "locales" else None
    index_file_txt = TmpFile("{IN_SUB_DIR}/{INDEX_NAME}.txt".format(
        IN_SUB_DIR = sub_dir,
        **common_vars
    ))
    index_res_file = OutFile("{OUT_PREFIX}{INDEX_NAME}.res".format(
        OUT_PREFIX = out_prefix,
        **common_vars
    ))
    index_file_target_name = "%s_index_txt" % sub_dir
    requests += [
        IndexRequest(
            name = index_file_target_name,
            category = category,
            dep_targets = [DepTarget("cnvalias")],
            installed_files = installed_files,
            alias_files = alias_files,
            txt_file = index_file_txt,
            output_file = index_res_file,
            cldr_version = cldr_version,
            args = "-s {TMP_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "-k "
                "{INDEX_NAME}.txt",
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix
            }
        )
    ]

    return requests