perftest.py (13299B)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
This transform passes options from `mach perftest` to the corresponding task.
"""

from datetime import date, timedelta

from taskgraph.transforms.base import TransformSequence
from taskgraph.util import json
from taskgraph.util.copy import deepcopy
from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
from taskgraph.util.treeherder import join_symbol, split_symbol
from voluptuous import Any, Extra, Optional

transforms = TransformSequence()


# Schema for the perftest-specific keys a task definition may carry.
# Everything matched by `Extra` is passed through untouched for later schemas.
perftest_description_schema = Schema({
    # The test names and the symbols to use for them: [test-symbol, test-path]
    Optional("perftest"): [[str]],
    # Metrics to gather for the test. These will be merged
    # with options specified through perftest-perfherder-global
    Optional("perftest-metrics"): optionally_keyed_by(
        "perftest",
        Any(
            [str],
            {str: Any(None, {str: Any(None, str, [str])})},
        ),
    ),
    # Perfherder data options that will be applied to
    # all metrics gathered.
    Optional("perftest-perfherder-global"): optionally_keyed_by(
        "perftest", {str: Any(None, str, [str])}
    ),
    # Extra options to add to the test's command
    Optional("perftest-extra-options"): optionally_keyed_by("perftest", [str]),
    # Variants of the test to make based on extra browsertime
    # arguments. Expecting:
    #    [variant-suffix, options-to-use]
    # If variant-suffix is `null` then the options will be added
    # to the existing task. Otherwise, a new variant is created
    # with the given suffix and with its options replaced.
    Optional("perftest-btime-variants"): optionally_keyed_by(
        "perftest", [[Any(None, str)]]
    ),
    # These options will be parsed in the next schemas
    Extra: object,
})


transforms.add_validate(perftest_description_schema)


@transforms.add
def split_tests(config, jobs):
    """Fan one job out into one job per entry in its `perftest` list.

    Each entry is a [test-symbol, test-path] pair: the symbol is appended to
    the job name and substituted into the treeherder symbol template, and the
    path replaces the `{perftest_testname}` placeholder in the run command.
    Jobs without a `perftest` key pass through unchanged.
    """
    for job in jobs:
        if job.get("perftest") is None:
            yield job
            continue

        for test_symbol, test_name in job.pop("perftest"):
            # Deep copy so each split job mutates its own nested dicts.
            job_new = deepcopy(job)

            job_new["perftest"] = test_symbol
            job_new["name"] += "-" + test_symbol
            job_new["treeherder"]["symbol"] = job["treeherder"]["symbol"].format(
                symbol=test_symbol
            )
            job_new["run"]["command"] = job["run"]["command"].replace(
                "{perftest_testname}", test_name
            )

            yield job_new


@transforms.add
def handle_keyed_by_perftest(config, jobs):
    """Resolve `by-perftest` keyed values now that `perftest` is a single test.

    The `perftest` key itself is consumed here; downstream transforms only see
    the resolved fields.
    """
    fields = ["perftest-metrics", "perftest-extra-options", "perftest-btime-variants"]
    for job in jobs:
        if job.get("perftest") is None:
            yield job
            continue

        for field in fields:
            resolve_keyed_by(job, field, item_name=job["name"])

        job.pop("perftest")
        yield job


@transforms.add
def parse_perftest_metrics(config, jobs):
    """Parse the metrics into a dictionary immediately.

    This way we can modify the extraOptions field (and others) entry through the
    transforms that come later. The metrics aren't formatted until the end of the
    transforms.
    """
    for job in jobs:
        if job.get("perftest-metrics") is None:
            yield job
            continue
        perftest_metrics = job.pop("perftest-metrics")

        # A flat list of metric names becomes one {"name": ...} entry each;
        # a mapping additionally merges each metric's options into its entry.
        if isinstance(perftest_metrics, list):
            new_metrics_info = [{"name": metric} for metric in perftest_metrics]
        else:
            new_metrics_info = []
            for metric, options in perftest_metrics.items():
                entry = {"name": metric}
                entry.update(options)
                new_metrics_info.append(entry)

        job["perftest-metrics"] = new_metrics_info
        yield job


@transforms.add
def split_perftest_variants(config, jobs):
    """Emit one extra job per entry in `variants`, plus the original job.

    Each variant job gets the variant name appended to its treeherder group
    and job name, added to its perfherder extraOptions, and set as a truthy
    top-level key (e.g. `http3: true`) for later transforms to consume.
    """
    for job in jobs:
        if job.get("variants") is None:
            yield job
            continue

        for variant in job.pop("variants"):
            job_new = deepcopy(job)

            group, symbol = split_symbol(job_new["treeherder"]["symbol"])
            group += "-" + variant
            job_new["treeherder"]["symbol"] = join_symbol(group, symbol)
            job_new["name"] += "-" + variant
            job_new.setdefault("perftest-perfherder-global", {}).setdefault(
                "extraOptions", []
            ).append(variant)
            # Flag consumed by variant-specific transforms (e.g. setup_http3_tests).
            job_new[variant] = True

            yield job_new

        # The base job (with `variants` popped) is still emitted.
        yield job


@transforms.add
def split_btime_variants(config, jobs):
    """Expand `perftest-btime-variants` [suffix, options] pairs.

    A `None` suffix appends its options to the existing job; any other suffix
    produces a new job whose treeherder symbol/name carry the suffix and whose
    browsertime options are replaced by the variant's options.
    """
    for job in jobs:
        if job.get("perftest-btime-variants") is None:
            yield job
            continue

        variants = job.pop("perftest-btime-variants")
        if not variants:
            yield job
            continue

        # Only re-yield the base job if a None-suffix variant modified it.
        yield_existing = False
        for suffix, options in variants:
            if suffix is None:
                # Append options to the existing job
                job.setdefault("perftest-btime-variants", []).append(options)
                yield_existing = True
            else:
                job_new = deepcopy(job)
                group, symbol = split_symbol(job_new["treeherder"]["symbol"])
                symbol += "-" + suffix
                job_new["treeherder"]["symbol"] = join_symbol(group, symbol)
                job_new["name"] += "-" + suffix
                job_new.setdefault("perftest-perfherder-global", {}).setdefault(
                    "extraOptions", []
                ).append(suffix)
                # Replace the existing options with the new ones
                job_new["perftest-btime-variants"] = [options]
                yield job_new

        # The existing job has been modified so we should also return it
        if yield_existing:
            yield job


@transforms.add
def setup_http3_tests(config, jobs):
    """Add the HTTP/3 browsertime preference to jobs flagged with `http3`.

    The flag is set by split_perftest_variants; it is popped here so it does
    not leak into the final task definition.
    """
    for job in jobs:
        if job.get("http3") is None or not job.pop("http3"):
            yield job
            continue
        job.setdefault("perftest-btime-variants", []).append(
            "firefox.preference=network.http.http3.enable:true"
        )
        yield job


@transforms.add
def setup_perftest_metrics(config, jobs):
    """Merge global perfherder options into each metric and format the command.

    List-valued global options extend an existing per-metric list; otherwise
    the global value overwrites the metric's, except that empty global lists
    are skipped. The metrics are serialized as space-separated
    `option:value,option:value` groups substituted for `{perftest_metrics}`
    in the run command, with quotes escaped for the target shell.
    """
    for job in jobs:
        if job.get("perftest-metrics") is None:
            yield job
            continue
        perftest_metrics = job.pop("perftest-metrics")

        # Options to apply to each metric
        global_options = job.pop("perftest-perfherder-global", {})
        for metric_info in perftest_metrics:
            for opt, val in global_options.items():
                if isinstance(val, list) and opt in metric_info:
                    metric_info[opt].extend(val)
                elif not (isinstance(val, list) and len(val) == 0):
                    metric_info[opt] = val

        quote_escape = '\\"'
        if "win" in job.get("platform", ""):
            # Escaping is a bit different on windows platforms
            quote_escape = '\\\\\\"'

        job["run"]["command"] = job["run"]["command"].replace(
            "{perftest_metrics}",
            " ".join([
                ",".join([
                    ":".join([
                        option,
                        str(value).replace(" ", "").replace("'", quote_escape),
                    ])
                    for option, value in metric_info.items()
                ])
                for metric_info in perftest_metrics
            ]),
        )

        yield job


@transforms.add
def setup_perftest_browsertime_variants(config, jobs):
    """Append accumulated browsertime variant options to the run command.

    The options collected by earlier transforms are joined with commas into a
    single `--browsertime-extra-options` argument.
    """
    for job in jobs:
        if job.get("perftest-btime-variants") is None:
            yield job
            continue

        job["run"]["command"] += " --browsertime-extra-options %s" % ",".join([
            opt.strip() for opt in job.pop("perftest-btime-variants")
        ])

        yield job
@transforms.add
def setup_perftest_extra_options(config, jobs):
    """Append `perftest-extra-options` verbatim to the test's run command."""
    for job in jobs:
        if job.get("perftest-extra-options") is None:
            yield job
            continue
        job["run"]["command"] += " " + " ".join(job.pop("perftest-extra-options"))
        yield job


@transforms.add
def create_duplicate_simpleperf_jobs(config, jobs):
    """Duplicate warm-startup jobs into cron-disabled profiling variants.

    For every job whose name contains "startup" but not "cold" or "chrome-m",
    emit an additional "-profiling" job that runs with simpleperf and the
    Gecko profiler enabled, fetches the profiling toolchains and crash symbols,
    and is excluded from run-on-projects/cron. The original job is always
    yielded as well.
    """
    for job in jobs:
        if (
            "startup" in job["name"]
            and "cold" not in job["name"]
            and "chrome-m" not in job["name"]
        ):
            new_job = deepcopy(job)
            # Profiling runs are manual-only: no projects, no cron.
            new_job["run-on-projects"] = []
            new_job["attributes"] = {"cron": False}
            new_job["dependencies"] = {
                "android-aarch64-shippable": "build-android-aarch64-shippable/opt"
            }
            new_job["name"] += "-profiling"
            new_job["run"]["command"] += (
                " --simpleperf --simpleperf-path $MOZ_FETCHES_DIR/android-simpleperf --geckoprofiler"
            )
            new_job["description"] = str(new_job["description"]).replace(
                "Run", "Profile"
            )
            # Insert "-profile" before the closing paren of the symbol.
            new_job["treeherder"]["symbol"] = str(
                new_job["treeherder"]["symbol"]
            ).replace(")", "-profile)")
            new_job["fetches"]["toolchain"].extend([
                "linux64-android-simpleperf-linux-repack",
                "linux64-samply",
                "symbolicator-cli",
            ])
            new_job["fetches"]["android-aarch64-shippable"] = [
                {
                    "artifact": "target.crashreporter-symbols.zip",
                    "extract": False,
                }
            ]
            yield new_job
        yield job


@transforms.add
def pass_perftest_options(config, jobs):
    """Expose try_task_config's `perftest-options` to every task.

    The value (possibly None) is JSON-encoded into the PERFTEST_OPTIONS
    environment variable on the worker.
    """
    for job in jobs:
        env = job.setdefault("worker", {}).setdefault("env", {})
        env["PERFTEST_OPTIONS"] = json.dumps(
            config.params["try_task_config"].get("perftest-options")
        )
        yield job


@transforms.add
def setup_perftest_test_date(config, jobs):
    """Default `--test-date` to yesterday for batch jobs that don't set one."""
    for job in jobs:
        if (
            job.get("attributes", {}).get("batch", False)
            and "--test-date" not in job["run"]["command"]
        ):
            yesterday = (date.today() - timedelta(1)).strftime("%Y.%m.%d")
            job["run"]["command"] += " --test-date %s" % yesterday
        yield job


@transforms.add
def setup_regression_detector(config, jobs):
    """Wire change-detector tasks to the perf tasks they should analyze.

    Collects the browsertime/talos/awsy/perftest tasks selected in the push
    (skipping side-by-side and change-detector tasks themselves), makes the
    change-detector soft-depend on all of them, and — when a base revision is
    provided via PERF_BASE_REVISION — fills the task-name/revision/branch
    placeholders in the run command. Non-change-detector jobs pass through.
    """
    for job in jobs:
        if "change-detector" in job.get("name"):
            tasks_to_analyze = []
            for task in config.params["try_task_config"].get("tasks", []):
                # Explicitly skip these tasks since they're
                # part of the mozperftest tasks
                if "side-by-side" in task:
                    continue
                if "change-detector" in task:
                    continue

                # Select these tasks
                if "browsertime" in task:
                    tasks_to_analyze.append(task)
                elif "talos" in task:
                    tasks_to_analyze.append(task)
                elif "awsy" in task:
                    tasks_to_analyze.append(task)
                elif "perftest" in task:
                    tasks_to_analyze.append(task)

            # Nothing to analyze: leave the job untouched.
            if len(tasks_to_analyze) == 0:
                yield job
                continue

            # Make the change detector task depend on the tasks to analyze.
            # This prevents the task from running until all data is available
            # within the current push.
            job["soft-dependencies"] = tasks_to_analyze
            job["requires"] = "all-completed"

            new_project = config.params["project"]
            if (
                "try" in config.params["project"]
                or config.params["try_mode"] == "try_select"
            ):
                new_project = "try"

            base_project = None
            if (
                config.params.get("try_task_config", {})
                .get("env", {})
                .get("PERF_BASE_REVISION", None)
                is not None
            ):
                task_names = " --task-name ".join(tasks_to_analyze)
                base_revision = config.params["try_task_config"]["env"][
                    "PERF_BASE_REVISION"
                ]
                base_project = new_project

                # Add all the required information to the task
                job["run"]["command"] = job["run"]["command"].format(
                    task_name=task_names,
                    base_revision=base_revision,
                    base_branch=base_project,
                    new_branch=new_project,
                    new_revision=config.params["head_rev"],
                )

        yield job


@transforms.add
def apply_perftest_tier_optimization(config, jobs):
    """Mark every job skip-unless-backstop and force tier 2 or lower priority."""
    for job in jobs:
        job["optimization"] = {"skip-unless-backstop": None}
        # Never promote above tier 2 (higher number = lower tier).
        job["treeherder"]["tier"] = max(job["treeherder"]["tier"], 2)
        yield job


@transforms.add
def set_perftest_attributes(config, jobs):
    """Record the final job name as the `perftest_name` attribute."""
    for job in jobs:
        attributes = job.setdefault("attributes", {})
        attributes["perftest_name"] = job["name"]
        yield job