test_actions.py (24830B)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


import pytest
import yaml
from mozunit import main
from pytest_taskgraph import make_graph, make_task
from taskgraph import create
from taskgraph.util import json
from taskgraph.util.taskcluster import get_task_definition

from gecko_taskgraph import decision
from gecko_taskgraph.actions import trigger_action_callback

ROOT_URL = "https://taskcluster.example.com"


@pytest.fixture(autouse=True)
def mock_root_url(monkeypatch):
    """Point every test at a fake Taskcluster deployment."""
    monkeypatch.delenv("TASKCLUSTER_PROXY_URL", raising=False)
    monkeypatch.setenv("TASKCLUSTER_ROOT_URL", ROOT_URL)


@pytest.fixture(autouse=True)
def clear_caches():
    """Drop the task-definition cache after each test so mocked responses
    from one test never leak into the next."""
    yield
    get_task_definition.cache_clear()


@pytest.fixture
def artifact_dir(monkeypatch, tmp_path):
    """Redirect decision-task artifact output into a per-test tmp dir."""
    artifact_dir = tmp_path / "artifacts"
    monkeypatch.setattr(decision, "ARTIFACTS_DIR", str(artifact_dir))
    return artifact_dir


@pytest.fixture
def get_artifact(artifact_dir):
    """Return a helper that loads a JSON artifact written by an action."""

    def inner(artifact_name):
        return json.loads((artifact_dir / artifact_name).read_text())

    return inner


@pytest.fixture
def run_action(mocker, monkeypatch, parameters, graph_config):
    """Return a helper that triggers an action callback by name.

    Task creation is stubbed out (``create.testing``) and scope sanity
    checks are bypassed so actions can run entirely offline.
    """
    monkeypatch.setattr(create, "testing", True)
    mocker.patch("gecko_taskgraph.actions.registry.sanity_check_task_scope")

    def inner(name, params=None, **kwargs):
        if params:
            parameters.update(params)

        kwargs.setdefault("task_group_id", "gid")
        kwargs.setdefault("task_id", "tid")
        kwargs.setdefault("input", None)
        return trigger_action_callback(
            callback=name,
            parameters=parameters,
            root=graph_config.root_dir,
            **kwargs,
        )

    return inner


def test_cancel(responses, run_action):
    """The cancel action posts to the queue's cancel endpoint."""
    task_id = "abc"

    responses.post(f"{ROOT_URL}/api/queue/v1/task/{task_id}/cancel", status=200)

    run_action("cancel", task_id=task_id, input={"task_id": task_id})


def test_cancel_all(monkeypatch, responses, run_action):
    """cancel-all cancels every non-final task in the group except itself."""
    group_id = "abc"

    # Validate action task doesn't cancel itself.
    monkeypatch.setenv("TASK_ID", group_id)

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task-group/{group_id}/list",
        status=200,
        json={
            "tasks": [
                {"status": {"taskId": group_id, "state": "running"}},
                {"status": {"taskId": "a", "state": "running"}},
                {"status": {"taskId": "b", "state": "completed"}},
                {"status": {"taskId": "c", "state": "pending"}},
                {"status": {"taskId": "d", "state": "unscheduled"}},
            ]
        },
    )

    # Only the running/pending/unscheduled siblings get cancelled; the
    # completed task "b" and the action task itself do not.
    responses.post(f"{ROOT_URL}/api/queue/v1/task/a/cancel", status=200)
    responses.post(f"{ROOT_URL}/api/queue/v1/task/c/cancel", status=200)
    responses.post(f"{ROOT_URL}/api/queue/v1/task/d/cancel", status=200)

    run_action(
        "cancel-all",
        task_group_id=group_id,
        input={"task_group_id": group_id},
    )


def test_rebuild_cached_tasks(mocker, run_action, get_artifact):
    """Only tasks flagged with the ``cached_task`` attribute are rebuilt."""
    graph = make_graph(
        make_task(
            label="foo", attributes={"cached_task": True}, task_def={"name": "foo"}
        ),
        make_task(label="bar", task_def={"name": "bar"}),
    )
    m = mocker.patch(
        "gecko_taskgraph.actions.rebuild_cached_tasks.fetch_graph_and_labels"
    )
    m.return_value = (
        "gid",
        graph,
        dict.fromkeys(graph.tasks, "tid"),
        None,
    )

    run_action("rebuild-cached-tasks")
    to_run = get_artifact("to-run.json")
    assert "foo" in to_run
    assert "bar" not in to_run


def test_add_new_jobs(mocker, run_action, get_artifact):
    """add-new-jobs schedules exactly the labels named in the input."""
    graph = make_graph(
        make_task(label="foo", task_def={"name": "foo"}),
        make_task(label="bar", task_def={"name": "bar"}),
    )
    m = mocker.patch("gecko_taskgraph.actions.add_new_jobs.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)

    run_action("add-new-jobs", input={"tasks": ["foo"], "times": 1})

    to_run = get_artifact("to-run.json")
    assert "foo" in to_run
    assert "bar" not in to_run


def test_add_talos(mocker, run_action, get_artifact):
    """run-all-talos selects tasks carrying a ``talos_try_name`` attribute."""
    graph = make_graph(
        make_task(
            label="test-linux-talos",
            attributes={"talos_try_name": "talos"},
            task_def={"name": "test-linux-talos"},
        ),
        make_task(label="build", task_def={"name": "build"}),
    )
    m = mocker.patch("gecko_taskgraph.actions.add_talos.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)
    mocker.patch("gecko_taskgraph.actions.add_talos.standard_filter", return_value=True)

    run_action("run-all-talos", input={"times": 1})

    to_run = get_artifact("to-run.json")
    assert "test-linux-talos" in to_run
    assert "build" not in to_run


def test_purge_caches(responses, run_action):
    """purge-cache purges each cache declared in the task's payload."""
    task_id = "abc"
    task_def = {
        "payload": {"cache": {"cache1": "path1"}},
        "provisionerId": "proj-gecko",
        "workerType": "linux",
    }

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}",
        status=200,
        json=task_def,
    )
    responses.post(
        f"{ROOT_URL}/api/purge-cache/v1/purge-cache/proj-gecko%2Flinux",
        status=200,
        json={},
    )

    run_action("purge-cache", task_id=task_id)


def test_openh264(mocker, run_action, get_artifact):
    """The openh264 action schedules only tasks of the openh264 kind."""
    graph = make_graph(
        make_task(
            label="openh264-build", kind="openh264", task_def={"name": "openh264-build"}
        ),
        make_task(label="build", kind="build", task_def={"name": "build"}),
    )
    m = mocker.patch("gecko_taskgraph.actions.openh264.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)

    run_action("openh264")

    to_run = get_artifact("to-run.json")
    assert "openh264-build" in to_run
    assert "build" not in to_run


def test_googleplay(mocker, run_action, get_artifact):
    """The googleplay action schedules push-bundle tasks only."""
    graph = make_graph(
        make_task(
            label="push-fenix",
            kind="push-bundle",
            attributes={"build-type": "fenix-nightly"},
            task_def={"name": "push-fenix"},
        ),
        make_task(label="build", kind="build", task_def={"name": "build"}),
    )
    m = mocker.patch("gecko_taskgraph.actions.googleplay.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)

    run_action("googleplay", params={"project": "mozilla-central"})

    to_run = get_artifact("to-run.json")
    assert "push-fenix" in to_run
    assert "build" not in to_run


def test_raptor_extra_options(mocker, responses, run_action, get_artifact):
    """raptor-extra-options re-runs a raptor task with extra options."""
    task_id = "tid"
    task_def = {
        "metadata": {"name": "test-raptor"},
        "payload": {"env": {}},
        "extra": {"treeherder": {"symbol": "rap", "groupName": "Raptor"}},
    }
    graph = make_graph(make_task(label="test-raptor", task_def=task_def))

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}",
        status=200,
        json=task_def,
    )
    m = mocker.patch(
        "gecko_taskgraph.actions.raptor_extra_options.fetch_graph_and_labels"
    )
    m.return_value = ("gid", graph, {"test-raptor": "tid"}, None)

    run_action(
        "raptor-extra-options", task_id=task_id, input={"extra_options": "verbose"}
    )

    to_run = get_artifact("to-run.json")
    assert "test-raptor" in to_run


def test_run_missing_tests(mocker, responses, run_action, get_artifact):
    """run-missing-tests schedules targeted test tasks that never ran."""
    graph = make_graph(
        make_task(label="test-foo", kind="test", task_def={"name": "test-foo"}),
        make_task(label="test-bar", kind="test", task_def={"name": "test-bar"}),
        make_task(label="build", kind="build", task_def={"name": "build"}),
    )
    m = mocker.patch("gecko_taskgraph.actions.run_missing_tests.fetch_graph_and_labels")
    # test-foo already ran (it has a task id); test-bar is missing.
    m.return_value = ("gid", graph, {"test-foo": "tid1"}, None)

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/gid/artifacts/public%2Ftarget-tasks.json",
        status=200,
        json={"test-foo": {}, "test-bar": {}},
    )

    run_action("run-missing-tests")

    to_run = get_artifact("to-run.json")
    assert "test-bar" in to_run
    assert "test-foo" not in to_run
    assert "build" not in to_run


def test_scriptworker_canary(mocker, run_action, graph_config):
    """scriptworker-canary spawns a decision with the canary worker list."""
    m = mocker.patch("gecko_taskgraph.actions.scriptworker_canary.taskgraph_decision")

    run_action("scriptworker-canary", input={"scriptworkers": ["balrog", "shipit"]})

    m.assert_called_once()
    args, kwargs = m.call_args
    assert args[0] == {"root": graph_config.root_dir}
    assert kwargs["parameters"]["target_tasks_method"] == "scriptworker_canary"
    assert kwargs["parameters"]["try_task_config"] == {
        "scriptworker-canary-workers": ["balrog", "shipit"]
    }
    assert kwargs["parameters"]["tasks_for"] == "action"


def test_merge_automation(mocker, run_action, graph_config):
    """merge-automation spawns a decision carrying the merge config."""
    m = mocker.patch("gecko_taskgraph.actions.merge_automation.taskgraph_decision")

    run_action(
        "merge-automation",
        params={"project": "mozilla-central"},
        input={"behavior": "bump-main"},
    )

    m.assert_called_once()
    args, kwargs = m.call_args
    assert args[0] == {"root": graph_config.root_dir}
    assert kwargs["parameters"]["target_tasks_method"] == "merge_automation"
    assert kwargs["parameters"]["merge_config"] == {
        "force-dry-run": False,
        "behavior": "bump-main",
    }
    assert kwargs["parameters"]["tasks_for"] == "action"


def test_retrigger(mocker, responses, run_action, get_artifact):
    """retrigger schedules the task again when retriggering is allowed."""
    task_def = {
        "metadata": {"name": "test-task"},
        "payload": {},
    }
    graph = make_graph(
        make_task(label="test-task", attributes={"retrigger": True}, task_def=task_def)
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/tid",
        status=200,
        json=task_def,
    )
    m = mocker.patch("gecko_taskgraph.actions.retrigger.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)

    run_action("retrigger", input={"force": True})

    to_run = get_artifact("to-run.json")
    assert "test-task" in to_run


def test_retrigger_custom(mocker, responses, run_action, capsys):
    """retrigger-custom builds a one-off task with a custom test path."""
    task_def = {
        "metadata": {"name": "test-mochitest"},
        "payload": {"command": ["run"], "env": {}},
        "tags": {"test-type": "mochitest"},
        "extra": {"treeherder": {"symbol": "M"}},
    }
    graph = make_graph(make_task(label="test-mochitest", task_def=task_def))

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/tid",
        status=200,
        json=task_def,
    )
    m = mocker.patch("gecko_taskgraph.actions.retrigger_custom.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {"test-mochitest": "dtid"}, None)

    run_action("retrigger-custom", input={"path": "test/path"})

    captured = capsys.readouterr()
    assert "test-mochitest" in captured.out
    assert "--no-run-tests" in captured.out
    assert "test/path" in captured.out
    assert "M-custom" in captured.out


def test_create_interactive(mocker, responses, monkeypatch, run_action, get_artifact):
    """create-interactive reschedules the task as an interactive one."""
    monkeypatch.setenv("TASK_ID", "action-task-id")
    task_def = {
        "metadata": {"name": "test-task"},
        "payload": {
            "env": {},
            "maxRunTime": 3600,
            "cache": {},
            "artifacts": {},
        },
        "scopes": [],
        "extra": {"treeherder": {"symbol": "T"}},
    }
    graph = make_graph(make_task(label="test-task", task_def=task_def))

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/tid",
        status=200,
        json=task_def,
    )
    m = mocker.patch(
        "gecko_taskgraph.actions.create_interactive.fetch_graph_and_labels"
    )
    m.return_value = ("gid", graph, {}, None)

    run_action("create-interactive", input={"notify": "test@example.com"})

    to_run = get_artifact("to-run.json")
    assert "test-task" in to_run


def test_backfill_task(mocker, run_action, get_artifact):
    """backfill-task writes a per-push to-run artifact for the label."""
    graph = make_graph(
        make_task(label="test-task", task_def={"name": "test-task"}),
    )
    m = mocker.patch("gecko_taskgraph.actions.backfill.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)
    mocker.patch("gecko_taskgraph.actions.backfill.combine_task_graph_files")

    run_action(
        "backfill-task",
        input={"label": "test-task", "revision": "abc123", "symbol": "T"},
    )

    to_run = get_artifact("to-run-0.json")
    assert "test-task" in to_run


def test_confirm_failures(mocker, responses, run_action, get_artifact):
    """confirm-failures schedules the -cf variant using the errorsummary."""
    task_id = "test-task-id"
    task_def = {
        "metadata": {"name": "test-mochitest"},
        "extra": {"suite": "mochitest"},
        "payload": {"command": ["run-tests"], "env": {}},
    }
    graph = make_graph(
        make_task(
            label="test-mochitest-cf",
            task_def={
                "name": "test-mochitest-cf",
                "payload": {"command": ["run-tests"], "env": {}},
                "metadata": {"name": "test-mochitest-cf"},
                "tags": {},
            },
        ),
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}/artifacts",
        status=200,
        json={
            "artifacts": [
                {"name": "public/logs/live_backing.log"},
                {"name": "public/logs/errorsummary.log"},
            ]
        },
    )

    errorsummary_content = b"\n".join([
        b'{"test": "dom/tests/test_example.html", "status": "FAIL", "expected": "PASS", "group": "dom/tests"}',
        b'{"test": "dom/tests/test_another.html", "status": "FAIL", "expected": "PASS", "group": "dom/tests"}',
    ])
    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}/artifacts/public%2Flogs%2Ferrorsummary.log",
        status=200,
        body=errorsummary_content,
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}",
        status=200,
        json=task_def,
    )
    m = mocker.patch("gecko_taskgraph.actions.confirm_failure.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)

    run_action("confirm-failures", task_id=task_id)

    to_run = get_artifact("to-run.json")
    assert "test-mochitest-cf" in to_run


def test_confirm_failures_retrigger(mocker, responses, run_action):
    """Without an errorsummary artifact, confirm-failures retriggers."""
    task_id = "test-task-id"
    task_def = {
        "metadata": {"name": "test-mochitest"},
        "extra": {"suite": "mochitest"},
    }
    graph = make_graph(
        make_task(
            label="test-mochitest",
            attributes={"retrigger": True},
            task_def={"name": "test-mochitest"},
        ),
        make_task(label="test-mochitest-cf", task_def={"name": "test-mochitest-cf"}),
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}/artifacts",
        status=200,
        json={"artifacts": [{"name": "public/logs/live_backing.log"}]},
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}",
        status=200,
        json=task_def,
    )
    m = mocker.patch("gecko_taskgraph.actions.confirm_failure.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)
    retrigger_mock = mocker.patch(
        "gecko_taskgraph.actions.confirm_failure.retrigger_action"
    )

    run_action("confirm-failures", task_id=task_id)

    retrigger_mock.assert_called_once()


def test_rerun(mocker, responses, run_action):
    """rerun posts to the queue's rerun endpoint for a failed task."""
    task_id = "tid"
    task_def = {"metadata": {"name": "test-task"}}

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}",
        status=200,
        json=task_def,
    )
    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}/status",
        status=200,
        json={"status": {"state": "failed"}},
    )
    responses.post(f"{ROOT_URL}/api/queue/v1/task/{task_id}/rerun", status=200)

    graph = make_graph(make_task(label="test-task", task_def=task_def))
    m = mocker.patch("gecko_taskgraph.actions.retrigger.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {"test-task": [task_id]}, {"test-task": [task_id]})

    run_action("rerun", task_id=task_id)


def test_retrigger_decision(responses, run_action, capsys):
    """retrigger-decision clones the decision task on the right worker."""
    task_def = {
        "taskGroupId": "tgid",
        "schedulerId": "scheduler",
        "provisionerId": "provisioner",
        "workerType": "worker",
        "created": "2024-01-01T00:00:00.000Z",
        "deadline": "2024-01-01T01:00:00.000Z",
        "expires": "2024-01-02T00:00:00.000Z",
        "metadata": {"name": "decision-task"},
        "payload": {},
        "tags": {},
        "extra": {},
    }

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/tid",
        status=200,
        json=task_def,
    )

    run_action("retrigger-decision", params={"level": "1"})

    captured = capsys.readouterr()
    assert "decision-task" in captured.out
    assert "gecko-level-1" in captured.out
    assert "retrigger-decision-task" in captured.out


def test_retrigger_multiple(mocker, run_action, get_artifact):
    """retrigger-multiple schedules each requested retriggerable label."""
    graph = make_graph(
        make_task(
            label="test-task",
            attributes={"retrigger": True},
            task_def={"name": "test-task"},
        ),
    )

    m = mocker.patch("gecko_taskgraph.actions.retrigger.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, {"test-task": ["tid"]})

    run_action(
        "retrigger-multiple",
        input={"requests": [{"tasks": ["test-task"], "times": 2}]},
    )

    to_run = get_artifact("to-run.json")
    assert "test-task" in to_run


def test_retrigger_multiple_rerun(mocker, responses, run_action):
    """Non-retriggerable failed tasks are rerun instead of retriggered."""
    task_id = "rerun-task-id"
    graph = make_graph(
        make_task(
            label="test-task",
            attributes={"retrigger": False},
            task_def={"name": "test-task"},
        ),
    )

    m = mocker.patch("gecko_taskgraph.actions.retrigger.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, {"test-task": [task_id]})

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}/status",
        status=200,
        json={"status": {"state": "failed"}},
    )
    responses.post(f"{ROOT_URL}/api/queue/v1/task/{task_id}/rerun", status=200)

    run_action(
        "retrigger-multiple",
        input={"requests": [{"tasks": ["test-task"], "times": 2}]},
    )
def test_add_all_browsertime(mocker, run_action, get_artifact):
    """add-all-browsertime schedules every browsertime raptor test."""
    graph = make_graph(
        make_task(
            label="raptor-browsertime",
            kind="test",
            attributes={
                "raptor_try_name": "browsertime-firefox",
                "test_platform": "linux64-shippable-qr/opt",
                "run_on_projects": ["mozilla-central"],
            },
            task_def={"name": "raptor-browsertime", "extra": {"suite": "raptor"}},
        ),
        make_task(label="build", kind="build", task_def={"name": "build"}),
    )

    m = mocker.patch("gecko_taskgraph.actions.backfill.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)

    run_action("add-all-browsertime", params={"project": "mozilla-central"})

    to_run = get_artifact("to-run.json")
    assert "raptor-browsertime" in to_run
    assert "build" not in to_run


def test_gecko_profile(mocker, responses, run_action, get_artifact):
    """geckoprofile re-runs a perf task with profiling over pushlog depth."""
    task_id = "tid"
    task_def = {
        "metadata": {"name": "test-raptor"},
        "payload": {"command": [["run-tests"]], "env": {}},
        "extra": {
            "suite": "raptor",
            "treeherder": {"symbol": "R", "groupName": "Raptor"},
        },
    }
    graph = make_graph(
        make_task(
            label="test-raptor",
            kind="test",
            attributes={"unittest_suite": "raptor"},
            task_def={
                "name": "test-raptor",
                "payload": {"command": [["run-tests"]], "env": {}},
                "extra": {
                    "suite": "raptor",
                    "treeherder": {"symbol": "R", "groupName": "Raptor"},
                },
            },
        )
    )

    # Pushlog walk used to find the pushes to profile across.
    responses.get(
        "http://hg.example.com/json-pushes?version=2&startID=99&endID=100",
        status=200,
        json={"pushes": {"100": {"changesets": ["abc123"]}}},
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}",
        status=200,
        json=task_def,
    )

    # Decision-task parameters for the push being profiled.
    responses.get(
        f"{ROOT_URL}/api/index/v1/task/gecko.v2.some-project.pushlog-id.100.decision/artifacts/public%2Fparameters.yml",
        status=200,
        body=yaml.dump({"pushlog_id": "100", "project": "autoland", "level": "1"}),
        content_type="application/x-yaml",
    )
    m = mocker.patch("gecko_taskgraph.actions.gecko_profile.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {"test-raptor": "tid"}, None)
    mocker.patch("gecko_taskgraph.actions.gecko_profile.combine_task_graph_files")

    run_action(
        "geckoprofile",
        task_id=task_id,
        params={"pushlog_id": "100", "head_repository": "http://hg.example.com"},
        input={"depth": 1, "gecko_profile_interval": 5},
    )

    to_run = get_artifact("to-run-100.json")
    assert "test-raptor" in to_run


def test_side_by_side(mocker, responses, run_action, get_artifact):
    """side-by-side schedules the perftest comparison task."""
    task_id = "tid"
    task_def = {
        "metadata": {"name": "linux/opt-browsertime-tp6"},
        "extra": {"treeherder": {"symbol": "tp6"}},
        "payload": {"command": [["run"], ["perf-test {test_name}"]]},
    }
    graph = make_graph(
        make_task(
            label="perftest-linux-side-by-side",
            task_def={
                "name": "perftest-linux-side-by-side",
                "payload": {"command": [["run"], ["perf-test {test_name}"]]},
                "extra": {"treeherder": {"symbol": "sxs"}},
                "metadata": {"name": "perftest-linux-side-by-side"},
            },
        )
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/{task_id}",
        status=200,
        json=task_def,
    )
    m = mocker.patch("gecko_taskgraph.actions.side_by_side.fetch_graph_and_labels")
    m.return_value = ("gid", graph, {}, None)

    run_action(
        "side-by-side",
        task_id=task_id,
        params={"head_rev": "newrev123", "pushlog_id": "100"},
        input={"revision": "baserev456", "project": "autoland"},
    )

    to_run = get_artifact("to-run.json")
    assert "perftest-linux-side-by-side" in to_run


def test_release_promotion(
    mocker, monkeypatch, responses, run_action, parameters, graph_config
):
    """release-promotion spawns a decision with the promote target method."""
    m = mocker.patch("gecko_taskgraph.actions.release_promotion.taskgraph_decision")

    action_task_id = "action-task-id"
    monkeypatch.setenv("TASK_ID", action_task_id)

    # Index lookup resolving the revision to its decision task.
    responses.get(
        f"{ROOT_URL}/api/index/v1/task/gecko.v2.try.revision.abcdef.taskgraph.decision",
        status=200,
        json={"taskId": "decision-task-id"},
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/decision-task-id/artifacts/public%2Fparameters.yml",
        status=200,
        body=yaml.dump({
            "base_repository": "http://hg.example.com",
            "head_repository": "http://hg.example.com",
            "head_rev": "abcdef",
            "project": "try",
            "level": "1",
            "pushlog_id": "100",
            "required_signoffs": [],
            "signoff_urls": {},
            "release_product": "firefox",
            "release_type": "nightly",
        }),
        content_type="application/x-yaml",
    )
    responses.get(
        f"{ROOT_URL}/api/queue/v1/task/decision-task-id/artifacts/public%2Ffull-task-graph.json",
        status=200,
        json={},
    )

    responses.get(
        f"{ROOT_URL}/api/queue/v1/task-group/{action_task_id}/list",
        status=200,
        json={
            "tasks": [
                {"status": {"taskId": action_task_id, "state": "running"}},
            ]
        },
    )

    mocker.patch(
        "gecko_taskgraph.actions.release_promotion.find_existing_tasks_from_previous_kinds",
        return_value={},
    )

    run_action(
        "release-promotion",
        params={
            "project": "try",
            "level": "1",
        },
        input={
            "release_promotion_flavor": "promote_firefox",
            "build_number": 1,
            "version": "",
            "partial_updates": {},
            "release_enable_partner_repack": False,
            "release_enable_partner_attribution": False,
            "release_enable_emefree": False,
        },
    )

    m.assert_called_once()
    args, kwargs = m.call_args
    assert args[0] == {"root": graph_config.root_dir}
    assert kwargs["parameters"]["target_tasks_method"] == "promote_desktop"


if __name__ == "__main__":
    main()