test_decision_verify.py (3707B)
# Any copyright is dedicated to the public domain.
# http://creativecommons.org/publicdomain/zero/1.0/
"""Replay the gecko_taskgraph verifications against a real Decision task.

Each ``test_<phase>`` function below is parametrized (via
``pytest_generate_tests``) with every verification registered for that
graph-generation phase, and runs it against the artifacts of the Decision
task identified by the ``TASK_GROUP_ID`` environment variable. Tests that
need those artifacts are skipped when the variable is unset.
"""

import os

import gecko_taskgraph
import pytest
from gecko_taskgraph.util.verify import verifications
from mozunit import main
from taskgraph.generator import TaskGraphGenerator
from taskgraph.parameters import Parameters
from taskgraph.taskgraph import TaskGraph
from taskgraph.util.taskcluster import get_artifact


def pytest_generate_tests(metafunc):
    """Parametrize each test with the verifications for its phase.

    The phase name is derived from the test function's own name, e.g.
    ``test_full_task_graph`` runs the verifications registered under
    ``"full_task_graph"``.
    """
    if "verification" in metafunc.fixturenames:
        name = metafunc.function.__name__.split("_", 1)[1]
        verification_objs = verifications._verifications.get(name, [])
        ids = [v.func.__name__ for v in verification_objs]
        metafunc.parametrize("verification", verification_objs, ids=ids)


@pytest.fixture(scope="module")
def task_group_id():
    """The Decision task group to pull artifacts from.

    Skips the requesting test when ``TASK_GROUP_ID`` is not set, so each
    downstream fixture doesn't have to repeat the environment check.
    """
    if "TASK_GROUP_ID" not in os.environ:
        pytest.skip(reason="requires a Decision taskId to test against")
    return os.environ["TASK_GROUP_ID"]


def _graph_from_artifact(task_group_id, path):
    """Load a TaskGraph from a Decision task artifact at *path*."""
    # TaskGraph.from_json returns (tasks_by_label, graph); only the
    # graph itself is needed here.
    return TaskGraph.from_json(get_artifact(task_group_id, path))[1]


@pytest.fixture(scope="module")
def parameters(task_group_id):
    """The parameters the Decision task ran with."""
    return Parameters(**get_artifact(task_group_id, "public/parameters.yml"))


@pytest.fixture(scope="module")
def tgg(parameters):
    """A TaskGraphGenerator rooted at the gecko_taskgraph checkout."""
    root = os.path.dirname(os.path.dirname(gecko_taskgraph.__file__))
    return TaskGraphGenerator(root, parameters)


@pytest.fixture(scope="module")
def graph_config(tgg):
    return tgg.graph_config


@pytest.fixture(scope="module")
def kinds(tgg):
    """Mapping of kind name -> kind object, loaded without target kinds."""
    return {kind.name: kind for kind in tgg._load_kinds(tgg.graph_config, [])}


@pytest.fixture(scope="module")
def full_task_graph(task_group_id):
    return _graph_from_artifact(task_group_id, "public/full-task-graph.json")


@pytest.fixture(scope="module")
def target_task_graph(task_group_id):
    return _graph_from_artifact(task_group_id, "public/target-tasks.json")


@pytest.fixture(scope="module")
def morphed_task_graph(task_group_id):
    return _graph_from_artifact(task_group_id, "public/task-graph.json")


def test_initial(verification):
    verification.verify()


def test_graph_config(verification, graph_config):
    verification.verify(graph_config)


def test_kinds(verification, kinds):
    verification.verify(kinds)


def test_parameters(verification, parameters):
    verification.verify(parameters)


def test_full_task_set(verification, full_task_graph, graph_config, parameters):
    # We don't write out the full_task_set as a decision task artifact, but
    # the full_task_graph is functionally equivalent.
    verification.verify(full_task_graph, graph_config, parameters)


def test_full_task_graph(verification, full_task_graph, graph_config, parameters):
    verification.verify(full_task_graph, graph_config, parameters)


def test_target_task_graph(verification, target_task_graph, graph_config, parameters):
    verification.verify(target_task_graph, graph_config, parameters)


def test_optimized_task_graph(
    verification, morphed_task_graph, graph_config, parameters
):
    # We don't write out the optimized graph as a decision task artifact, but
    # the morphed graph is a good enough stand-in.
    verification.verify(morphed_task_graph, graph_config, parameters)


def test_morphed_task_graph(verification, morphed_task_graph, graph_config, parameters):
    verification.verify(morphed_task_graph, graph_config, parameters)


if __name__ == "__main__":
    main()