tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

test_unittest.py (48425B)


      1 # mypy: allow-untyped-defs
      2 import gc
      3 import sys
      4 from typing import List
      5 
      6 from _pytest.config import ExitCode
      7 from _pytest.monkeypatch import MonkeyPatch
      8 from _pytest.pytester import Pytester
      9 import pytest
     10 
     11 
     12 def test_simple_unittest(pytester: Pytester) -> None:
     13    testpath = pytester.makepyfile(
     14        """
     15        import unittest
     16        class MyTestCase(unittest.TestCase):
     17            def testpassing(self):
     18                self.assertEqual('foo', 'foo')
     19            def test_failing(self):
     20                self.assertEqual('foo', 'bar')
     21    """
     22    )
     23    reprec = pytester.inline_run(testpath)
     24    assert reprec.matchreport("testpassing").passed
     25    assert reprec.matchreport("test_failing").failed
     26 
     27 
     28 def test_runTest_method(pytester: Pytester) -> None:
     29    pytester.makepyfile(
     30        """
     31        import unittest
     32        class MyTestCaseWithRunTest(unittest.TestCase):
     33            def runTest(self):
     34                self.assertEqual('foo', 'foo')
     35        class MyTestCaseWithoutRunTest(unittest.TestCase):
     36            def runTest(self):
     37                self.assertEqual('foo', 'foo')
     38            def test_something(self):
     39                pass
     40        """
     41    )
     42    result = pytester.runpytest("-v")
     43    result.stdout.fnmatch_lines(
     44        """
     45        *MyTestCaseWithRunTest::runTest*
     46        *MyTestCaseWithoutRunTest::test_something*
     47        *2 passed*
     48    """
     49    )
     50 
     51 
     52 def test_isclasscheck_issue53(pytester: Pytester) -> None:
     53    testpath = pytester.makepyfile(
     54        """
     55        import unittest
     56        class _E(object):
     57            def __getattr__(self, tag):
     58                pass
     59        E = _E()
     60    """
     61    )
     62    result = pytester.runpytest(testpath)
     63    assert result.ret == ExitCode.NO_TESTS_COLLECTED
     64 
     65 
     66 def test_setup(pytester: Pytester) -> None:
     67    testpath = pytester.makepyfile(
     68        """
     69        import unittest
     70        class MyTestCase(unittest.TestCase):
     71            def setUp(self):
     72                self.foo = 1
     73            def setup_method(self, method):
     74                self.foo2 = 1
     75            def test_both(self):
     76                self.assertEqual(1, self.foo)
     77                assert self.foo2 == 1
     78            def teardown_method(self, method):
     79                assert 0, "42"
     80 
     81    """
     82    )
     83    reprec = pytester.inline_run("-s", testpath)
     84    assert reprec.matchreport("test_both", when="call").passed
     85    rep = reprec.matchreport("test_both", when="teardown")
     86    assert rep.failed and "42" in str(rep.longrepr)
     87 
     88 
     89 def test_setUpModule(pytester: Pytester) -> None:
     90    testpath = pytester.makepyfile(
     91        """
     92        values = []
     93 
     94        def setUpModule():
     95            values.append(1)
     96 
     97        def tearDownModule():
     98            del values[0]
     99 
    100        def test_hello():
    101            assert values == [1]
    102 
    103        def test_world():
    104            assert values == [1]
    105        """
    106    )
    107    result = pytester.runpytest(testpath)
    108    result.stdout.fnmatch_lines(["*2 passed*"])
    109 
    110 
    111 def test_setUpModule_failing_no_teardown(pytester: Pytester) -> None:
    112    testpath = pytester.makepyfile(
    113        """
    114        values = []
    115 
    116        def setUpModule():
    117            0/0
    118 
    119        def tearDownModule():
    120            values.append(1)
    121 
    122        def test_hello():
    123            pass
    124    """
    125    )
    126    reprec = pytester.inline_run(testpath)
    127    reprec.assertoutcome(passed=0, failed=1)
    128    call = reprec.getcalls("pytest_runtest_setup")[0]
    129    assert not call.item.module.values
    130 
    131 
    132 def test_new_instances(pytester: Pytester) -> None:
    133    testpath = pytester.makepyfile(
    134        """
    135        import unittest
    136        class MyTestCase(unittest.TestCase):
    137            def test_func1(self):
    138                self.x = 2
    139            def test_func2(self):
    140                assert not hasattr(self, 'x')
    141    """
    142    )
    143    reprec = pytester.inline_run(testpath)
    144    reprec.assertoutcome(passed=2)
    145 
    146 
    147 def test_function_item_obj_is_instance(pytester: Pytester) -> None:
    148    """item.obj should be a bound method on unittest.TestCase function items (#5390)."""
    149    pytester.makeconftest(
    150        """
    151        def pytest_runtest_makereport(item, call):
    152            if call.when == 'call':
    153                class_ = item.parent.obj
    154                assert isinstance(item.obj.__self__, class_)
    155    """
    156    )
    157    pytester.makepyfile(
    158        """
    159        import unittest
    160 
    161        class Test(unittest.TestCase):
    162            def test_foo(self):
    163                pass
    164    """
    165    )
    166    result = pytester.runpytest_inprocess()
    167    result.stdout.fnmatch_lines(["* 1 passed in*"])
    168 
    169 
    170 def test_teardown(pytester: Pytester) -> None:
    171    testpath = pytester.makepyfile(
    172        """
    173        import unittest
    174        class MyTestCase(unittest.TestCase):
    175            values = []
    176            def test_one(self):
    177                pass
    178            def tearDown(self):
    179                self.values.append(None)
    180        class Second(unittest.TestCase):
    181            def test_check(self):
    182                self.assertEqual(MyTestCase.values, [None])
    183    """
    184    )
    185    reprec = pytester.inline_run(testpath)
    186    passed, skipped, failed = reprec.countoutcomes()
    187    assert failed == 0, failed
    188    assert passed == 2
    189    assert passed + skipped + failed == 2
    190 
    191 
    192 def test_teardown_issue1649(pytester: Pytester) -> None:
    193    """
    194    Are TestCase objects cleaned up? Often unittest TestCase objects set
    195    attributes that are large and expensive during setUp.
    196 
    197    The TestCase will not be cleaned up if the test fails, because it
    198    would then exist in the stackframe.
    199    """
    200    testpath = pytester.makepyfile(
    201        """
    202        import unittest
    203        class TestCaseObjectsShouldBeCleanedUp(unittest.TestCase):
    204            def setUp(self):
    205                self.an_expensive_object = 1
    206            def test_demo(self):
    207                pass
    208 
    209    """
    210    )
    211 
    212    pytester.inline_run("-s", testpath)
    213    gc.collect()
    214 
    215    # Either already destroyed, or didn't run setUp.
    216    for obj in gc.get_objects():
    217        if type(obj).__name__ == "TestCaseObjectsShouldBeCleanedUp":
    218            assert not hasattr(obj, "an_expensive_obj")
    219 
    220 
    221 def test_unittest_skip_issue148(pytester: Pytester) -> None:
    222    testpath = pytester.makepyfile(
    223        """
    224        import unittest
    225 
    226        @unittest.skip("hello")
    227        class MyTestCase(unittest.TestCase):
    228            @classmethod
    229            def setUpClass(self):
    230                xxx
    231            def test_one(self):
    232                pass
    233            @classmethod
    234            def tearDownClass(self):
    235                xxx
    236    """
    237    )
    238    reprec = pytester.inline_run(testpath)
    239    reprec.assertoutcome(skipped=1)
    240 
    241 
    242 def test_method_and_teardown_failing_reporting(pytester: Pytester) -> None:
    243    pytester.makepyfile(
    244        """
    245        import unittest
    246        class TC(unittest.TestCase):
    247            def tearDown(self):
    248                assert 0, "down1"
    249            def test_method(self):
    250                assert False, "down2"
    251    """
    252    )
    253    result = pytester.runpytest("-s")
    254    assert result.ret == 1
    255    result.stdout.fnmatch_lines(
    256        [
    257            "*tearDown*",
    258            "*assert 0*",
    259            "*test_method*",
    260            "*assert False*",
    261            "*1 failed*1 error*",
    262        ]
    263    )
    264 
    265 
    266 def test_setup_failure_is_shown(pytester: Pytester) -> None:
    267    pytester.makepyfile(
    268        """
    269        import unittest
    270        import pytest
    271        class TC(unittest.TestCase):
    272            def setUp(self):
    273                assert 0, "down1"
    274            def test_method(self):
    275                print("never42")
    276                xyz
    277    """
    278    )
    279    result = pytester.runpytest("-s")
    280    assert result.ret == 1
    281    result.stdout.fnmatch_lines(["*setUp*", "*assert 0*down1*", "*1 failed*"])
    282    result.stdout.no_fnmatch_line("*never42*")
    283 
    284 
    285 def test_setup_setUpClass(pytester: Pytester) -> None:
    286    testpath = pytester.makepyfile(
    287        """
    288        import unittest
    289        import pytest
    290        class MyTestCase(unittest.TestCase):
    291            x = 0
    292            @classmethod
    293            def setUpClass(cls):
    294                cls.x += 1
    295            def test_func1(self):
    296                assert self.x == 1
    297            def test_func2(self):
    298                assert self.x == 1
    299            @classmethod
    300            def tearDownClass(cls):
    301                cls.x -= 1
    302        def test_torn_down():
    303            assert MyTestCase.x == 0
    304    """
    305    )
    306    reprec = pytester.inline_run(testpath)
    307    reprec.assertoutcome(passed=3)
    308 
    309 
    310 def test_fixtures_setup_setUpClass_issue8394(pytester: Pytester) -> None:
    311    pytester.makepyfile(
    312        """
    313        import unittest
    314        class MyTestCase(unittest.TestCase):
    315            @classmethod
    316            def setUpClass(cls):
    317                pass
    318            def test_func1(self):
    319                pass
    320            @classmethod
    321            def tearDownClass(cls):
    322                pass
    323    """
    324    )
    325    result = pytester.runpytest("--fixtures")
    326    assert result.ret == 0
    327    result.stdout.no_fnmatch_line("*no docstring available*")
    328 
    329    result = pytester.runpytest("--fixtures", "-v")
    330    assert result.ret == 0
    331    result.stdout.fnmatch_lines(["*no docstring available*"])
    332 
    333 
    334 def test_setup_class(pytester: Pytester) -> None:
    335    testpath = pytester.makepyfile(
    336        """
    337        import unittest
    338        import pytest
    339        class MyTestCase(unittest.TestCase):
    340            x = 0
    341            def setup_class(cls):
    342                cls.x += 1
    343            def test_func1(self):
    344                assert self.x == 1
    345            def test_func2(self):
    346                assert self.x == 1
    347            def teardown_class(cls):
    348                cls.x -= 1
    349        def test_torn_down():
    350            assert MyTestCase.x == 0
    351    """
    352    )
    353    reprec = pytester.inline_run(testpath)
    354    reprec.assertoutcome(passed=3)
    355 
    356 
    357 @pytest.mark.parametrize("type", ["Error", "Failure"])
    358 def test_testcase_adderrorandfailure_defers(pytester: Pytester, type: str) -> None:
    359    pytester.makepyfile(
    360        f"""
    361        from unittest import TestCase
    362        import pytest
    363        class MyTestCase(TestCase):
    364            def run(self, result):
    365                excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
    366                try:
    367                    result.add{type}(self, excinfo._excinfo)
    368                except KeyboardInterrupt:
    369                    raise
    370                except:
    371                    pytest.fail("add{type} should not raise")
    372            def test_hello(self):
    373                pass
    374    """
    375    )
    376    result = pytester.runpytest()
    377    result.stdout.no_fnmatch_line("*should not raise*")
    378 
    379 
@pytest.mark.parametrize("type", ["Error", "Failure"])
def test_testcase_custom_exception_info(pytester: Pytester, type: str) -> None:
    """When ExceptionInfo cannot represent the exception handed to
    addError/addFailure, pytest falls back to a plain-text representation
    and still reports the underlying failure.
    """
    # NOTE: str.format(**locals()) substitutes {type} below; the inner test
    # monkeypatches _pytest._code.ExceptionInfo with a class whose
    # constructor raises TypeError, simulating an incompatible excinfo.
    pytester.makepyfile(
        """
        from typing import Generic, TypeVar
        from unittest import TestCase
        import pytest, _pytest._code

        class MyTestCase(TestCase):
            def run(self, result):
                excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
                # We fake an incompatible exception info.
                class FakeExceptionInfo(Generic[TypeVar("E")]):
                    def __init__(self, *args, **kwargs):
                        mp.undo()
                        raise TypeError()
                    @classmethod
                    def from_current(cls):
                        return cls()
                    @classmethod
                    def from_exc_info(cls, *args, **kwargs):
                        return cls()
                mp = pytest.MonkeyPatch()
                mp.setattr(_pytest._code, 'ExceptionInfo', FakeExceptionInfo)
                try:
                    excinfo = excinfo._excinfo
                    result.add{type}(self, excinfo)
                finally:
                    mp.undo()

            def test_hello(self):
                pass
    """.format(**locals())
    )
    result = pytester.runpytest()
    # The fallback path announces itself and still shows the real error.
    result.stdout.fnmatch_lines(
        [
            "NOTE: Incompatible Exception Representation*",
            "*ZeroDivisionError*",
            "*1 failed*",
        ]
    )
    422 
    423 
    424 def test_testcase_totally_incompatible_exception_info(pytester: Pytester) -> None:
    425    import _pytest.unittest
    426 
    427    (item,) = pytester.getitems(
    428        """
    429        from unittest import TestCase
    430        class MyTestCase(TestCase):
    431            def test_hello(self):
    432                pass
    433    """
    434    )
    435    assert isinstance(item, _pytest.unittest.TestCaseFunction)
    436    item.addError(None, 42)  # type: ignore[arg-type]
    437    excinfo = item._excinfo
    438    assert excinfo is not None
    439    assert "ERROR: Unknown Incompatible" in str(excinfo.pop(0).getrepr())
    440 
    441 
    442 def test_module_level_pytestmark(pytester: Pytester) -> None:
    443    testpath = pytester.makepyfile(
    444        """
    445        import unittest
    446        import pytest
    447        pytestmark = pytest.mark.xfail
    448        class MyTestCase(unittest.TestCase):
    449            def test_func1(self):
    450                assert 0
    451    """
    452    )
    453    reprec = pytester.inline_run(testpath, "-s")
    454    reprec.assertoutcome(skipped=1)
    455 
    456 
class TestTrialUnittest:
    """Integration tests for twisted.trial TestCases running under pytest.

    The whole class is skipped if twisted is not importable (importorskip
    in setup_class).
    """

    def setup_class(cls):
        # Skip the whole class when twisted.trial is unavailable.
        cls.ut = pytest.importorskip("twisted.trial.unittest")
        # on windows trial uses a socket for a reactor and apparently doesn't close it properly
        # https://twistedmatrix.com/trac/ticket/9227
        cls.ignore_unclosed_socket_warning = ("-W", "always")

    def test_trial_testcase_runtest_not_collected(self, pytester: Pytester) -> None:
        """Both test_* methods and a bare runTest collect and pass for trial TestCases."""
        pytester.makepyfile(
            """
            from twisted.trial.unittest import TestCase

            class TC(TestCase):
                def test_hello(self):
                    pass
        """
        )
        reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)
        reprec.assertoutcome(passed=1)
        pytester.makepyfile(
            """
            from twisted.trial.unittest import TestCase

            class TC(TestCase):
                def runTest(self):
                    pass
        """
        )
        reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)
        reprec.assertoutcome(passed=1)

    def test_trial_exceptions_with_skips(self, pytester: Pytester) -> None:
        """pytest skips/xfails and trial's own .skip/.todo attributes all report correctly."""
        pytester.makepyfile(
            """
            from twisted.trial import unittest
            import pytest
            class TC(unittest.TestCase):
                def test_hello(self):
                    pytest.skip("skip_in_method")
                @pytest.mark.skipif("sys.version_info != 1")
                def test_hello2(self):
                    pass
                @pytest.mark.xfail(reason="iwanto")
                def test_hello3(self):
                    assert 0
                def test_hello4(self):
                    pytest.xfail("i2wanto")
                def test_trial_skip(self):
                    pass
                test_trial_skip.skip = "trialselfskip"

                def test_trial_todo(self):
                    assert 0
                test_trial_todo.todo = "mytodo"

                def test_trial_todo_success(self):
                    pass
                test_trial_todo_success.todo = "mytodo"

            class TC2(unittest.TestCase):
                def setup_class(cls):
                    pytest.skip("skip_in_setup_class")
                def test_method(self):
                    pass
        """
        )
        result = pytester.runpytest("-rxs", *self.ignore_unclosed_socket_warning)
        result.stdout.fnmatch_lines_random(
            [
                "*XFAIL*test_trial_todo*",
                "*trialselfskip*",
                "*skip_in_setup_class*",
                "*iwanto*",
                "*i2wanto*",
                "*sys.version_info*",
                "*skip_in_method*",
                "*1 failed*4 skipped*3 xfailed*",
            ]
        )
        # test_trial_todo_success fails: a "todo" test that passes is unexpected.
        assert result.ret == 1

    def test_trial_error(self, pytester: Pytester) -> None:
        """Reactor/Deferred errors surface as failures and teardown errors."""
        pytester.makepyfile(
            """
            from twisted.trial.unittest import TestCase
            from twisted.internet.defer import Deferred
            from twisted.internet import reactor

            class TC(TestCase):
                def test_one(self):
                    crash

                def test_two(self):
                    def f(_):
                        crash

                    d = Deferred()
                    d.addCallback(f)
                    reactor.callLater(0.3, d.callback, None)
                    return d

                def test_three(self):
                    def f():
                        pass # will never get called
                    reactor.callLater(0.3, f)
                # will crash at teardown

                def test_four(self):
                    def f(_):
                        reactor.callLater(0.3, f)
                        crash

                    d = Deferred()
                    d.addCallback(f)
                    reactor.callLater(0.3, d.callback, None)
                    return d
                # will crash both at test time and at teardown
        """
        )
        result = pytester.runpytest(
            "-vv", "-oconsole_output_style=classic", "-W", "ignore::DeprecationWarning"
        )
        result.stdout.fnmatch_lines(
            [
                "test_trial_error.py::TC::test_four FAILED",
                "test_trial_error.py::TC::test_four ERROR",
                "test_trial_error.py::TC::test_one FAILED",
                "test_trial_error.py::TC::test_three FAILED",
                "test_trial_error.py::TC::test_two FAILED",
                "*ERRORS*",
                "*_ ERROR at teardown of TC.test_four _*",
                "*DelayedCalls*",
                "*= FAILURES =*",
                "*_ TC.test_four _*",
                "*NameError*crash*",
                "*_ TC.test_one _*",
                "*NameError*crash*",
                "*_ TC.test_three _*",
                "*DelayedCalls*",
                "*_ TC.test_two _*",
                "*NameError*crash*",
                "*= 4 failed, 1 error in *",
            ]
        )

    def test_trial_pdb(self, pytester: Pytester) -> None:
        """A failing trial test drops into pdb and shows the assertion message."""
        p = pytester.makepyfile(
            """
            from twisted.trial import unittest
            import pytest
            class TC(unittest.TestCase):
                def test_hello(self):
                    assert 0, "hellopdb"
        """
        )
        child = pytester.spawn_pytest(str(p))
        child.expect("hellopdb")
        child.sendeof()

    def test_trial_testcase_skip_property(self, pytester: Pytester) -> None:
        """A class-level trial ``skip`` attribute skips the tests."""
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                skip = 'dont run'
                def test_func(self):
                    pass
            """
        )
        reprec = pytester.inline_run(testpath, "-s")
        reprec.assertoutcome(skipped=1)

    def test_trial_testfunction_skip_property(self, pytester: Pytester) -> None:
        """A per-function trial ``skip`` attribute skips that test."""
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                def test_func(self):
                    pass
                test_func.skip = 'dont run'
            """
        )
        reprec = pytester.inline_run(testpath, "-s")
        reprec.assertoutcome(skipped=1)

    def test_trial_testcase_todo_property(self, pytester: Pytester) -> None:
        """A class-level trial ``todo`` attribute turns failures into expected ones."""
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                todo = 'dont run'
                def test_func(self):
                    assert 0
            """
        )
        reprec = pytester.inline_run(testpath, "-s")
        reprec.assertoutcome(skipped=1)

    def test_trial_testfunction_todo_property(self, pytester: Pytester) -> None:
        """A per-function trial ``todo`` attribute turns its failure into an expected one."""
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                def test_func(self):
                    assert 0
                test_func.todo = 'dont run'
            """
        )
        reprec = pytester.inline_run(
            testpath, "-s", *self.ignore_unclosed_socket_warning
        )
        reprec.assertoutcome(skipped=1)
    669 
    670 
def test_djangolike_testcase(pytester: Pytester) -> None:
    """A Django-style TestCase that overrides __call__ to add _pre_setup and
    _post_teardown hooks runs its hooks in the expected order under pytest."""
    # contributed from Morten Breekevold
    pytester.makepyfile(
        """
        from unittest import TestCase, main

        class DjangoLikeTestCase(TestCase):

            def setUp(self):
                print("setUp()")

            def test_presetup_has_been_run(self):
                print("test_thing()")
                self.assertTrue(hasattr(self, 'was_presetup'))

            def tearDown(self):
                print("tearDown()")

            def __call__(self, result=None):
                try:
                    self._pre_setup()
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    import sys
                    result.addError(self, sys.exc_info())
                    return
                super(DjangoLikeTestCase, self).__call__(result)
                try:
                    self._post_teardown()
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    import sys
                    result.addError(self, sys.exc_info())
                    return

            def _pre_setup(self):
                print("_pre_setup()")
                self.was_presetup = True

            def _post_teardown(self):
                print("_post_teardown()")
    """
    )
    result = pytester.runpytest("-s")
    assert result.ret == 0
    # -s keeps the prints visible; they must appear in hook order.
    result.stdout.fnmatch_lines(
        [
            "*_pre_setup()*",
            "*setUp()*",
            "*test_thing()*",
            "*tearDown()*",
            "*_post_teardown()*",
        ]
    )
    727 
    728 
    729 def test_unittest_not_shown_in_traceback(pytester: Pytester) -> None:
    730    pytester.makepyfile(
    731        """
    732        import unittest
    733        class t(unittest.TestCase):
    734            def test_hello(self):
    735                x = 3
    736                self.assertEqual(x, 4)
    737    """
    738    )
    739    res = pytester.runpytest()
    740    res.stdout.no_fnmatch_line("*failUnlessEqual*")
    741 
    742 
    743 def test_unorderable_types(pytester: Pytester) -> None:
    744    pytester.makepyfile(
    745        """
    746        import unittest
    747        class TestJoinEmpty(unittest.TestCase):
    748            pass
    749 
    750        def make_test():
    751            class Test(unittest.TestCase):
    752                pass
    753            Test.__name__ = "TestFoo"
    754            return Test
    755        TestFoo = make_test()
    756    """
    757    )
    758    result = pytester.runpytest()
    759    result.stdout.no_fnmatch_line("*TypeError*")
    760    assert result.ret == ExitCode.NO_TESTS_COLLECTED
    761 
    762 
    763 def test_unittest_typerror_traceback(pytester: Pytester) -> None:
    764    pytester.makepyfile(
    765        """
    766        import unittest
    767        class TestJoinEmpty(unittest.TestCase):
    768            def test_hello(self, arg1):
    769                pass
    770    """
    771    )
    772    result = pytester.runpytest()
    773    assert "TypeError" in result.stdout.str()
    774    assert result.ret == 1
    775 
    776 
    777 @pytest.mark.parametrize("runner", ["pytest", "unittest"])
    778 def test_unittest_expected_failure_for_failing_test_is_xfail(
    779    pytester: Pytester, runner
    780 ) -> None:
    781    script = pytester.makepyfile(
    782        """
    783        import unittest
    784        class MyTestCase(unittest.TestCase):
    785            @unittest.expectedFailure
    786            def test_failing_test_is_xfail(self):
    787                assert False
    788        if __name__ == '__main__':
    789            unittest.main()
    790    """
    791    )
    792    if runner == "pytest":
    793        result = pytester.runpytest("-rxX")
    794        result.stdout.fnmatch_lines(
    795            ["*XFAIL*MyTestCase*test_failing_test_is_xfail*", "*1 xfailed*"]
    796        )
    797    else:
    798        result = pytester.runpython(script)
    799        result.stderr.fnmatch_lines(["*1 test in*", "*OK*(expected failures=1)*"])
    800    assert result.ret == 0
    801 
    802 
    803 @pytest.mark.parametrize("runner", ["pytest", "unittest"])
    804 def test_unittest_expected_failure_for_passing_test_is_fail(
    805    pytester: Pytester,
    806    runner: str,
    807 ) -> None:
    808    script = pytester.makepyfile(
    809        """
    810        import unittest
    811        class MyTestCase(unittest.TestCase):
    812            @unittest.expectedFailure
    813            def test_passing_test_is_fail(self):
    814                assert True
    815        if __name__ == '__main__':
    816            unittest.main()
    817    """
    818    )
    819 
    820    if runner == "pytest":
    821        result = pytester.runpytest("-rxX")
    822        result.stdout.fnmatch_lines(
    823            [
    824                "*MyTestCase*test_passing_test_is_fail*",
    825                "Unexpected success",
    826                "*1 failed*",
    827            ]
    828        )
    829    else:
    830        result = pytester.runpython(script)
    831        result.stderr.fnmatch_lines(["*1 test in*", "*(unexpected successes=1)*"])
    832 
    833    assert result.ret == 1
    834 
    835 
@pytest.mark.parametrize("stmt", ["return", "yield"])
def test_unittest_setup_interaction(pytester: Pytester, stmt: str) -> None:
   """Autouse pytest fixtures (class- and function-scoped) work on TestCase classes.

   ``stmt`` ends each fixture with either ``return`` (plain fixture) or
   ``yield`` (yield fixture); both forms must behave the same here.
   """
   pytester.makepyfile(
       f"""
       import unittest
       import pytest
       class MyTestCase(unittest.TestCase):
           @pytest.fixture(scope="class", autouse=True)
           def perclass(self, request):
               request.cls.hello = "world"
               {stmt}
           @pytest.fixture(scope="function", autouse=True)
           def perfunction(self, request):
               request.instance.funcname = request.function.__name__
               {stmt}

           def test_method1(self):
               assert self.funcname == "test_method1"
               assert self.hello == "world"

           def test_method2(self):
               assert self.funcname == "test_method2"

           def test_classattr(self):
               assert self.__class__.hello == "world"
   """
   )
   result = pytester.runpytest()
   result.stdout.fnmatch_lines(["*3 passed*"])
    865 
    866 
def test_non_unittest_no_setupclass_support(pytester: Pytester) -> None:
   """setUpClass/tearDownClass are ignored on plain (non-TestCase) classes.

   Both embedded tests pass only if neither classmethod was invoked,
   i.e. ``x`` stays 0 throughout.
   """
   testpath = pytester.makepyfile(
       """
       class TestFoo(object):
           x = 0

           @classmethod
           def setUpClass(cls):
               cls.x = 1

           def test_method1(self):
               assert self.x == 0

           @classmethod
           def tearDownClass(cls):
               cls.x = 1

       def test_not_torn_down():
           assert TestFoo.x == 0

   """
   )
   reprec = pytester.inline_run(testpath)
   reprec.assertoutcome(passed=2)
    891 
    892 
def test_no_teardown_if_setupclass_failed(pytester: Pytester) -> None:
   """When setUpClass fails, tearDownClass must not run.

   ``test_notTornDown`` passes only if ``x`` keeps the value set by the
   failing ``setUpClass`` (tearDownClass would have set it to 100).
   ``test_func1`` never executes because setUpClass fails, so its bare
   ``cls`` reference is never evaluated.
   """
   testpath = pytester.makepyfile(
       """
       import unittest

       class MyTestCase(unittest.TestCase):
           x = 0

           @classmethod
           def setUpClass(cls):
               cls.x = 1
               assert False

           def test_func1(self):
               cls.x = 10

           @classmethod
           def tearDownClass(cls):
               cls.x = 100

       def test_notTornDown():
           assert MyTestCase.x == 1
   """
   )
   reprec = pytester.inline_run(testpath)
   reprec.assertoutcome(passed=1, failed=1)
    919 
    920 
def test_cleanup_functions(pytester: Pytester) -> None:
   """Ensure functions added with addCleanup are always called after each test ends (#6947)"""
   pytester.makepyfile(
       """
       import unittest

       cleanups = []

       class Test(unittest.TestCase):

           def test_func_1(self):
               self.addCleanup(cleanups.append, "test_func_1")

           def test_func_2(self):
               self.addCleanup(cleanups.append, "test_func_2")
               assert 0

           def test_func_3_check_cleanups(self):
               assert cleanups == ["test_func_1", "test_func_2"]
   """
   )
   # test_func_2 fails on purpose; its cleanup must still run, which
   # test_func_3_check_cleanups then verifies.
   result = pytester.runpytest("-v")
   result.stdout.fnmatch_lines(
       [
           "*::test_func_1 PASSED *",
           "*::test_func_2 FAILED *",
           "*::test_func_3_check_cleanups PASSED *",
       ]
   )
    950 
    951 
def test_issue333_result_clearing(pytester: Pytester) -> None:
   """A single failure is reported when both the test and a runtest-call hook
   wrapper fail (#333): the failing wrapper must not duplicate the outcome."""
   pytester.makeconftest(
       """
       import pytest
       @pytest.hookimpl(wrapper=True)
       def pytest_runtest_call(item):
           yield
           assert 0
   """
   )
   pytester.makepyfile(
       """
       import unittest
       class TestIt(unittest.TestCase):
           def test_func(self):
               0/0
   """
   )

   reprec = pytester.inline_run()
   reprec.assertoutcome(failed=1)
    973 
    974 
def test_unittest_raise_skip_issue748(pytester: Pytester) -> None:
   """Raising unittest.SkipTest inside a test method produces a skip report (#748)."""
   pytester.makepyfile(
       test_foo="""
       import unittest

       class MyTestCase(unittest.TestCase):
           def test_one(self):
               raise unittest.SkipTest('skipping due to reasons')
   """
   )
   result = pytester.runpytest("-v", "-rs")
   result.stdout.fnmatch_lines(
       """
       *SKIP*[1]*test_foo.py*skipping due to reasons*
       *1 skipped*
   """
   )
    992 
    993 
def test_unittest_skip_issue1169(pytester: Pytester) -> None:
   """The @unittest.skip decorator is honored; the body (self.fail()) never runs (#1169)."""
   pytester.makepyfile(
       test_foo="""
       import unittest

       class MyTestCase(unittest.TestCase):
           @unittest.skip("skipping due to reasons")
           def test_skip(self):
                self.fail()
       """
   )
   result = pytester.runpytest("-v", "-rs")
   result.stdout.fnmatch_lines(
       """
       *SKIP*[1]*skipping due to reasons*
       *1 skipped*
   """
   )
   1012 
   1013 
def test_class_method_containing_test_issue1558(pytester: Pytester) -> None:
   """Setting ``__test__ = False`` on a TestCase method excludes it from collection (#1558)."""
   pytester.makepyfile(
       test_foo="""
       import unittest

       class MyTestCase(unittest.TestCase):
           def test_should_run(self):
               pass
           def test_should_not_run(self):
               pass
           test_should_not_run.__test__ = False
   """
   )
   reprec = pytester.inline_run()
   reprec.assertoutcome(passed=1)
   1029 
   1030 
@pytest.mark.parametrize("base", ["builtins.object", "unittest.TestCase"])
def test_usefixtures_marker_on_unittest(base, pytester: Pytester) -> None:
   """@pytest.mark.usefixtures works on methods of both plain classes and
   TestCase subclasses, applying exactly the named fixtures per test (#3498)."""
   # Derive the module to import ("builtins" or "unittest") from the base
   # class path and skip if it cannot be imported.
   module = base.rsplit(".", 1)[0]
   pytest.importorskip(module)
   pytester.makepyfile(
       conftest="""
       import pytest

       @pytest.fixture(scope='function')
       def fixture1(request, monkeypatch):
           monkeypatch.setattr(request.instance, 'fixture1', True )


       @pytest.fixture(scope='function')
       def fixture2(request, monkeypatch):
           monkeypatch.setattr(request.instance, 'fixture2', True )

       def node_and_marks(item):
           print(item.nodeid)
           for mark in item.iter_markers():
               print("  ", mark)

       @pytest.fixture(autouse=True)
       def my_marks(request):
           node_and_marks(request.node)

       def pytest_collection_modifyitems(items):
           for item in items:
              node_and_marks(item)

       """
   )

   pytester.makepyfile(
       f"""
       import pytest
       import {module}

       class Tests({base}):
           fixture1 = False
           fixture2 = False

           @pytest.mark.usefixtures("fixture1")
           def test_one(self):
               assert self.fixture1
               assert not self.fixture2

           @pytest.mark.usefixtures("fixture1", "fixture2")
           def test_two(self):
               assert self.fixture1
               assert self.fixture2


   """
   )

   # -s so the node/mark diagnostics printed by the conftest are visible.
   result = pytester.runpytest("-s")
   result.assert_outcomes(passed=2)
   1090 
   1091 
def test_testcase_handles_init_exceptions(pytester: Pytester) -> None:
   """
   Regression test to make sure exceptions in the __init__ method are bubbled up correctly.
   See https://github.com/pytest-dev/pytest/issues/3788
   """
   pytester.makepyfile(
       """
       from unittest import TestCase
       import pytest
       class MyTestCase(TestCase):
           def __init__(self, *args, **kwargs):
               raise Exception("should raise this exception")
           def test_hello(self):
               pass
   """
   )
   result = pytester.runpytest()
   assert "should raise this exception" in result.stdout.str()
   # The __init__ failure must not additionally surface as a teardown error.
   result.stdout.no_fnmatch_line("*ERROR at teardown of MyTestCase.test_hello*")
   1111 
   1112 
def test_error_message_with_parametrized_fixtures(pytester: Pytester) -> None:
   """A clear error names the test, the item type, and the unsupported fixture
   when a TestCase method requests a parametrized fixture (example file's
   contents are not shown here)."""
   pytester.copy_example("unittest/test_parametrized_fixture_error_message.py")
   result = pytester.runpytest()
   result.stdout.fnmatch_lines(
       [
           "*test_two does not support fixtures*",
           "*TestSomethingElse::test_two",
           "*Function type: TestCaseFunction",
       ]
   )
   1123 
   1124 
@pytest.mark.parametrize(
   "test_name, expected_outcome",
   [
       ("test_setup_skip.py", "1 skipped"),
       ("test_setup_skip_class.py", "1 skipped"),
       ("test_setup_skip_module.py", "1 error"),
   ],
)
def test_setup_inheritance_skipping(
   pytester: Pytester, test_name, expected_outcome
) -> None:
   """Issue #4700: each copied example file must produce ``expected_outcome``
   in the run summary."""
   pytester.copy_example(f"unittest/{test_name}")
   result = pytester.runpytest()
   result.stdout.fnmatch_lines([f"* {expected_outcome} in *"])
   1140 
   1141 
def test_BdbQuit(pytester: Pytester) -> None:
   """A test raising bdb.BdbQuit fails without aborting the session."""
   pytester.makepyfile(
       test_foo="""
       import unittest

       class MyTestCase(unittest.TestCase):
           def test_bdbquit(self):
               import bdb
               raise bdb.BdbQuit()

           def test_should_not_run(self):
               pass
   """
   )
   reprec = pytester.inline_run()
   # Despite its name, test_should_not_run does run and pass (passed=1);
   # only the BdbQuit-raising test fails.
   reprec.assertoutcome(failed=1, passed=1)
   1158 
   1159 
def test_exit_outcome(pytester: Pytester) -> None:
   """pytest.exit() inside a TestCase method stops the session immediately:
   the second test never runs and the summary shows no tests ran."""
   pytester.makepyfile(
       test_foo="""
       import pytest
       import unittest

       class MyTestCase(unittest.TestCase):
           def test_exit_outcome(self):
               pytest.exit("pytest_exit called")

           def test_should_not_run(self):
               pass
   """
   )
   result = pytester.runpytest()
   result.stdout.fnmatch_lines(["*Exit: pytest_exit called*", "*= no tests ran in *"])
   1176 
   1177 
   1178 def test_trace(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
   1179    calls = []
   1180 
   1181    def check_call(*args, **kwargs):
   1182        calls.append((args, kwargs))
   1183        assert args == ("runcall",)
   1184 
   1185        class _pdb:
   1186            def runcall(*args, **kwargs):
   1187                calls.append((args, kwargs))
   1188 
   1189        return _pdb
   1190 
   1191    monkeypatch.setattr("_pytest.debugging.pytestPDB._init_pdb", check_call)
   1192 
   1193    p1 = pytester.makepyfile(
   1194        """
   1195        import unittest
   1196 
   1197        class MyTestCase(unittest.TestCase):
   1198            def test(self):
   1199                self.assertEqual('foo', 'foo')
   1200    """
   1201    )
   1202    result = pytester.runpytest("--trace", str(p1))
   1203    assert len(calls) == 2
   1204    assert result.ret == 0
   1205 
   1206 
def test_pdb_teardown_called(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
   """Ensure tearDown() is always called when --pdb is given in the command-line.

   We delay the normal tearDown() calls when --pdb is given, so this ensures we are calling
   tearDown() eventually to avoid memory leaks when using --pdb.
   """
   teardowns: List[str] = []
   # Expose the list on the pytest module so the generated test file below
   # can append to it.
   monkeypatch.setattr(
       pytest, "test_pdb_teardown_called_teardowns", teardowns, raising=False
   )

   pytester.makepyfile(
       """
       import unittest
       import pytest

       class MyTestCase(unittest.TestCase):

           def tearDown(self):
               pytest.test_pdb_teardown_called_teardowns.append(self.id())

           def test_1(self):
               pass
           def test_2(self):
               pass
   """
   )
   result = pytester.runpytest_inprocess("--pdb")
   result.stdout.fnmatch_lines("* 2 passed in *")
   # tearDown ran exactly once per test, in execution order.
   assert teardowns == [
       "test_pdb_teardown_called.MyTestCase.test_1",
       "test_pdb_teardown_called.MyTestCase.test_2",
   ]
   1240 
   1241 
@pytest.mark.parametrize("mark", ["@unittest.skip", "@pytest.mark.skip"])
def test_pdb_teardown_skipped_for_functions(
   pytester: Pytester, monkeypatch: MonkeyPatch, mark: str
) -> None:
   """
   With --pdb, setUp and tearDown should not be called for tests skipped
   via a decorator (#7215).
   """
   tracked: List[str] = []
   # Expose the list on the pytest module so the generated test file below
   # can append to it.
   monkeypatch.setattr(pytest, "track_pdb_teardown_skipped", tracked, raising=False)

   pytester.makepyfile(
       f"""
       import unittest
       import pytest

       class MyTestCase(unittest.TestCase):

           def setUp(self):
               pytest.track_pdb_teardown_skipped.append("setUp:" + self.id())

           def tearDown(self):
               pytest.track_pdb_teardown_skipped.append("tearDown:" + self.id())

           {mark}("skipped for reasons")
           def test_1(self):
               pass

   """
   )
   result = pytester.runpytest_inprocess("--pdb")
   result.stdout.fnmatch_lines("* 1 skipped in *")
   # Neither setUp nor tearDown may have recorded anything for the skipped test.
   assert tracked == []
   1275 
   1276 
@pytest.mark.parametrize("mark", ["@unittest.skip", "@pytest.mark.skip"])
def test_pdb_teardown_skipped_for_classes(
   pytester: Pytester, monkeypatch: MonkeyPatch, mark: str
) -> None:
   """
   With --pdb, setUp and tearDown should not be called for tests skipped
   via a decorator on the class (#10060).
   """
   tracked: List[str] = []
   # Expose the list on the pytest module so the generated test file below
   # can append to it.
   monkeypatch.setattr(pytest, "track_pdb_teardown_skipped", tracked, raising=False)

   pytester.makepyfile(
       f"""
       import unittest
       import pytest

       {mark}("skipped for reasons")
       class MyTestCase(unittest.TestCase):

           def setUp(self):
               pytest.track_pdb_teardown_skipped.append("setUp:" + self.id())

           def tearDown(self):
               pytest.track_pdb_teardown_skipped.append("tearDown:" + self.id())

           def test_1(self):
               pass

   """
   )
   result = pytester.runpytest_inprocess("--pdb")
   result.stdout.fnmatch_lines("* 1 skipped in *")
   # Neither setUp nor tearDown may have recorded anything for the skipped class.
   assert tracked == []
   1310 
   1311 
def test_async_support(pytester: Pytester) -> None:
   """Async unittest support: the copied asyncio example produces 2 passes
   and 1 failure (expected counts come from the example file, not shown here)."""
   # unittest.async_case is only available on Python versions that ship it.
   pytest.importorskip("unittest.async_case")

   pytester.copy_example("unittest/test_unittest_asyncio.py")
   reprec = pytester.inline_run()
   reprec.assertoutcome(failed=1, passed=2)
   1318 
   1319 
def test_asynctest_support(pytester: Pytester) -> None:
   """Check asynctest support (#7110)"""
   # asynctest is an optional third-party package; skip when unavailable.
   pytest.importorskip("asynctest")

   pytester.copy_example("unittest/test_unittest_asynctest.py")
   reprec = pytester.inline_run()
   reprec.assertoutcome(failed=1, passed=2)
   1327 
   1328 
   1329 def test_plain_unittest_does_not_support_async(pytester: Pytester) -> None:
   1330    """Async functions in plain unittest.TestCase subclasses are not supported without plugins.
   1331 
   1332    This test exists here to avoid introducing this support by accident, leading users
   1333    to expect that it works, rather than doing so intentionally as a feature.
   1334 
   1335    See https://github.com/pytest-dev/pytest-asyncio/issues/180 for more context.
   1336    """
   1337    pytester.copy_example("unittest/test_unittest_plain_async.py")
   1338    result = pytester.runpytest_subprocess()
   1339    if hasattr(sys, "pypy_version_info"):
   1340        # in PyPy we can't reliable get the warning about the coroutine not being awaited,
   1341        # because it depends on the coroutine being garbage collected; given that
   1342        # we are running in a subprocess, that's difficult to enforce
   1343        expected_lines = ["*1 passed*"]
   1344    else:
   1345        expected_lines = [
   1346            "*RuntimeWarning: coroutine * was never awaited",
   1347            "*1 passed*",
   1348        ]
   1349    result.stdout.fnmatch_lines(expected_lines)
   1350 
   1351 
   1352 def test_do_class_cleanups_on_success(pytester: Pytester) -> None:
   1353    testpath = pytester.makepyfile(
   1354        """
   1355        import unittest
   1356        class MyTestCase(unittest.TestCase):
   1357            values = []
   1358            @classmethod
   1359            def setUpClass(cls):
   1360                def cleanup():
   1361                    cls.values.append(1)
   1362                cls.addClassCleanup(cleanup)
   1363            def test_one(self):
   1364                pass
   1365            def test_two(self):
   1366                pass
   1367        def test_cleanup_called_exactly_once():
   1368            assert MyTestCase.values == [1]
   1369    """
   1370    )
   1371    reprec = pytester.inline_run(testpath)
   1372    passed, skipped, failed = reprec.countoutcomes()
   1373    assert failed == 0
   1374    assert passed == 3
   1375 
   1376 
def test_do_class_cleanups_on_setupclass_failure(pytester: Pytester) -> None:
   """Class cleanups registered before a setUpClass failure still run, exactly once."""
   testpath = pytester.makepyfile(
       """
       import unittest
       class MyTestCase(unittest.TestCase):
           values = []
           @classmethod
           def setUpClass(cls):
               def cleanup():
                   cls.values.append(1)
               cls.addClassCleanup(cleanup)
               assert False
           def test_one(self):
               pass
       def test_cleanup_called_exactly_once():
           assert MyTestCase.values == [1]
   """
   )
   reprec = pytester.inline_run(testpath)
   passed, skipped, failed = reprec.countoutcomes()
   # test_one fails via the setUpClass failure; the module-level check passes.
   assert failed == 1
   assert passed == 1
   1399 
   1400 
def test_do_class_cleanups_on_teardownclass_failure(pytester: Pytester) -> None:
   """Class cleanups still run exactly once when tearDownClass fails."""
   testpath = pytester.makepyfile(
       """
       import unittest
       class MyTestCase(unittest.TestCase):
           values = []
           @classmethod
           def setUpClass(cls):
               def cleanup():
                   cls.values.append(1)
               cls.addClassCleanup(cleanup)
           @classmethod
           def tearDownClass(cls):
               assert False
           def test_one(self):
               pass
           def test_two(self):
               pass
       def test_cleanup_called_exactly_once():
           assert MyTestCase.values == [1]
   """
   )
   reprec = pytester.inline_run(testpath)
   passed, skipped, failed = reprec.countoutcomes()
   # Both tests and the module-level check pass; only the pass count is
   # asserted here, not how the tearDownClass failure is reported.
   assert passed == 3
   1426 
   1427 
def test_do_cleanups_on_success(pytester: Pytester) -> None:
   """addCleanup callbacks run after each test: two tests give two cleanup calls."""
   testpath = pytester.makepyfile(
       """
       import unittest
       class MyTestCase(unittest.TestCase):
           values = []
           def setUp(self):
               def cleanup():
                   self.values.append(1)
               self.addCleanup(cleanup)
           def test_one(self):
               pass
           def test_two(self):
               pass
       def test_cleanup_called_the_right_number_of_times():
           assert MyTestCase.values == [1, 1]
   """
   )
   reprec = pytester.inline_run(testpath)
   passed, skipped, failed = reprec.countoutcomes()
   assert failed == 0
   assert passed == 3
   1450 
   1451 
def test_do_cleanups_on_setup_failure(pytester: Pytester) -> None:
   """Cleanups registered before a setUp failure still run once per test."""
   testpath = pytester.makepyfile(
       """
       import unittest
       class MyTestCase(unittest.TestCase):
           values = []
           def setUp(self):
               def cleanup():
                   self.values.append(1)
               self.addCleanup(cleanup)
               assert False
           def test_one(self):
               pass
           def test_two(self):
               pass
       def test_cleanup_called_the_right_number_of_times():
           assert MyTestCase.values == [1, 1]
   """
   )
   reprec = pytester.inline_run(testpath)
   passed, skipped, failed = reprec.countoutcomes()
   # Both unittest tests fail in setUp; the module-level check passes.
   assert failed == 2
   assert passed == 1
   1475 
   1476 
def test_do_cleanups_on_teardown_failure(pytester: Pytester) -> None:
   """Cleanups run once per test even when tearDown fails."""
   testpath = pytester.makepyfile(
       """
       import unittest
       class MyTestCase(unittest.TestCase):
           values = []
           def setUp(self):
               def cleanup():
                   self.values.append(1)
               self.addCleanup(cleanup)
           def tearDown(self):
               assert False
           def test_one(self):
               pass
           def test_two(self):
               pass
       def test_cleanup_called_the_right_number_of_times():
           assert MyTestCase.values == [1, 1]
   """
   )
   reprec = pytester.inline_run(testpath)
   passed, skipped, failed = reprec.countoutcomes()
   # Both unittest tests fail in tearDown; the module-level check passes.
   assert failed == 2
   assert passed == 1
   1501 
   1502 
class TestClassCleanupErrors:
   """
   Make sure to show exceptions raised during class cleanup function (those registered
   via addClassCleanup()).

   See #11728.
   """

   def test_class_cleanups_failure_in_setup(self, pytester: Pytester) -> None:
       """Cleanup errors raised after a setUpClass failure are all shown;
       the setUpClass error itself is reported separately."""
       testpath = pytester.makepyfile(
           """
           import unittest
           class MyTestCase(unittest.TestCase):
               @classmethod
               def setUpClass(cls):
                   def cleanup(n):
                       raise Exception(f"fail {n}")
                   cls.addClassCleanup(cleanup, 2)
                   cls.addClassCleanup(cleanup, 1)
                   raise Exception("fail 0")
               def test(self):
                   pass
           """
       )
       result = pytester.runpytest("-s", testpath)
       result.assert_outcomes(passed=0, errors=1)
       # Cleanups run in LIFO order, so "fail 1" is raised before "fail 2".
       result.stdout.fnmatch_lines(
           [
               "*Unittest class cleanup errors *2 sub-exceptions*",
               "*Exception: fail 1",
               "*Exception: fail 2",
           ]
       )
       result.stdout.fnmatch_lines(
           [
               "* ERROR at setup of MyTestCase.test *",
               "E * Exception: fail 0",
           ]
       )

   def test_class_cleanups_failure_in_teardown(self, pytester: Pytester) -> None:
       """Multiple failing class cleanups are reported together as one error."""
       testpath = pytester.makepyfile(
           """
           import unittest
           class MyTestCase(unittest.TestCase):
               @classmethod
               def setUpClass(cls):
                   def cleanup(n):
                       raise Exception(f"fail {n}")
                   cls.addClassCleanup(cleanup, 2)
                   cls.addClassCleanup(cleanup, 1)
               def test(self):
                   pass
           """
       )
       result = pytester.runpytest("-s", testpath)
       result.assert_outcomes(passed=1, errors=1)
       # Cleanups run in LIFO order, so "fail 1" is raised before "fail 2".
       result.stdout.fnmatch_lines(
           [
               "*Unittest class cleanup errors *2 sub-exceptions*",
               "*Exception: fail 1",
               "*Exception: fail 2",
           ]
       )

   def test_class_cleanup_1_failure_in_teardown(self, pytester: Pytester) -> None:
       """A single failing class cleanup is reported directly as a teardown error."""
       testpath = pytester.makepyfile(
           """
           import unittest
           class MyTestCase(unittest.TestCase):
               @classmethod
               def setUpClass(cls):
                   def cleanup(n):
                       raise Exception(f"fail {n}")
                   cls.addClassCleanup(cleanup, 1)
               def test(self):
                   pass
           """
       )
       result = pytester.runpytest("-s", testpath)
       result.assert_outcomes(passed=1, errors=1)
       result.stdout.fnmatch_lines(
           [
               "*ERROR at teardown of MyTestCase.test*",
               "*Exception: fail 1",
           ]
       )
   1590 
   1591 
def test_traceback_pruning(pytester: Pytester) -> None:
   """Regression test for #9610 - doesn't crash during traceback pruning."""
   # NOTE(review): the trivial __init__ override appears to be the trigger
   # for the #9610 crash -- confirm against the original issue.
   pytester.makepyfile(
       """
       import unittest

       class MyTestCase(unittest.TestCase):
           def __init__(self, test_method):
               unittest.TestCase.__init__(self, test_method)

       class TestIt(MyTestCase):
           @classmethod
           def tearDownClass(cls) -> None:
               assert False

           def test_it(self):
               pass
       """
   )
   reprec = pytester.inline_run()
   passed, skipped, failed = reprec.countoutcomes()
   # The test itself passes; the tearDownClass failure is still reported.
   assert passed == 1
   assert failed == 1
   assert reprec.ret == 1
   1616 
   1617 
def test_raising_unittest_skiptest_during_collection(
   pytester: Pytester,
) -> None:
   """unittest.SkipTest raised at module level skips the whole module during
   collection -- no tests from either class are collected or run."""
   pytester.makepyfile(
       """
       import unittest

       class TestIt(unittest.TestCase):
           def test_it(self): pass
           def test_it2(self): pass

       raise unittest.SkipTest()

       class TestIt2(unittest.TestCase):
           def test_it(self): pass
           def test_it2(self): pass
       """
   )
   reprec = pytester.inline_run()
   passed, skipped, failed = reprec.countoutcomes()
   assert passed == 0
   # Unittest reports one fake test for a skipped module.
   assert skipped == 1
   assert failed == 0
   assert reprec.ret == ExitCode.NO_TESTS_COLLECTED