
Commit 65c4fe5

Match xfail behavior for pytest and unittest with TAP spec (#57)

* Add a test for unittest.expectedFailure behavior.
* Handle xfail behavior changes.
* Add release docs about the behavior change.
* Make the unittest test assertion formatting consistent with other tests.
1 parent 6c67f3a commit 65c4fe5

File tree: 3 files changed, +127 -30 lines

* docs/releases.rst
* src/pytest_tap/plugin.py
* tests/test_plugin.py


docs/releases.rst

Lines changed: 3 additions & 0 deletions

@@ -6,6 +6,9 @@ Version 3.2, To Be Released
 
 * Add support for Python 3.8.
 * Add support for Python 3.9.
+* Handle ``unittest.expectedFailure`` and ``pytest.xfail``
+  in a way that is more consistent
+  with the TAP specification.
 
 Version 3.1, Released March 25, 2020
 ------------------------------------
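
For context on the release note above: the TAP specification's TODO directive marks tests that are expected to fail, so an expected failure is reported as a "not ok" line carrying "# TODO", while an unexpected pass keeps the "# TODO" directive on an "ok" line instead of failing the run. A minimal sketch of the resulting output with this commit applied, using the --tap-stream option exercised by the tests below; the file name and reason are illustrative and not part of the commit:

# sketch_xfail_tap.py -- illustrative file name, not part of this commit.
# Running `pytest --tap-stream sketch_xfail_tap.py` should stream TAP lines
# roughly matching the expectations in tests/test_plugin.py below:
#   not ok 1 sketch_xfail_tap.py::test_expected_failure # TODO expected failure: flaky math
#   ok 2 sketch_xfail_tap.py::test_unexpected_success # TODO unexpected success: flaky math
import pytest


@pytest.mark.xfail(strict=False, reason="flaky math")
def test_expected_failure():
    assert 1 + 1 == 3  # fails, as the marker anticipates


@pytest.mark.xfail(strict=False, reason="flaky math")
def test_unexpected_success():
    assert 1 + 1 == 2  # passes even though the marker expected a failure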

src/pytest_tap/plugin.py

Lines changed: 29 additions & 15 deletions

@@ -98,36 +98,50 @@ def pytest_runtest_logreport(report):
     # Handle xfails first because they report in unusual ways.
     # Non-strict xfails will include `wasxfail` while strict xfails won't.
     if hasattr(report, "wasxfail"):
-        directive = ""
+        reason = ""
+        # pytest adds an ugly "reason: " for expectedFailure
+        # even though the standard library doesn't accept a reason for that decorator.
+        # Ignore the "reason: " from pytest.
+        if report.wasxfail and report.wasxfail != "reason: ":
+            reason = ": {}".format(report.wasxfail)
+
         if report.skipped:
-            directive = "TODO expected failure: {}".format(report.wasxfail)
+            directive = "TODO expected failure{}".format(reason)
+            tracker.add_not_ok(testcase, description, directive=directive)
         elif report.passed:
-            directive = "TODO unexpected success: {}".format(report.wasxfail)
-
-        tracker.add_ok(testcase, description, directive=directive)
+            directive = "TODO unexpected success{}".format(reason)
+            tracker.add_ok(testcase, description, directive=directive)
     elif report.passed:
         tracker.add_ok(testcase, description)
     elif report.failed:
         diagnostics = _make_as_diagnostics(report)
 
-        # strict xfail mode should include the todo directive.
-        # The only indicator that strict xfail occurred for this report
-        # is to check longrepr.
-        directive = ""
-        if isinstance(report.longrepr, str) and "[XPASS(strict)]" in report.longrepr:
-            directive = "TODO"
+        # pytest treats an unexpected success from unittest.expectedFailure as a failure.
+        # To match up with TAPTestResult and the TAP spec, treat the pass
+        # as an ok with a todo directive instead.
+        if isinstance(report.longrepr, str) and "Unexpected success" in report.longrepr:
+            tracker.add_ok(testcase, description, directive="TODO unexpected success")
+            return
 
-        tracker.add_not_ok(
-            testcase, description, directive=directive, diagnostics=diagnostics
-        )
+        # A strict xfail that passes (i.e., XPASS) should be marked as a failure.
+        # The only indicator that strict xfail occurred for XPASS is to check longrepr.
+        if isinstance(report.longrepr, str) and "[XPASS(strict)]" in report.longrepr:
+            tracker.add_not_ok(
+                testcase,
+                description,
+                directive="unexpected success: {}".format(report.longrepr),
+            )
+            return
+
+        tracker.add_not_ok(testcase, description, diagnostics=diagnostics)
     elif report.skipped:
         reason = report.longrepr[2].split(":", 1)[1].strip()
         tracker.add_skip(testcase, description, reason)
 
 
 def _make_as_diagnostics(report):
     """Format a report as TAP diagnostic output."""
-    lines = report.longreprtext.splitlines(True)
+    lines = report.longreprtext.splitlines(keepends=True)
     return format_as_diagnostics(lines)
 
 
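As a quick reference for the branch logic above, the sketch below condenses how xfail-related reports now map to TAP lines. The helper name summarize_xfail_outcome is invented for this note and is not part of the pytest-tap API; it only mirrors the checks shown in the hunk:

# Illustrative condensation of the branch logic in pytest_runtest_logreport
# after this commit; this helper does not exist in pytest-tap.
def summarize_xfail_outcome(wasxfail, outcome, longrepr=""):
    """Return roughly the TAP line shape produced for an xfail-related report."""
    if wasxfail is not None:  # non-strict xfail: pytest sets report.wasxfail
        reason = "" if wasxfail in ("", "reason: ") else ": " + wasxfail
        if outcome == "skipped":  # the expected failure really failed
            return "not ok ... # TODO expected failure" + reason
        if outcome == "passed":  # the expected failure unexpectedly passed
            return "ok ... # TODO unexpected success" + reason
    if outcome == "failed":
        if "Unexpected success" in longrepr:  # unittest.expectedFailure that passed
            return "ok ... # TODO unexpected success"
        if "[XPASS(strict)]" in longrepr:  # strict xfail that passed
            return "not ok ... # unexpected success: " + longrepr
        return "not ok ..."  # ordinary failure, reported with diagnostics
    return "ok ..."  # ordinary pass; skips are handled separately


# For example, a non-strict xfail that passes with reason "a reason":
assert summarize_xfail_outcome("a reason", "passed") == (
    "ok ... # TODO unexpected success: a reason"
)
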
tests/test_plugin.py

Lines changed: 95 additions & 15 deletions

@@ -57,7 +57,7 @@ def test_stream(testdir, sample_test_file):
             "ok 3 test_stream.py::test_params[foo]",
             "ok 4 test_stream.py::test_params[bar]",
             "ok 5 test_stream.py::test_skipped # SKIP some reason",
-            "ok 6 test_stream.py::test_broken # TODO expected failure: a reason",
+            "not ok 6 test_stream.py::test_broken # TODO expected failure: a reason",
         ]
     )
 
@@ -78,7 +78,7 @@ def test_combined(testdir, sample_test_file):
         "ok 3 test_combined.py::test_params[foo]",
         "ok 4 test_combined.py::test_params[bar]",
         "ok 5 test_combined.py::test_skipped # SKIP some reason",
-        "ok 6 test_combined.py::test_broken # TODO expected failure: a reason",
+        "not ok 6 test_combined.py::test_broken # TODO expected failure: a reason",
     ]
     # If the dependencies for version 13 happen to be installed, tweak the output.
     if ENABLE_VERSION_13:
@@ -112,34 +112,114 @@ def test_outdir(testdir, sample_test_file):
     assert testresults.check()
 
 
-def test_xfail_strict_function(testdir):
-    """An xfail with strict on will fail when it unexpectedly passes.
+def test_xfail_no_reason(testdir):
+    """xfails output gracefully when no reason is provided."""
+    testdir.makepyfile(
+        """
+        import pytest
 
-    The xfail should look like an xfail by including the TODO directive.
+        @pytest.mark.xfail(strict=False)
+        def test_unexpected_success():
+            assert True
+
+        @pytest.mark.xfail(strict=False)
+        def test_expected_failure():
+            assert False
     """
+    )
+    result = testdir.runpytest_subprocess("--tap-stream")
+
+    result.stdout.fnmatch_lines(
+        [
+            "ok 1 test_xfail_no_reason.py::test_unexpected_success "
+            "# TODO unexpected success",
+            "not ok 2 test_xfail_no_reason.py::test_expected_failure "
+            "# TODO expected failure",
+        ]
+    )
+
+
+def test_xfail_nonstrict(testdir):
+    """Non-strict xfails are treated as TODO directives."""
     testdir.makepyfile(
         """
         import pytest
 
-        @pytest.mark.xfail(reason='a reason')
-        def test_unexpected_pass():
+        @pytest.mark.xfail(strict=False, reason='a reason')
+        def test_unexpected_success():
             assert True
 
-        @pytest.mark.xfail(reason='a reason', strict=True)
-        def test_broken():
+        @pytest.mark.xfail(strict=False, reason='a reason')
+        def test_expected_failure():
+            assert False
+    """
+    )
+    result = testdir.runpytest_subprocess("--tap-stream")
+
+    result.stdout.fnmatch_lines(
+        [
+            "ok 1 test_xfail_nonstrict.py::test_unexpected_success "
+            "# TODO unexpected success: a reason",
+            "not ok 2 test_xfail_nonstrict.py::test_expected_failure "
+            "# TODO expected failure: a reason",
+        ]
+    )
+
+
+def test_xfail_strict(testdir):
+    """xfail strict mode handles expected behavior."""
+    testdir.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail(strict=True, reason='a reason')
+        def test_unexpected_success():
             assert True
+
+        @pytest.mark.xfail(strict=True, reason='a reason')
+        def test_expected_failure():
+            assert False
+    """
+    )
+    result = testdir.runpytest_subprocess("--tap-stream")
+
+    result.stdout.fnmatch_lines(
+        [
+            "not ok 1 test_xfail_strict.py::test_unexpected_success "
+            "# unexpected success: [XPASS(strict)] a reason",
+            "not ok 2 test_xfail_strict.py::test_expected_failure "
+            "# TODO expected failure: a reason",
+        ]
+    )
+
+
+def test_unittest_expected_failure(testdir):
+    """The plugin handles unittest's expectedFailure decorator behavior."""
+    testdir.makepyfile(
+        """
+        import pytest
+        import unittest
+
+        class TestExpectedFailure(unittest.TestCase):
+            @unittest.expectedFailure
+            def test_when_failing(self):
+                assert False
+
+            @unittest.expectedFailure
+            def test_when_passing(self):
+                assert True
     """
     )
     result = testdir.runpytest_subprocess("--tap-stream")
 
     result.stdout.fnmatch_lines(
         [
-            (
-                "ok 1 test_xfail_strict_function.py::test_unexpected_pass "
-                "# TODO unexpected success: a reason"
-            ),
-            "not ok 2 test_xfail_strict_function.py::test_broken # TODO",
-            "# [XPASS(strict)] a reason",
+            "not ok 1 test_unittest_expected_failure.py::"
+            "TestExpectedFailure.test_when_failing "
+            "# TODO expected failure",
+            "ok 2 test_unittest_expected_failure.py::"
+            "TestExpectedFailure.test_when_passing "
+            "# TODO unexpected success",
         ]
     )
