Coverage for /usr/local/lib/python3.7/site-packages/_pytest/skipping.py: 29%

1""" support for skip/xfail functions and markers. """
2from _pytest.config import hookimpl
3from _pytest.mark.evaluate import MarkEvaluator
4from _pytest.outcomes import fail
5from _pytest.outcomes import skip
6from _pytest.outcomes import xfail
9def pytest_addoption(parser):
10 group = parser.getgroup("general")
11 group.addoption(
12 "--runxfail",
13 action="store_true",
14 dest="runxfail",
15 default=False,
16 help="report the results of xfail tests as if they were not marked",
17 )
19 parser.addini(
20 "xfail_strict",
21 "default for the strict parameter of xfail "
22 "markers when not given explicitly (default: False)",
23 default=False,
24 type="bool",
25 )
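

# Illustrative usage of the option and ini value registered above (a sketch,
# not part of this module; the file contents below are assumptions):
#
#   # pytest.ini
#   [pytest]
#   xfail_strict = true
#
#   $ pytest --runxfail    # report xfail tests as if they were not marked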


def pytest_configure(config):
    if config.option.runxfail:
        # yay a hack
        import pytest

        old = pytest.xfail
        config._cleanup.append(lambda: setattr(pytest, "xfail", old))

        def nop(*args, **kwargs):
            pass

        nop.Exception = xfail.Exception
        setattr(pytest, "xfail", nop)

    config.addinivalue_line(
        "markers",
        "skip(reason=None): skip the given test function with an optional reason. "
        'Example: skip(reason="no way of currently testing this") skips the '
        "test.",
    )
    config.addinivalue_line(
        "markers",
        "skipif(condition): skip the given test function if eval(condition) "
        "results in a True value. Evaluation happens within the "
        "module global context. Example: skipif('sys.platform == \"win32\"') "
        "skips the test if we are on the win32 platform. see "
        "https://docs.pytest.org/en/latest/skipping.html",
    )
    config.addinivalue_line(
        "markers",
        "xfail(condition, reason=None, run=True, raises=None, strict=False): "
        "mark the test function as an expected failure if eval(condition) "
        "has a True value. Optionally specify a reason for better reporting "
        "and run=False if you don't even want to execute the test function. "
        "If only specific exception(s) are expected, you can list them in "
        "raises, and if the test fails in other ways, it will be reported as "
        "a true failure. See https://docs.pytest.org/en/latest/skipping.html",
    )
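

# Illustrative sketch of the markers documented above as they would appear in
# a test file (not part of this module; test names and reasons are
# assumptions):
#
#   import pytest
#
#   @pytest.mark.skip(reason="no way of currently testing this")
#   def test_skipped():
#       ...
#
#   @pytest.mark.skipif('sys.platform == "win32"')
#   def test_not_on_win32():
#       ...
#
#   @pytest.mark.xfail(raises=IndexError, reason="known bug")
#   def test_expected_failure():
#       [][0]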


@hookimpl(tryfirst=True)
def pytest_runtest_setup(item):
    # Check if skip or skipif are specified as pytest marks
    item._skipped_by_mark = False
    eval_skipif = MarkEvaluator(item, "skipif")
    if eval_skipif.istrue():
        item._skipped_by_mark = True
        skip(eval_skipif.getexplanation())

    for skip_info in item.iter_markers(name="skip"):
        item._skipped_by_mark = True
        if "reason" in skip_info.kwargs:
            skip(skip_info.kwargs["reason"])
        elif skip_info.args:
            skip(skip_info.args[0])
        else:
            skip("unconditional skip")

    item._evalxfail = MarkEvaluator(item, "xfail")
    check_xfail_no_run(item)
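

# How the skip branches above map onto marker spellings (a sketch; the
# reasons are assumptions):
#
#   @pytest.mark.skip                     -> skip("unconditional skip")
#   @pytest.mark.skip("legacy reason")    -> skip via the positional argument
#   @pytest.mark.skip(reason="explicit")  -> skip via the reason keyword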


@hookimpl(hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem):
    check_xfail_no_run(pyfuncitem)
    outcome = yield
    passed = outcome.excinfo is None
    if passed:
        # the test function returned without raising; a strict xfail
        # must be reported as a failure
        check_strict_xfail(pyfuncitem)


def check_xfail_no_run(item):
    """check xfail(run=False)"""
    if not item.config.option.runxfail:
        evalxfail = item._evalxfail
        if evalxfail.istrue():
            if not evalxfail.get("run", True):
                xfail("[NOTRUN] " + evalxfail.getexplanation())
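

# Illustrative effect of xfail(run=False) as handled above (a sketch; the
# test name and reason are assumptions):
#
#   @pytest.mark.xfail(run=False, reason="would crash the interpreter")
#   def test_crasher():
#       ...
#
# The test body is never executed and the item is reported as
# "[NOTRUN] would crash the interpreter", unless --runxfail is given.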


def check_strict_xfail(pyfuncitem):
    """check xfail(strict=True) for the given PASSING test"""
    evalxfail = pyfuncitem._evalxfail
    if evalxfail.istrue():
        strict_default = pyfuncitem.config.getini("xfail_strict")
        is_strict_xfail = evalxfail.get("strict", strict_default)
        if is_strict_xfail:
            del pyfuncitem._evalxfail
            explanation = evalxfail.getexplanation()
            fail("[XPASS(strict)] " + explanation, pytrace=False)
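

# Illustrative effect of xfail(strict=True) as handled above (a sketch; the
# test name and reason are assumptions):
#
#   @pytest.mark.xfail(strict=True, reason="expected to fail")
#   def test_unexpectedly_passes():
#       assert True
#
# Because the test passes, the run fails with "[XPASS(strict)] expected to
# fail" instead of reporting a plain XPASS.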


@hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    outcome = yield
    rep = outcome.get_result()
    evalxfail = getattr(item, "_evalxfail", None)
    # unittest special case, see setting of _unexpectedsuccess
    if hasattr(item, "_unexpectedsuccess") and rep.when == "call":
        if item._unexpectedsuccess:
            rep.longrepr = "Unexpected success: {}".format(item._unexpectedsuccess)
        else:
            rep.longrepr = "Unexpected success"
        rep.outcome = "failed"
    elif item.config.option.runxfail:
        pass  # don't interfere
    elif call.excinfo and call.excinfo.errisinstance(xfail.Exception):
        rep.wasxfail = "reason: " + call.excinfo.value.msg
        rep.outcome = "skipped"
    elif evalxfail and not rep.skipped and evalxfail.wasvalid() and evalxfail.istrue():
        if call.excinfo:
            if evalxfail.invalidraise(call.excinfo.value):
                rep.outcome = "failed"
            else:
                rep.outcome = "skipped"
                rep.wasxfail = evalxfail.getexplanation()
        elif call.when == "call":
            strict_default = item.config.getini("xfail_strict")
            is_strict_xfail = evalxfail.get("strict", strict_default)
            explanation = evalxfail.getexplanation()
            if is_strict_xfail:
                rep.outcome = "failed"
                rep.longrepr = "[XPASS(strict)] {}".format(explanation)
            else:
                rep.outcome = "passed"
                rep.wasxfail = explanation
    elif (
        getattr(item, "_skipped_by_mark", False)
        and rep.skipped
        and type(rep.longrepr) is tuple
    ):
        # skipped by mark.skipif; change the location of the failure
        # to point to the item definition, otherwise it will display
        # the location of where the skip exception was raised within pytest
        _, _, reason = rep.longrepr
        filename, line = item.location[:2]
        rep.longrepr = filename, line + 1, reason
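

# Summary of the report rewriting above (derived from the branches; a sketch,
# not exhaustive):
#   - the test raised xfail.Exception       -> outcome "skipped", wasxfail set
#   - xfail marker and the test raised      -> "skipped" + wasxfail, or
#                                              "failed" if raises= named other
#                                              exception types
#   - xfail marker and the call passed      -> "passed" + wasxfail, or "failed"
#                                              as [XPASS(strict)] when strict
#   - skipped by a skip/skipif marker       -> longrepr location is rewritten
#                                              to point at the test definition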


# called by terminalreporter progress reporting
def pytest_report_teststatus(report):
    if hasattr(report, "wasxfail"):
        if report.skipped:
            return "xfailed", "x", "XFAIL"
        elif report.passed:
            return "xpassed", "X", "XPASS"
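

# Illustrative terminal statuses produced by the mapping above (a sketch):
#   "x" is shown in the progress output for xfailed tests (XFAIL when verbose)
#   "X" is shown for xpassed tests (XPASS when verbose)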