@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Hook wrapper that captures unexpected test exceptions onto the test_info fixture.

    Wraps report generation for each test phase. When the ``call`` phase raised
    an exception that is neither an allowed pytest outcome (``Skipped``,
    ``Failed``) nor an xfail, the exception message is stored on the test's
    ``test_info`` fixture as ``test_info.error`` so the fixture's teardown can
    fail the test with that message.

    Args:
        item: The pytest test item being reported on.
        call: The ``CallInfo`` for the current phase; ``call.excinfo`` is the
            captured exception info, or None if no exception was raised.
    """
    outcome = yield
    report = outcome.get_result()
    # Tests that do not request the test_info fixture yield None here;
    # guard against that so the hook itself never raises AttributeError.
    test_info = item.funcargs.get("test_info", None)
    if test_info is not None and call.excinfo is not None and report.when == "call":
        # Skipped/Failed outcomes and xfails are expected, allowed exceptions —
        # only record genuinely unexpected errors.
        exc_type = call.excinfo.type
        xfail = hasattr(report, "wasxfail")
        if exc_type not in (Skipped, Failed) and not xfail:
            # Capture the exception message in test_info for later reporting
            test_info.error = str(call.excinfo.value)