"""
Pytest configuration and fixtures for the NumPy test suite.
"""
import os
import sys
import tempfile
from contextlib import contextmanager
import warnings

import hypothesis
import pytest
import numpy

from numpy._core._multiarray_tests import get_fpu_mode
from numpy.testing._private.utils import NOGIL_BUILD

try:
    from scipy_doctest.conftest import dt_config
    HAVE_SCPDT = True
except ModuleNotFoundError:
    HAVE_SCPDT = False


_old_fpu_mode = None
_collect_results = {}

# Use a known and persistent tmpdir for hypothesis' caches, which can be
# cleared automatically by the OS or manually by the user.
hypothesis.configuration.set_hypothesis_home_dir(
    os.path.join(tempfile.gettempdir(), ".hypothesis")
)

# We register two custom profiles for NumPy - for details see
# https://hypothesis.readthedocs.io/en/latest/settings.html
# The first is designed for our own CI runs; the second also
# forces determinism and is designed for use via np.test().
hypothesis.settings.register_profile(
    name="numpy-profile", deadline=None, print_blob=True,
)
hypothesis.settings.register_profile(
    name="np.test() profile",
    deadline=None, print_blob=True, database=None, derandomize=True,
    suppress_health_check=list(hypothesis.HealthCheck),
)
# Note that the default profile is chosen based on the presence
# of pytest.ini, but can be overridden by passing the
# --hypothesis-profile=NAME argument to pytest.
_pytest_ini = os.path.join(os.path.dirname(__file__), "..", "pytest.ini")
hypothesis.settings.load_profile(
    "numpy-profile" if os.path.isfile(_pytest_ini) else "np.test() profile"
)
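
# For example, a local run can opt into the deterministic profile explicitly
# (illustrative invocation; the target path is a placeholder):
#
#     pytest numpy/ --hypothesis-profile="np.test() profile"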

# The experimental DType API is used in _umath_tests.
os.environ["NUMPY_EXPERIMENTAL_DTYPE_API"] = "1"

def pytest_configure(config):
    config.addinivalue_line("markers",
        "valgrind_error: Tests that are known to error under valgrind.")
    config.addinivalue_line("markers",
        "leaks_references: Tests that are known to leak references.")
    config.addinivalue_line("markers",
        "slow: Tests that are very slow.")
    config.addinivalue_line("markers",
        "slow_pypy: Tests that are very slow on pypy.")

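# The markers registered above are applied with the standard pytest decorator
# syntax, e.g. (illustrative sketch, not an actual NumPy test):
#
#     @pytest.mark.slow
#     def test_something_expensive():
#         ...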

def pytest_addoption(parser):
    parser.addoption("--available-memory", action="store", default=None,
                     help=("Set the amount of memory available for running "
                           "the test suite. This can result in tests that "
                           "require especially large amounts of memory being "
                           "skipped. Equivalent to setting the environment "
                           "variable NPY_AVAILABLE_MEM. Default: determined "
                           "automatically."))

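# For instance, a memory-constrained CI job might run the suite with
# (hypothetical invocation; the value is passed through to NPY_AVAILABLE_MEM
# unchanged, so it must use a format that variable accepts):
#
#     pytest numpy/ --available-memory="4 GB"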

gil_enabled_at_start = True
if NOGIL_BUILD:
    gil_enabled_at_start = sys._is_gil_enabled()


def pytest_sessionstart(session):
    available_mem = session.config.getoption('available_memory')
    if available_mem is not None:
        os.environ['NPY_AVAILABLE_MEM'] = available_mem


def pytest_terminal_summary(terminalreporter, exitstatus, config):
    if NOGIL_BUILD and not gil_enabled_at_start and sys._is_gil_enabled():
        tr = terminalreporter
        tr.ensure_newline()
        tr.section("GIL re-enabled", sep="=", red=True, bold=True)
        tr.line("The GIL was re-enabled at runtime during the tests.")
        tr.line("This can happen with no test failures if the RuntimeWarning")
        tr.line("raised by Python when this happens is filtered by a test.")
        tr.line("")
        tr.line("Please ensure all new C modules declare support for running")
        tr.line("without the GIL. Any new test that intentionally imports")
        tr.line("code that re-enables the GIL should do so in a subprocess.")
        pytest.exit("GIL re-enabled during tests", returncode=1)
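
# A hedged sketch of the subprocess approach mentioned above: run the
# GIL-re-enabling import in a fresh interpreter so this process keeps its
# GIL state (``some_gil_reenabling_module`` is a placeholder name):
#
#     import subprocess, sys
#     subprocess.run(
#         [sys.executable, "-c", "import some_gil_reenabling_module"],
#         check=True,
#     )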

# FIXME: remove this hook once yield tests are gone.
@pytest.hookimpl()
def pytest_itemcollected(item):
    """
    Check FPU precision mode was not changed during test collection.

    The clumsy way we do it here is mainly necessary because numpy
    still uses yield tests, which can execute code at test collection
    time.
    """
    global _old_fpu_mode

    mode = get_fpu_mode()

    if _old_fpu_mode is None:
        _old_fpu_mode = mode
    elif mode != _old_fpu_mode:
        _collect_results[item] = (_old_fpu_mode, mode)
        _old_fpu_mode = mode


@pytest.fixture(scope="function", autouse=True)
def check_fpu_mode(request):
    """
    Check FPU precision mode was not changed during the test.
    """
    old_mode = get_fpu_mode()
    yield
    new_mode = get_fpu_mode()

    if old_mode != new_mode:
        raise AssertionError(f"FPU precision mode changed from {old_mode:#x} "
                             f"to {new_mode:#x} during the test")

    collect_result = _collect_results.get(request.node)
    if collect_result is not None:
        old_mode, new_mode = collect_result
        raise AssertionError(f"FPU precision mode changed from {old_mode:#x} "
                             f"to {new_mode:#x} when collecting the test")


@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
    doctest_namespace['np'] = numpy
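
# With ``add_np`` above, docstring examples can use ``np`` without importing
# it, e.g. (illustrative doctest fragment):
#
#     >>> np.arange(3)
#     array([0, 1, 2])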

@pytest.fixture(autouse=True)
def env_setup(monkeypatch):
    monkeypatch.setenv('PYTHONHASHSEED', '0')


@pytest.fixture(params=[True, False])
def weak_promotion(request):
    """
    Fixture to ensure the "legacy" promotion state or change it to use the
    new weak promotion (plus warning).  `weak_promotion` should be used as a
    parameter in the test function (see the sketch below this fixture).
    """
    state = numpy._get_promotion_state()
    if request.param:
        numpy._set_promotion_state("weak_and_warn")
    else:
        numpy._set_promotion_state("legacy")

    yield request.param
    numpy._set_promotion_state(state)
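
# An illustrative use of the fixture above: request it by name so the test
# runs once per promotion state (sketch only, not an actual NumPy test):
#
#     def test_some_promotion_behavior(weak_promotion):
#         # True under "weak_and_warn", False under "legacy" promotion
#         ...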


if HAVE_SCPDT:

    @contextmanager
    def warnings_errors_and_rng(test=None):
        """Filter out the wall of DeprecationWarnings.
        """
        msgs = ["The numpy.linalg.linalg",
                "The numpy.fft.helper",
                "dep_util",
                "pkg_resources",
                "numpy.core.umath",
                "msvccompiler",
                "Deprecated call",
                "numpy.core",
                "`np.compat`",
                "Importing from numpy.matlib",
                "This function is deprecated.",    # random_integers
                "Data type alias 'a'",     # numpy.rec.fromfile
                "Arrays of 2-dimensional vectors",   # matlib.cross
                "`in1d` is deprecated", ]
        msg = "|".join(msgs)

        msgs_r = [
            "invalid value encountered",
            "divide by zero encountered"
        ]
        msg_r = "|".join(msgs_r)

        with warnings.catch_warnings():
            warnings.filterwarnings(
                'ignore', category=DeprecationWarning, message=msg
            )
            warnings.filterwarnings(
                'ignore', category=RuntimeWarning, message=msg_r
            )
            yield

    # find and check doctests under this context manager
    dt_config.user_context_mgr = warnings_errors_and_rng

    # numpy specific tweaks from refguide-check
    dt_config.rndm_markers.add('#uninitialized')
    dt_config.rndm_markers.add('# uninitialized')

    import doctest
    dt_config.optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS

    # recognize the StringDType repr
    dt_config.check_namespace['StringDType'] = numpy.dtypes.StringDType

    # temporary skips
    dt_config.skiplist = {
        'numpy.savez',    # unclosed file
        'numpy.matlib.savez',
        'numpy.__array_namespace_info__',
        'numpy.matlib.__array_namespace_info__',
    }

    # xfail problematic tutorials
    dt_config.pytest_extra_xfail = {
        'how-to-verify-bug.rst': '',
        'c-info.ufunc-tutorial.rst': '',
        'basics.interoperability.rst': 'needs pandas',
        'basics.dispatch.rst': 'errors out in /testing/overrides.py',
        'basics.subclassing.rst': '.. testcode:: admonitions not understood',
        'misc.rst': 'manipulates warnings',
    }

    # ignores are for things that fail doctest collection (optionals, etc.)
    dt_config.pytest_extra_ignore = [
        'numpy/distutils',
        'numpy/_core/cversions.py',
        'numpy/_pyinstaller',
        'numpy/random/_examples',
        'numpy/compat',
        'numpy/f2py/_backends/_distutils.py',
    ]