author    DongHun Kwak <dh0128.kwak@samsung.com>  2020-12-31 09:41:54 +0900
committer DongHun Kwak <dh0128.kwak@samsung.com>  2020-12-31 09:41:54 +0900
commit    7da6c35e6b03a56959d70ecbdf4812cd480cb549 (patch)
tree      2515e3dbb938b674007bc16804c10575b598c5de
parent    e1f563b48f6c6abe918994862ed40e1bc5e8194a (diff)
Imported Upstream version 1.19.3 (tag: upstream/1.19.3)
-rw-r--r--  .travis.yml                                2
-rw-r--r--  azure-pipelines.yml                       13
-rw-r--r--  azure-steps-windows.yml                   25
-rw-r--r--  doc/changelog/1.19.3-changelog.rst        31
-rw-r--r--  doc/source/release.rst                     1
-rw-r--r--  doc/source/release/1.19.3-notes.rst       46
-rw-r--r--  numpy/core/setup.py                       12
-rw-r--r--  numpy/core/src/multiarray/buffer.c       105
-rw-r--r--  numpy/core/src/multiarray/usertypes.c      3
-rw-r--r--  numpy/core/tests/test_multiarray.py       24
-rw-r--r--  numpy/distutils/command/autodist.py       33
-rw-r--r--  numpy/distutils/command/config.py         14
-rw-r--r--  numpy/distutils/fcompiler/__init__.py      2
-rw-r--r--  numpy/distutils/fcompiler/nv.py           55
-rw-r--r--  numpy/random/_examples/cython/setup.py     4
-rw-r--r--  numpy/random/tests/test_extending.py       1
-rw-r--r--  numpy/tests/test_public_api.py             1
-rw-r--r--  pavement.py                                2
-rw-r--r--  pyproject.toml                             4
-rw-r--r--  pytest.ini                                 3
-rwxr-xr-x  setup.py                                  65
-rw-r--r--  test_requirements.txt                      6
-rw-r--r--  tools/openblas_support.py                199
23 files changed, 490 insertions, 161 deletions
diff --git a/.travis.yml b/.travis.yml
index b863660f7..04a0ab423 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -46,7 +46,7 @@ jobs:
- stage: Comprehensive tests
python: 3.6
- python: 3.7
- - python: 3.9-dev
+ - python: 3.9
- python: 3.6
env: USE_DEBUG=1
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 80b5b9770..64f22866c 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -71,6 +71,9 @@ stages:
Python36:
PYTHON_VERSION: '3.6'
USE_OPENBLAS: '1'
+ Python39:
+ PYTHON_VERSION: '3.9'
+ USE_OPENBLAS: '1'
Python36-ILP64:
PYTHON_VERSION: '3.6'
NPY_USE_BLAS_ILP64: '1'
@@ -208,6 +211,11 @@ stages:
PYTHON_ARCH: 'x86'
TEST_MODE: fast
BITS: 32
+ Python39-32bit-fast:
+ PYTHON_VERSION: '3.9'
+ PYTHON_ARCH: 'x86'
+ TEST_MODE: fast
+ BITS: 32
Python36-64bit-full:
PYTHON_VERSION: '3.6'
PYTHON_ARCH: 'x64'
@@ -223,6 +231,11 @@ stages:
PYTHON_ARCH: 'x64'
TEST_MODE: full
BITS: 64
+ Python39-64bit-full:
+ PYTHON_VERSION: '3.9'
+ PYTHON_ARCH: 'x64'
+ TEST_MODE: full
+ BITS: 64
NPY_USE_BLAS_ILP64: '1'
OPENBLAS_SUFFIX: '64_'
steps:
diff --git a/azure-steps-windows.yml b/azure-steps-windows.yml
index 04c578f7c..7a0e98adf 100644
--- a/azure-steps-windows.yml
+++ b/azure-steps-windows.yml
@@ -4,20 +4,22 @@ steps:
versionSpec: $(PYTHON_VERSION)
addToPath: true
architecture: $(PYTHON_ARCH)
-- script: python -m pip install --upgrade pip
+
+- script: python -m pip install --upgrade pip wheel
displayName: 'Install tools'
+
- script: python -m pip install -r test_requirements.txt
displayName: 'Install dependencies; some are optional to avoid test skips'
+
- powershell: |
$ErrorActionPreference = "Stop"
# Download and get the path to "openblas.a". We cannot copy it
# to $PYTHON_EXE's directory since that is on a different drive which
# mingw does not like. Instead copy it to a directory and set OPENBLAS,
# since OPENBLAS will be picked up by the openblas discovery
- python -m pip install
$target = $(python tools/openblas_support.py)
mkdir openblas
- echo Copying $target to openblas/openblas$env:OPENBLAS_SUFFIX.a
+ echo "Copying $target to openblas/openblas$env:OPENBLAS_SUFFIX.a"
cp $target openblas/openblas$env:OPENBLAS_SUFFIX.a
If ( Test-Path env:NPY_USE_BLAS_ILP64 ){
echo "##vso[task.setvariable variable=OPENBLAS64_]$pwd\openblas"
@@ -27,36 +29,41 @@ steps:
displayName: 'Download / Install OpenBLAS'
- powershell: |
- choco install -y mingw --forcex86 --force --version=5.3.0
+ choco install -y mingw --forcex86 --force --version=7.3.0
+ refreshenv
displayName: 'Install 32-bit mingw for 32-bit builds'
condition: eq(variables['BITS'], 32)
# NOTE: for Windows builds it seems much more tractable to use runtests.py
# vs. manual setup.py and then runtests.py for testing only
+
- powershell: |
If ($(BITS) -eq 32) {
$env:CFLAGS = "-m32"
$env:LDFLAGS = "-m32"
- $env:PATH = "C:\\tools\\mingw32\\bin;" + $env:PATH
- refreshenv
+ $env:PATH = "C:\\ProgramData\\chocolatey\\lib\\mingw\\tools\\install\\mingw$(BITS)\\bin;" + $env:PATH
}
python -c "from tools import openblas_support; openblas_support.make_init('numpy')"
- pip wheel -v -v -v --no-build-isolation --no-use-pep517 --wheel-dir=dist .
+ python -m pip wheel -v -v -v --no-build-isolation --no-use-pep517 --wheel-dir=dist .
ls dist -r | Foreach-Object {
- pip install $_.FullName
+ python -m pip install $_.FullName
}
displayName: 'Build NumPy'
+
- bash: |
pushd . && cd .. && target=$(python -c "import numpy, os; print(os.path.abspath(os.path.join(os.path.dirname(numpy.__file__), '.libs')))") && popd
- pip download -d destination --only-binary :all: --no-deps numpy==1.14
+ python -m pip download -d destination --only-binary :all: --no-deps numpy==1.14
cd destination && unzip numpy*.whl && cp numpy/.libs/*.dll $target
ls $target
displayName: 'Add extraneous & older DLL to numpy/.libs to probe DLL handling robustness'
condition: eq(variables['PYTHON_VERSION'], '3.6')
- script: pushd . && cd .. && python -c "from ctypes import windll; windll.kernel32.SetDefaultDllDirectories(0x00000800); import numpy" && popd
displayName: 'For gh-12667; Windows DLL resolution'
+ condition: eq(variables['PYTHON_VERSION'], '3.6')
+
- script: python runtests.py -n --show-build-log --mode=$(TEST_MODE) -- -rsx --junitxml=junit/test-results.xml
displayName: 'Run NumPy Test Suite'
+
- task: PublishTestResults@2
condition: succeededOrFailed()
inputs:
diff --git a/doc/changelog/1.19.3-changelog.rst b/doc/changelog/1.19.3-changelog.rst
new file mode 100644
index 000000000..5e8dfa10b
--- /dev/null
+++ b/doc/changelog/1.19.3-changelog.rst
@@ -0,0 +1,31 @@
+
+Contributors
+============
+
+A total of 8 people contributed to this release. People with a "+" by their
+names contributed a patch for the first time.
+
+* Charles Harris
+* Chris Brown +
+* Daniel Vanzo +
+* E. Madison Bray +
+* Hugo van Kemenade +
+* Ralf Gommers
+* Sebastian Berg
+* @danbeibei +
+
+Pull requests merged
+====================
+
+A total of 10 pull requests were merged for this release.
+
+* `#17298 <https://github.com/numpy/numpy/pull/17298>`__: BLD: set upper versions for build dependencies
+* `#17336 <https://github.com/numpy/numpy/pull/17336>`__: BUG: Set deprecated fields to null in PyArray_InitArrFuncs
+* `#17446 <https://github.com/numpy/numpy/pull/17446>`__: ENH: Warn on unsupported Python 3.10+
+* `#17450 <https://github.com/numpy/numpy/pull/17450>`__: MAINT: Update test_requirements.txt.
+* `#17522 <https://github.com/numpy/numpy/pull/17522>`__: ENH: Support for the NVIDIA HPC SDK nvfortran compiler
+* `#17568 <https://github.com/numpy/numpy/pull/17568>`__: BUG: Cygwin Workaround for #14787 on affected platforms
+* `#17647 <https://github.com/numpy/numpy/pull/17647>`__: BUG: Fix memory leak of buffer-info cache due to relaxed strides
+* `#17652 <https://github.com/numpy/numpy/pull/17652>`__: MAINT: Backport openblas_support from master.
+* `#17653 <https://github.com/numpy/numpy/pull/17653>`__: TST: Add Python 3.9 to the CI testing on Windows, Mac.
+* `#17660 <https://github.com/numpy/numpy/pull/17660>`__: TST: Simplify source path names in test_extending.
diff --git a/doc/source/release.rst b/doc/source/release.rst
index a1d17d364..6f70c9fc3 100644
--- a/doc/source/release.rst
+++ b/doc/source/release.rst
@@ -5,6 +5,7 @@ Release Notes
.. toctree::
:maxdepth: 3
+ 1.19.3 <release/1.19.3-notes>
1.19.2 <release/1.19.2-notes>
1.19.1 <release/1.19.1-notes>
1.19.0 <release/1.19.0-notes>
diff --git a/doc/source/release/1.19.3-notes.rst b/doc/source/release/1.19.3-notes.rst
new file mode 100644
index 000000000..290bfdae3
--- /dev/null
+++ b/doc/source/release/1.19.3-notes.rst
@@ -0,0 +1,46 @@
+.. currentmodule:: numpy
+
+==========================
+NumPy 1.19.3 Release Notes
+==========================
+
+NumPy 1.19.3 is a small maintenance release with two major improvements:
+
+- Python 3.9 binary wheels on all supported platforms.
+- OpenBLAS fixes for Windows 10 version 2004 fmod bug.
+
+This release supports Python 3.6-3.9 and is linked with OpenBLAS 0.3.12 to avoid
+some of the fmod problems on Windows 10 version 2004. Microsoft is aware of the
+problem and users should upgrade when the fix becomes available; the fix here
+is limited in scope.
+
+Contributors
+============
+
+A total of 8 people contributed to this release. People with a "+" by their
+names contributed a patch for the first time.
+
+* Charles Harris
+* Chris Brown +
+* Daniel Vanzo +
+* E. Madison Bray +
+* Hugo van Kemenade +
+* Ralf Gommers
+* Sebastian Berg
+* @danbeibei +
+
+Pull requests merged
+====================
+
+A total of 10 pull requests were merged for this release.
+
+* `#17298 <https://github.com/numpy/numpy/pull/17298>`__: BLD: set upper versions for build dependencies
+* `#17336 <https://github.com/numpy/numpy/pull/17336>`__: BUG: Set deprecated fields to null in PyArray_InitArrFuncs
+* `#17446 <https://github.com/numpy/numpy/pull/17446>`__: ENH: Warn on unsupported Python 3.10+
+* `#17450 <https://github.com/numpy/numpy/pull/17450>`__: MAINT: Update test_requirements.txt.
+* `#17522 <https://github.com/numpy/numpy/pull/17522>`__: ENH: Support for the NVIDIA HPC SDK nvfortran compiler
+* `#17568 <https://github.com/numpy/numpy/pull/17568>`__: BUG: Cygwin Workaround for #14787 on affected platforms
+* `#17647 <https://github.com/numpy/numpy/pull/17647>`__: BUG: Fix memory leak of buffer-info cache due to relaxed strides
+* `#17652 <https://github.com/numpy/numpy/pull/17652>`__: MAINT: Backport openblas_support from master.
+* `#17653 <https://github.com/numpy/numpy/pull/17653>`__: TST: Add Python 3.9 to the CI testing on Windows, Mac.
+* `#17660 <https://github.com/numpy/numpy/pull/17660>`__: TST: Simplify source path names in test_extending.
diff --git a/numpy/core/setup.py b/numpy/core/setup.py
index fcc422545..0c20bfd2f 100644
--- a/numpy/core/setup.py
+++ b/numpy/core/setup.py
@@ -105,7 +105,7 @@ def win32_checks(deflist):
if a == "Intel" or a == "AMD64":
deflist.append('FORCE_NO_LONG_DOUBLE_FORMATTING')
-def check_math_capabilities(config, moredefs, mathlibs):
+def check_math_capabilities(config, ext, moredefs, mathlibs):
def check_func(func_name):
return config.check_func(func_name, libraries=mathlibs,
decl=True, call=True)
@@ -170,6 +170,14 @@ def check_math_capabilities(config, moredefs, mathlibs):
for dec, fn in OPTIONAL_FUNCTION_ATTRIBUTES:
if config.check_gcc_function_attribute(dec, fn):
moredefs.append((fname2def(fn), 1))
+ if fn == 'attribute_target_avx512f':
+ # GH-14787: Work around GCC<8.4 bug when compiling with AVX512
+ # support on Windows-based platforms
+ if (sys.platform in ('win32', 'cygwin') and
+ config.check_compiler_gcc() and
+ not config.check_gcc_version_at_least(8, 4)):
+ ext.extra_compile_args.extend(
+ ['-ffixed-xmm%s' % n for n in range(16, 32)])
for dec, fn, code, header in OPTIONAL_FUNCTION_ATTRIBUTES_WITH_INTRINSICS:
if config.check_gcc_function_attribute_with_intrinsics(dec, fn, code,
@@ -431,7 +439,7 @@ def configuration(parent_package='',top_path=None):
mathlibs = check_mathlib(config_cmd)
moredefs.append(('MATHLIB', ','.join(mathlibs)))
- check_math_capabilities(config_cmd, moredefs, mathlibs)
+ check_math_capabilities(config_cmd, ext, moredefs, mathlibs)
moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])
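The setup.py hunk above threads the extension object through check_math_capabilities so that, on Windows and Cygwin with GCC older than 8.4, the gh-14787 workaround can keep the compiler away from the upper AVX-512 registers via -ffixed-xmm flags. For illustration only (this snippet is not part of the commit), the flag list produced by that comprehension:

    # Illustrative: the extra compile args appended when the GCC < 8.4
    # AVX512 workaround in check_math_capabilities applies.
    flags = ['-ffixed-xmm%s' % n for n in range(16, 32)]
    print(flags)   # ['-ffixed-xmm16', '-ffixed-xmm17', ..., '-ffixed-xmm31']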
diff --git a/numpy/core/src/multiarray/buffer.c b/numpy/core/src/multiarray/buffer.c
index 9a1f7b230..8ce852fc7 100644
--- a/numpy/core/src/multiarray/buffer.c
+++ b/numpy/core/src/multiarray/buffer.c
@@ -443,7 +443,7 @@ static PyObject *_buffer_info_cache = NULL;
/* Fill in the info structure */
static _buffer_info_t*
-_buffer_info_new(PyObject *obj)
+_buffer_info_new(PyObject *obj, npy_bool f_contiguous)
{
_buffer_info_t *info;
_tmp_string_t fmt = {NULL, 0, 0};
@@ -513,9 +513,43 @@ _buffer_info_new(PyObject *obj)
goto fail;
}
info->strides = info->shape + PyArray_NDIM(arr);
- for (k = 0; k < PyArray_NDIM(arr); ++k) {
- info->shape[k] = PyArray_DIMS(arr)[k];
- info->strides[k] = PyArray_STRIDES(arr)[k];
+
+#if NPY_RELAXED_STRIDES_CHECKING
+ /*
+ * When NPY_RELAXED_STRIDES_CHECKING is used, some buffer users
+ * may expect a contiguous buffer to have well formatted strides
+ * also when a dimension is 1, but we do not guarantee this
+ * internally. Thus, recalculate strides for contiguous arrays.
+ * (This is unnecessary, but has no effect in the case where
+ * NPY_RELAXED_STRIDES CHECKING is disabled.)
+ */
+ if (PyArray_IS_C_CONTIGUOUS(arr) && !(
+ f_contiguous && PyArray_IS_F_CONTIGUOUS(arr))) {
+ Py_ssize_t sd = PyArray_ITEMSIZE(arr);
+ for (k = info->ndim-1; k >= 0; --k) {
+ info->shape[k] = PyArray_DIMS(arr)[k];
+ info->strides[k] = sd;
+ sd *= info->shape[k];
+ }
+ }
+ else if (PyArray_IS_F_CONTIGUOUS(arr)) {
+ Py_ssize_t sd = PyArray_ITEMSIZE(arr);
+ for (k = 0; k < info->ndim; ++k) {
+ info->shape[k] = PyArray_DIMS(arr)[k];
+ info->strides[k] = sd;
+ sd *= info->shape[k];
+ }
+ }
+ else {
+#else /* NPY_RELAXED_STRIDES_CHECKING */
+ /* We can always use the arrays strides directly */
+ {
+#endif
+
+ for (k = 0; k < PyArray_NDIM(arr); ++k) {
+ info->shape[k] = PyArray_DIMS(arr)[k];
+ info->strides[k] = PyArray_STRIDES(arr)[k];
+ }
}
}
Py_INCREF(descr);
@@ -579,7 +613,7 @@ _buffer_info_free(_buffer_info_t *info)
/* Get buffer info from the global dictionary */
static _buffer_info_t*
-_buffer_get_info(PyObject *obj)
+_buffer_get_info(PyObject *obj, npy_bool f_contiguous)
{
PyObject *key = NULL, *item_list = NULL, *item = NULL;
_buffer_info_t *info = NULL, *old_info = NULL;
@@ -592,7 +626,7 @@ _buffer_get_info(PyObject *obj)
}
/* Compute information */
- info = _buffer_info_new(obj);
+ info = _buffer_info_new(obj, f_contiguous);
if (info == NULL) {
return NULL;
}
@@ -605,15 +639,35 @@ _buffer_get_info(PyObject *obj)
item_list = PyDict_GetItem(_buffer_info_cache, key);
if (item_list != NULL) {
+ Py_ssize_t item_list_length = PyList_GET_SIZE(item_list);
Py_INCREF(item_list);
- if (PyList_GET_SIZE(item_list) > 0) {
- item = PyList_GetItem(item_list, PyList_GET_SIZE(item_list) - 1);
+ if (item_list_length > 0) {
+ item = PyList_GetItem(item_list, item_list_length - 1);
old_info = (_buffer_info_t*)PyLong_AsVoidPtr(item);
-
if (_buffer_info_cmp(info, old_info) == 0) {
_buffer_info_free(info);
info = old_info;
}
+ else {
+ if (item_list_length > 1 && info->ndim > 1) {
+ /*
+ * Some arrays are C- and F-contiguous and if they have more
+ * than one dimension, the buffer-info may differ between
+ * the two due to RELAXED_STRIDES_CHECKING.
+ * If we export both buffers, the first stored one may be
+ * the one for the other contiguity, so check both.
+ * This is generally very unlikely in all other cases, since
+ * in all other cases the first one will match unless array
+ * metadata was modified in-place (which is discouraged).
+ */
+ item = PyList_GetItem(item_list, item_list_length - 2);
+ old_info = (_buffer_info_t*)PyLong_AsVoidPtr(item);
+ if (_buffer_info_cmp(info, old_info) == 0) {
+ _buffer_info_free(info);
+ info = old_info;
+ }
+ }
+ }
}
}
else {
@@ -720,7 +774,7 @@ array_getbuffer(PyObject *obj, Py_buffer *view, int flags)
}
/* Fill in information */
- info = _buffer_get_info(obj);
+ info = _buffer_get_info(obj, (flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS);
if (info == NULL) {
goto fail;
}
@@ -756,35 +810,6 @@ array_getbuffer(PyObject *obj, Py_buffer *view, int flags)
}
if ((flags & PyBUF_STRIDES) == PyBUF_STRIDES) {
view->strides = info->strides;
-
-#ifdef NPY_RELAXED_STRIDES_CHECKING
- /*
- * If NPY_RELAXED_STRIDES_CHECKING is on, the array may be
- * contiguous, but it won't look that way to Python when it
- * tries to determine contiguity by looking at the strides
- * (since one of the elements may be -1). In that case, just
- * regenerate strides from shape.
- */
- if (PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS) &&
- !((flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS)) {
- Py_ssize_t sd = view->itemsize;
- int i;
-
- for (i = view->ndim-1; i >= 0; --i) {
- view->strides[i] = sd;
- sd *= view->shape[i];
- }
- }
- else if (PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)) {
- Py_ssize_t sd = view->itemsize;
- int i;
-
- for (i = 0; i < view->ndim; ++i) {
- view->strides[i] = sd;
- sd *= view->shape[i];
- }
- }
-#endif
}
else {
view->strides = NULL;
@@ -814,7 +839,7 @@ gentype_getbuffer(PyObject *self, Py_buffer *view, int flags)
}
/* Fill in information */
- info = _buffer_get_info(self);
+ info = _buffer_get_info(self, 0);
if (info == NULL) {
goto fail;
}
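With relaxed strides checking, the stride stored for a length-1 dimension is arbitrary, so _buffer_info_new above now rebuilds strides from the itemsize whenever the array is C- or F-contiguous instead of copying PyArray_STRIDES directly. A minimal Python sketch of the same arithmetic (illustrative only, not NumPy code):

    # Recompute contiguous strides from shape and itemsize, mirroring the
    # two C loops added in _buffer_info_new: last-dimension-first for C
    # order, first-dimension-first for F order.
    def contiguous_strides(shape, itemsize, order='C'):
        strides = [0] * len(shape)
        sd = itemsize
        dims = reversed(range(len(shape))) if order == 'C' else range(len(shape))
        for k in dims:
            strides[k] = sd
            sd *= shape[k]
        return tuple(strides)

    print(contiguous_strides((1, 10, 10), 8, 'C'))   # (800, 80, 8)
    print(contiguous_strides((1, 10, 10), 8, 'F'))   # (8, 8, 80)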
diff --git a/numpy/core/src/multiarray/usertypes.c b/numpy/core/src/multiarray/usertypes.c
index 997467b4d..f9c3c057c 100644
--- a/numpy/core/src/multiarray/usertypes.c
+++ b/numpy/core/src/multiarray/usertypes.c
@@ -126,6 +126,9 @@ PyArray_InitArrFuncs(PyArray_ArrFuncs *f)
f->scalarkind = NULL;
f->cancastscalarkindto = NULL;
f->cancastto = NULL;
+ f->fastclip = NULL;
+ f->fastputmask = NULL;
+ f->fasttake = NULL;
}
diff --git a/numpy/core/tests/test_multiarray.py b/numpy/core/tests/test_multiarray.py
index 002603ef0..2e8db7518 100644
--- a/numpy/core/tests/test_multiarray.py
+++ b/numpy/core/tests/test_multiarray.py
@@ -7179,9 +7179,10 @@ class TestNewBufferProtocol:
x3 = np.arange(dt3.itemsize, dtype=np.int8).view(dt3)
self._check_roundtrip(x3)
- def test_relaxed_strides(self):
- # Test that relaxed strides are converted to non-relaxed
- c = np.ones((1, 10, 10), dtype='i8')
+ @pytest.mark.valgrind_error(reason="leaks buffer info cache temporarily.")
+ def test_relaxed_strides(self, c=np.ones((1, 10, 10), dtype='i8')):
+ # Note: c defined as parameter so that it is persistent and leak
+ # checks will notice gh-16934 (buffer info cache leak).
# Check for NPY_RELAXED_STRIDES_CHECKING:
if np.ones((10, 1), order="C").flags.f_contiguous:
@@ -7206,6 +7207,23 @@ class TestNewBufferProtocol:
arr, ['C_CONTIGUOUS'])
assert_(strides[-1] == 8)
+ @pytest.mark.valgrind_error(reason="leaks buffer info cache temporarily.")
+ @pytest.mark.skipif(not np.ones((10, 1), order="C").flags.f_contiguous,
+ reason="Test is unnecessary (but fails) without relaxed strides.")
+ def test_relaxed_strides_buffer_info_leak(self, arr=np.ones((1, 10))):
+ """Test that alternating export of C- and F-order buffers from
+ an array which is both C- and F-order when relaxed strides is
+ active works.
+ This test defines array in the signature to ensure leaking more
+ references every time the test is run (catching the leak with
+ pytest-leaks).
+ """
+ for i in range(10):
+ _, s = _multiarray_tests.get_buffer_info(arr, ['F_CONTIGUOUS'])
+ assert s == (8, 8)
+ _, s = _multiarray_tests.get_buffer_info(arr, ['C_CONTIGUOUS'])
+ assert s == (80, 8)
+
def test_out_of_order_fields(self):
dt = np.dtype(dict(
formats=['<i4', '<i4'],
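The new tests rely on a (1, 10) array being simultaneously C- and F-contiguous, which is exactly the situation where the cached buffer info can differ between the two export requests. A small sketch of that property using only public NumPy APIs (the test itself goes through the internal _multiarray_tests.get_buffer_info helper):

    import numpy as np

    # A leading length-1 dimension makes the array both C- and F-contiguous.
    arr = np.ones((1, 10))
    print(arr.flags['C_CONTIGUOUS'], arr.flags['F_CONTIGUOUS'])   # True True

    # Row-major vs. column-major strides for that shape with an 8-byte
    # itemsize, on a typical build -- the two stride tuples the test
    # expects from the C- and F-contiguous buffer exports.
    print(np.ones((1, 10), order='C').strides)   # (80, 8)
    print(np.ones((1, 10), order='F').strides)   # (8, 8)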
diff --git a/numpy/distutils/command/autodist.py b/numpy/distutils/command/autodist.py
index 8f6436004..b72d0cab1 100644
--- a/numpy/distutils/command/autodist.py
+++ b/numpy/distutils/command/autodist.py
@@ -46,15 +46,16 @@ def check_restrict(cmd):
return ''
-def check_compiler_gcc4(cmd):
- """Return True if the C compiler is GCC 4.x."""
+def check_compiler_gcc(cmd):
+ """Check if the compiler is GCC."""
+
cmd._check_compiler()
body = textwrap.dedent("""
int
main()
{
- #if (! defined __GNUC__) || (__GNUC__ < 4)
- #error gcc >= 4 required
+ #if (! defined __GNUC__)
+ #error gcc required
#endif
return 0;
}
@@ -62,6 +63,30 @@ def check_compiler_gcc4(cmd):
return cmd.try_compile(body, None, None)
+def check_gcc_version_at_least(cmd, major, minor=0, patchlevel=0):
+ """
+ Check that the gcc version is at least the specified version."""
+
+ cmd._check_compiler()
+ version = '.'.join([str(major), str(minor), str(patchlevel)])
+ body = textwrap.dedent("""
+ int
+ main()
+ {
+ #if (! defined __GNUC__) || (__GNUC__ < %(major)d) || \\
+ (__GNUC_MINOR__ < %(minor)d) || \\
+ (__GNUC_PATCHLEVEL__ < %(patchlevel)d)
+ #error gcc >= %(version)s required
+ #endif
+ return 0;
+ }
+ """)
+ kw = {'version': version, 'major': major, 'minor': minor,
+ 'patchlevel': patchlevel}
+
+ return cmd.try_compile(body % kw, None, None)
+
+
def check_gcc_function_attribute(cmd, attribute, name):
"""Return True if the given function attribute is supported."""
cmd._check_compiler()
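check_gcc_version_at_least compiles a tiny probe program whose preprocessor check fails on compilers older than the requested version. For illustration only, a snippet expanding the probe-program template for the GCC >= 8.4 check used in numpy/core/setup.py:

    # Illustrative: expand the probe-program template for major=8, minor=4.
    kw = {'version': '8.4.0', 'major': 8, 'minor': 4, 'patchlevel': 0}
    body = """
    int
    main()
    {
    #if (! defined __GNUC__) || (__GNUC__ < %(major)d) || \\
        (__GNUC_MINOR__ < %(minor)d) || \\
        (__GNUC_PATCHLEVEL__ < %(patchlevel)d)
    #error gcc >= %(version)s required
    #endif
        return 0;
    }
    """
    print(body % kw)   # try_compile() succeeds only when this builds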
diff --git a/numpy/distutils/command/config.py b/numpy/distutils/command/config.py
index e54a54449..60881f4a3 100644
--- a/numpy/distutils/command/config.py
+++ b/numpy/distutils/command/config.py
@@ -20,9 +20,10 @@ from numpy.distutils.mingw32ccompiler import generate_manifest
from numpy.distutils.command.autodist import (check_gcc_function_attribute,
check_gcc_function_attribute_with_intrinsics,
check_gcc_variable_attribute,
+ check_gcc_version_at_least,
check_inline,
check_restrict,
- check_compiler_gcc4)
+ check_compiler_gcc)
LANG_EXT['f77'] = '.f'
LANG_EXT['f90'] = '.f90'
@@ -416,9 +417,9 @@ class config(old_config):
otherwise."""
return check_restrict(self)
- def check_compiler_gcc4(self):
- """Return True if the C compiler is gcc >= 4."""
- return check_compiler_gcc4(self)
+ def check_compiler_gcc(self):
+ """Return True if the C compiler is gcc"""
+ return check_compiler_gcc(self)
def check_gcc_function_attribute(self, attribute, name):
return check_gcc_function_attribute(self, attribute, name)
@@ -431,6 +432,11 @@ class config(old_config):
def check_gcc_variable_attribute(self, attribute):
return check_gcc_variable_attribute(self, attribute)
+ def check_gcc_version_at_least(self, major, minor=0, patchlevel=0):
+ """Return True if the GCC version is greater than or equal to the
+ specified version."""
+ return check_gcc_version_at_least(self, major, minor, patchlevel)
+
def get_output(self, body, headers=None, include_dirs=None,
libraries=None, library_dirs=None,
lang="c", use_tee=None):
diff --git a/numpy/distutils/fcompiler/__init__.py b/numpy/distutils/fcompiler/__init__.py
index 1c3069363..31c992d8f 100644
--- a/numpy/distutils/fcompiler/__init__.py
+++ b/numpy/distutils/fcompiler/__init__.py
@@ -745,7 +745,7 @@ _default_compilers = (
('win32', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95',
'intelvem', 'intelem', 'flang')),
('cygwin.*', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95')),
- ('linux.*', ('gnu95', 'intel', 'lahey', 'pg', 'absoft', 'nag', 'vast', 'compaq',
+ ('linux.*', ('gnu95', 'intel', 'lahey', 'pg', 'nv', 'absoft', 'nag', 'vast', 'compaq',
'intele', 'intelem', 'gnu', 'g95', 'pathf95', 'nagfor')),
('darwin.*', ('gnu95', 'nag', 'absoft', 'ibm', 'intel', 'gnu', 'g95', 'pg')),
('sunos.*', ('sun', 'gnu', 'gnu95', 'g95')),
diff --git a/numpy/distutils/fcompiler/nv.py b/numpy/distutils/fcompiler/nv.py
new file mode 100644
index 000000000..8e9f16835
--- /dev/null
+++ b/numpy/distutils/fcompiler/nv.py
@@ -0,0 +1,55 @@
+import sys
+
+from numpy.distutils.fcompiler import FCompiler
+
+compilers = ['NVHPCFCompiler']
+
+class NVHPCFCompiler(FCompiler):
+ """ NVIDIA High Performance Computing (HPC) SDK Fortran Compiler
+
+ https://developer.nvidia.com/hpc-sdk
+
+ Since august 2020 the NVIDIA HPC SDK includes the compilers formerly known as The Portland Group compilers,
+ https://www.pgroup.com/index.htm.
+ See also `numpy.distutils.fcompiler.pg`.
+ """
+
+ compiler_type = 'nv'
+ description = 'NVIDIA HPC SDK'
+ version_pattern = r'\s*(nvfortran|(pg(f77|f90|fortran)) \(aka nvfortran\)) (?P<version>[\d.-]+).*'
+
+ executables = {
+ 'version_cmd': ["<F90>", "-V"],
+ 'compiler_f77': ["nvfortran"],
+ 'compiler_fix': ["nvfortran", "-Mfixed"],
+ 'compiler_f90': ["nvfortran"],
+ 'linker_so': ["<F90>"],
+ 'archiver': ["ar", "-cr"],
+ 'ranlib': ["ranlib"]
+ }
+ pic_flags = ['-fpic']
+
+ module_dir_switch = '-module '
+ module_include_switch = '-I'
+
+ def get_flags(self):
+ opt = ['-Minform=inform', '-Mnosecond_underscore']
+ return self.pic_flags + opt
+
+ def get_flags_opt(self):
+ return ['-fast']
+
+ def get_flags_debug(self):
+ return ['-g']
+
+ def get_flags_linker_so(self):
+ return ["-shared", '-fpic']
+
+ def runtime_library_dir_option(self, dir):
+ return '-R%s' % dir
+
+if __name__ == '__main__':
+ from distutils import log
+ log.set_verbosity(2)
+ from numpy.distutils import customized_fcompiler
+ print(customized_fcompiler(compiler='nv').get_version())
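version_pattern above is what numpy.distutils uses to pull the version number out of nvfortran -V output. A hedged sketch of the match (the banner string below is a plausible, hypothetical line, not verbatim compiler output):

    import re

    # Pattern copied from NVHPCFCompiler; the banner is made up for
    # illustration only.
    version_pattern = (r'\s*(nvfortran|(pg(f77|f90|fortran)) \(aka nvfortran\))'
                       r' (?P<version>[\d.-]+).*')
    banner = "nvfortran 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell"
    m = re.match(version_pattern, banner)
    print(m.group('version'))   # 20.9-0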
diff --git a/numpy/random/_examples/cython/setup.py b/numpy/random/_examples/cython/setup.py
index 42425c2c1..83f06fde8 100644
--- a/numpy/random/_examples/cython/setup.py
+++ b/numpy/random/_examples/cython/setup.py
@@ -19,7 +19,7 @@ inc_path = np.get_include()
lib_path = join(np.get_include(), '..', '..', 'random', 'lib')
extending = Extension("extending",
- sources=[join(path, 'extending.pyx')],
+ sources=[join('.', 'extending.pyx')],
include_dirs=[
np.get_include(),
join(path, '..', '..')
@@ -27,7 +27,7 @@ extending = Extension("extending",
define_macros=defs,
)
distributions = Extension("extending_distributions",
- sources=[join(path, 'extending_distributions.pyx')],
+ sources=[join('.', 'extending_distributions.pyx')],
include_dirs=[inc_path],
library_dirs=[lib_path],
libraries=['npyrandom'],
diff --git a/numpy/random/tests/test_extending.py b/numpy/random/tests/test_extending.py
index 99a819efb..1d872a5be 100644
--- a/numpy/random/tests/test_extending.py
+++ b/numpy/random/tests/test_extending.py
@@ -40,6 +40,7 @@ else:
# too old or wrong cython, skip the test
cython = None
+
@pytest.mark.skipif(cython is None, reason="requires cython")
@pytest.mark.slow
def test_cython(tmp_path):
diff --git a/numpy/tests/test_public_api.py b/numpy/tests/test_public_api.py
index fb7ec5d83..716feef9f 100644
--- a/numpy/tests/test_public_api.py
+++ b/numpy/tests/test_public_api.py
@@ -246,6 +246,7 @@ PRIVATE_BUT_PRESENT_MODULES = ['numpy.' + s for s in [
"distutils.fcompiler.none",
"distutils.fcompiler.pathf95",
"distutils.fcompiler.pg",
+ "distutils.fcompiler.nv",
"distutils.fcompiler.sun",
"distutils.fcompiler.vast",
"distutils.from_template",
diff --git a/pavement.py b/pavement.py
index 928fda3a6..58431b95f 100644
--- a/pavement.py
+++ b/pavement.py
@@ -37,7 +37,7 @@ from paver.easy import Bunch, options, task, sh
#-----------------------------------
# Path to the release notes
-RELEASE_NOTES = 'doc/source/release/1.19.2-notes.rst'
+RELEASE_NOTES = 'doc/source/release/1.19.3-notes.rst'
#-------------------------------------------------------
diff --git a/pyproject.toml b/pyproject.toml
index 3566cc683..cfdbfa6c9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,8 +2,8 @@
# Minimum requirements for the build system to execute.
requires = [
"setuptools<49.2.0",
- "wheel",
- "Cython>=0.29.21", # Note: keep in sync with tools/cythonize.py
+ "wheel<=0.35.1",
+ "Cython>=0.29.21,<3.0", # Note: keep in sync with tools/cythonize.py
]
diff --git a/pytest.ini b/pytest.ini
index 0db7f80dd..1296ed1a8 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -14,6 +14,3 @@ filterwarnings =
# Matrix PendingDeprecationWarning.
ignore:the matrix subclass is not
ignore:Importing from numpy.matlib is
-
-env =
- PYTHONHASHSEED=0
diff --git a/setup.py b/setup.py
index 7768e3371..139a35580 100755
--- a/setup.py
+++ b/setup.py
@@ -24,6 +24,8 @@ import sys
import subprocess
import textwrap
import sysconfig
+import warnings
+
if sys.version_info[:2] < (3, 6):
@@ -43,6 +45,7 @@ Programming Language :: Python :: 3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
+Programming Language :: Python :: 3.9
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython
Topic :: Software Development
@@ -55,10 +58,18 @@ Operating System :: MacOS
MAJOR = 1
MINOR = 19
-MICRO = 2
+MICRO = 3
ISRELEASED = True
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
+# The first version not in the `Programming Language :: Python :: ...` classifiers above
+if sys.version_info >= (3, 10):
+ warnings.warn(
+ f"NumPy {VERSION} may not yet support Python "
+ f"{sys.version_info.major}.{sys.version_info.minor}.",
+ RuntimeWarning,
+ )
+
# Return the git revision as a string
def git_version():
@@ -88,6 +99,7 @@ def git_version():
return GIT_REVISION
+
# BEFORE importing setuptools, remove MANIFEST. Otherwise it may not be
# properly updated when the contents of directories change (true for distutils,
# not sure about setuptools).
@@ -150,7 +162,7 @@ if not release:
a.close()
-def configuration(parent_package='',top_path=None):
+def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration(None, parent_package, top_path)
@@ -163,7 +175,7 @@ def configuration(parent_package='',top_path=None):
config.add_data_files(('numpy', 'LICENSE.txt'))
config.add_data_files(('numpy', 'numpy/*.pxd'))
- config.get_version('numpy/version.py') # sets config.version
+ config.get_version('numpy/version.py') # sets config.version
return config
@@ -175,13 +187,12 @@ def check_submodules():
if not os.path.exists('.git'):
return
with open('.gitmodules') as f:
- for l in f:
- if 'path' in l:
- p = l.split('=')[-1].strip()
+ for line in f:
+ if 'path' in line:
+ p = line.split('=')[-1].strip()
if not os.path.exists(p):
raise ValueError('Submodule {} missing'.format(p))
-
proc = subprocess.Popen(['git', 'submodule', 'status'],
stdout=subprocess.PIPE)
status, _ = proc.communicate()
@@ -267,9 +278,9 @@ def generate_cython():
print("Cythonizing sources")
for d in ('random',):
p = subprocess.call([sys.executable,
- os.path.join(cwd, 'tools', 'cythonize.py'),
- 'numpy/{0}'.format(d)],
- cwd=cwd)
+ os.path.join(cwd, 'tools', 'cythonize.py'),
+ 'numpy/{0}'.format(d)],
+ cwd=cwd)
if p != 0:
raise RuntimeError("Running cythonize failed!")
@@ -340,7 +351,6 @@ def parse_setuppy_commands():
"""))
return False
-
# The following commands aren't supported. They can only be executed when
# the user explicitly adds a --force command-line argument.
bad_commands = dict(
@@ -378,8 +388,8 @@ def parse_setuppy_commands():
)
bad_commands['nosetests'] = bad_commands['test']
for command in ('upload_docs', 'easy_install', 'bdist', 'bdist_dumb',
- 'register', 'check', 'install_data', 'install_headers',
- 'install_lib', 'install_scripts', ):
+ 'register', 'check', 'install_data', 'install_headers',
+ 'install_lib', 'install_scripts', ):
bad_commands[command] = "`setup.py %s` is not supported" % command
for command in bad_commands.keys():
@@ -399,7 +409,8 @@ def parse_setuppy_commands():
# If we got here, we didn't detect what setup.py command was given
import warnings
warnings.warn("Unrecognized setuptools command, proceeding with "
- "generating Cython sources and expanding templates", stacklevel=2)
+ "generating Cython sources and expanding templates",
+ stacklevel=2)
return True
@@ -434,25 +445,24 @@ def setup_package():
'f2py%s.%s = numpy.f2py.f2py2e:main' % sys.version_info[:2],
]
- cmdclass={"sdist": sdist_checked,
- }
+ cmdclass = {"sdist": sdist_checked, }
metadata = dict(
- name = 'numpy',
- maintainer = "NumPy Developers",
- maintainer_email = "numpy-discussion@python.org",
- description = DOCLINES[0],
- long_description = "\n".join(DOCLINES[2:]),
- url = "https://www.numpy.org",
- author = "Travis E. Oliphant et al.",
- download_url = "https://pypi.python.org/pypi/numpy",
+ name='numpy',
+ maintainer="NumPy Developers",
+ maintainer_email="numpy-discussion@python.org",
+ description=DOCLINES[0],
+ long_description="\n".join(DOCLINES[2:]),
+ url="https://www.numpy.org",
+ author="Travis E. Oliphant et al.",
+ download_url="https://pypi.python.org/pypi/numpy",
project_urls={
"Bug Tracker": "https://github.com/numpy/numpy/issues",
"Documentation": get_docs_url(),
"Source Code": "https://github.com/numpy/numpy",
},
- license = 'BSD',
+ license='BSD',
classifiers=[_f for _f in CLASSIFIERS.split('\n') if _f],
- platforms = ["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
+ platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
test_suite='pytest',
cmdclass=cmdclass,
python_requires='>=3.6',
@@ -473,8 +483,7 @@ def setup_package():
# patches distutils, even though we don't use it
import setuptools # noqa: F401
from numpy.distutils.core import setup
- cwd = os.path.abspath(os.path.dirname(__file__))
- if not 'sdist' in sys.argv:
+ if 'sdist' not in sys.argv:
# Generate Cython sources, unless we're generating an sdist
generate_cython()
diff --git a/test_requirements.txt b/test_requirements.txt
index 171206435..07d7f28c1 100644
--- a/test_requirements.txt
+++ b/test_requirements.txt
@@ -1,10 +1,10 @@
cython==0.29.21
wheel
setuptools<49.2.0
-hypothesis==5.23.2
-pytest==5.4.3
+hypothesis==5.36.1
+pytest==6.0.2
pytz==2020.1
-pytest-cov==2.8.1
+pytest-cov==2.10.1
pickle5; python_version == '3.7'
pickle5; python_version == '3.6' and platform_python_implementation != 'PyPy'
# for numpy.random.test.test_extending
diff --git a/tools/openblas_support.py b/tools/openblas_support.py
index cbb6a5e43..3d966a8f6 100644
--- a/tools/openblas_support.py
+++ b/tools/openblas_support.py
@@ -10,39 +10,88 @@ import zipfile
from tempfile import mkstemp, gettempdir
from urllib.request import urlopen, Request
+from urllib.error import HTTPError
-OPENBLAS_V = '0.3.9'
+OPENBLAS_V = '0.3.12'
# Temporary build of OpenBLAS to test a fix for dynamic detection of CPU
-OPENBLAS_LONG = 'v0.3.7-527-g79fd006c' # the 0.3.7 is misleading
+OPENBLAS_LONG = 'v0.3.12'
BASE_LOC = 'https://anaconda.org/multibuild-wheels-staging/openblas-libs'
BASEURL = f'{BASE_LOC}/{OPENBLAS_LONG}/download'
-ARCHITECTURES = ['', 'windows', 'darwin', 'aarch64', 'x86_64', 'i686', 'ppc64le', 's390x']
+ARCHITECTURES = ['', 'windows', 'darwin', 'aarch64', 'x86_64',
+ 'i686', 'ppc64le', 's390x']
sha256_vals = {
-"openblas-v0.3.7-527-g79fd006c-win_amd64-gcc_7_1_0.zip": "7249d68c02e6b6339e06edfeab1fecddf29ee1e67a3afaa77917c320c43de840",
-"openblas64_-v0.3.7-527-g79fd006c-win_amd64-gcc_7_1_0.zip": "6488e0961a5926e47242f63b63b41cfdd661e6f1d267e8e313e397cde4775c17",
-"openblas-v0.3.7-527-g79fd006c-win32-gcc_7_1_0.zip": "5fb0867ca70b1d0fdbf68dd387c0211f26903d74631420e4aabb49e94aa3930d",
-"openblas-v0.3.7-527-g79fd006c-macosx_10_9_x86_64-gf_1becaaa.tar.gz": "69434bd626bbc495da9ce8c36b005d140c75e3c47f94e88c764a199e820f9259",
-"openblas64_-v0.3.7-527-g79fd006c-macosx_10_9_x86_64-gf_1becaaa.tar.gz": "093f6d953e3fa76a86809be67bd1f0b27656671b5a55b233169cfaa43fd63e22",
-"openblas-v0.3.7-527-g79fd006c-manylinux2014_aarch64.tar.gz": "42676c69dc48cd6e412251b39da6b955a5a0e00323ddd77f9137f7c259d35319",
-"openblas64_-v0.3.7-527-g79fd006c-manylinux2014_aarch64.tar.gz": "5aec167af4052cf5e9e3e416c522d9794efabf03a2aea78b9bb3adc94f0b73d8",
-"openblas-v0.3.7-527-g79fd006c-manylinux2010_x86_64.tar.gz": "fa67c6cc29d4cc5c70a147c80526243239a6f95fc3feadcf83a78176cd9c526b",
-"openblas64_-v0.3.7-527-g79fd006c-manylinux2010_x86_64.tar.gz": "9ad34e89a5307dcf5823bf5c020580d0559a0c155fe85b44fc219752e61852b0",
-"openblas-v0.3.7-527-g79fd006c-manylinux2010_i686.tar.gz": "0b8595d316c8b7be84ab1f1d5a0c89c1b35f7c987cdaf61d441bcba7ab4c7439",
-"openblas-v0.3.7-527-g79fd006c-manylinux2014_ppc64le.tar.gz": "3e1c7d6472c34e7210e3605be4bac9ddd32f613d44297dc50cf2d067e720c4a9",
-"openblas64_-v0.3.7-527-g79fd006c-manylinux2014_ppc64le.tar.gz": "a0885873298e21297a04be6cb7355a585df4fa4873e436b4c16c0a18fc9073ea",
-"openblas-v0.3.7-527-g79fd006c-manylinux2014_s390x.tar.gz": "79b454320817574e20499d58f05259ed35213bea0158953992b910607b17f240",
-"openblas64_-v0.3.7-527-g79fd006c-manylinux2014_s390x.tar.gz": "9fddbebf5301518fc4a5d2022a61886544a0566868c8c014359a1ee6b17f2814",
-"openblas-v0.3.7-527-g79fd006c-manylinux1_i686.tar.gz": "24fb92684ec4676185fff5c9340f50c3db6075948bcef760e9c715a8974e4680",
-"openblas-v0.3.7-527-g79fd006c-manylinux1_x86_64.tar.gz": "ebb8236b57a1b4075fd5cdc3e9246d2900c133a42482e5e714d1e67af5d00e62",
-"openblas-v0.3.7-527-g79fd006c-manylinux1_i686.tar.gz": "24fb92684ec4676185fff5c9340f50c3db6075948bcef760e9c715a8974e4680",
-"openblas-v0.3.7-527-g79fd006c-manylinux1_x86_64.tar.gz": "ebb8236b57a1b4075fd5cdc3e9246d2900c133a42482e5e714d1e67af5d00e62",
-"openblas-v0.3.7-527-g79fd006c-manylinux1_i686.tar.gz": "24fb92684ec4676185fff5c9340f50c3db6075948bcef760e9c715a8974e4680",
-"openblas-v0.3.7-527-g79fd006c-manylinux1_x86_64.tar.gz": "ebb8236b57a1b4075fd5cdc3e9246d2900c133a42482e5e714d1e67af5d00e62",
+ "openblas-v0.3.7-527-g79fd006c-win_amd64-gcc_7_1_0.zip":
+ "7249d68c02e6b6339e06edfeab1fecddf29ee1e67a3afaa77917c320c43de840",
+ "openblas64_-v0.3.7-527-g79fd006c-win_amd64-gcc_7_1_0.zip":
+ "6488e0961a5926e47242f63b63b41cfdd661e6f1d267e8e313e397cde4775c17",
+ "openblas-v0.3.7-527-g79fd006c-win32-gcc_7_1_0.zip":
+ "5fb0867ca70b1d0fdbf68dd387c0211f26903d74631420e4aabb49e94aa3930d",
+ "openblas-v0.3.7-527-g79fd006c-macosx_10_9_x86_64-gf_1becaaa.tar.gz":
+ "69434bd626bbc495da9ce8c36b005d140c75e3c47f94e88c764a199e820f9259",
+ "openblas64_-v0.3.7-527-g79fd006c-macosx_10_9_x86_64-gf_1becaaa.tar.gz":
+ "093f6d953e3fa76a86809be67bd1f0b27656671b5a55b233169cfaa43fd63e22",
+ "openblas-v0.3.7-527-g79fd006c-manylinux2014_aarch64.tar.gz":
+ "42676c69dc48cd6e412251b39da6b955a5a0e00323ddd77f9137f7c259d35319",
+ "openblas64_-v0.3.7-527-g79fd006c-manylinux2014_aarch64.tar.gz":
+ "5aec167af4052cf5e9e3e416c522d9794efabf03a2aea78b9bb3adc94f0b73d8",
+ "openblas-v0.3.7-527-g79fd006c-manylinux2010_x86_64.tar.gz":
+ "fa67c6cc29d4cc5c70a147c80526243239a6f95fc3feadcf83a78176cd9c526b",
+ "openblas64_-v0.3.7-527-g79fd006c-manylinux2010_x86_64.tar.gz":
+ "9ad34e89a5307dcf5823bf5c020580d0559a0c155fe85b44fc219752e61852b0",
+ "openblas-v0.3.7-527-g79fd006c-manylinux2010_i686.tar.gz":
+ "0b8595d316c8b7be84ab1f1d5a0c89c1b35f7c987cdaf61d441bcba7ab4c7439",
+ "openblas-v0.3.7-527-g79fd006c-manylinux2014_ppc64le.tar.gz":
+ "3e1c7d6472c34e7210e3605be4bac9ddd32f613d44297dc50cf2d067e720c4a9",
+ "openblas64_-v0.3.7-527-g79fd006c-manylinux2014_ppc64le.tar.gz":
+ "a0885873298e21297a04be6cb7355a585df4fa4873e436b4c16c0a18fc9073ea",
+ "openblas-v0.3.7-527-g79fd006c-manylinux2014_s390x.tar.gz":
+ "79b454320817574e20499d58f05259ed35213bea0158953992b910607b17f240",
+ "openblas64_-v0.3.7-527-g79fd006c-manylinux2014_s390x.tar.gz":
+ "9fddbebf5301518fc4a5d2022a61886544a0566868c8c014359a1ee6b17f2814",
+ "openblas-v0.3.7-527-g79fd006c-manylinux1_i686.tar.gz":
+ "24fb92684ec4676185fff5c9340f50c3db6075948bcef760e9c715a8974e4680",
+ "openblas-v0.3.7-527-g79fd006c-manylinux1_x86_64.tar.gz":
+ "ebb8236b57a1b4075fd5cdc3e9246d2900c133a42482e5e714d1e67af5d00e62",
+ "openblas-v0.3.10-win_amd64-gcc_7_1_0.zip":
+ "e5356a2aa4aa7ed9233b2ca199fdd445f55ba227f004ebc63071dfa2426e9b09",
+ "openblas64_-v0.3.10-win_amd64-gcc_7_1_0.zip":
+ "aea3f9c8bdfe0b837f0d2739a6c755b12b6838f6c983e4ede71b4e1b576e6e77",
+ "openblas-v0.3.10-win32-gcc_7_1_0.zip":
+ "af1ad3172b23f7c6ef2234151a71d3be4d92010dad4dfb25d07cf5a20f009202",
+ "openblas64_-v0.3.10-macosx_10_9_x86_64-gf_1becaaa.tar.gz":
+ "38b61c58d63048731d6884fea7b63f8cbd610e85b138c6bac0e39fd77cd4699b",
+ "openblas-v0.3.10-manylinux2014_aarch64.tar.gz":
+ "c4444b9836ec26f7772fae02851961bf73177ff2aa436470e56fab8a1ef8d405",
+ "openblas-v0.3.10-manylinux2010_x86_64.tar.gz":
+ "cb7988c4a015aece9c49b1169f51c4ac2287fb9aab8114c8ab67792138ffc85e",
+ "openblas-v0.3.10-manylinux2010_i686.tar.gz":
+ "dc637801dd80ebd6394ea8b4a97f8858e4224870ea9214de08bebbdddd8e206e",
+ "openblas-v0.3.10-manylinux1_x86_64.tar.gz":
+ "ec1f9e9b2a62d5cb9e2634b88ee2da7cb6b07702d5a0e8b190d680a31adfa23a",
+ "openblas-v0.3.10-manylinux1_i686.tar.gz":
+ "b13d9d14e6bd452c0fbadb5cd5fda05b98b1e14043edb13ead90694d4cc07f0e",
+ "openblas-v0.3.10-manylinux2014_ppc64le.tar.gz":
+ "1cbc8176986099cf0cbb8f64968d5a14880d602d4b3c59a91d75b69b8760cde3",
+ "openblas-v0.3.10-manylinux2014_s390x.tar.gz":
+ "fa6722f0b12507ab0a65f38501ed8435b573df0adc0b979f47cdc4c9e9599475",
+ "openblas-v0.3.10-macosx_10_9_x86_64-gf_1becaaa.tar.gz":
+ "c6940b5133e687ae7a4f9c7c794f6a6d92b619cf41e591e5db07aab5da118199",
+ "openblas64_-v0.3.10-manylinux2014_s390x.tar.gz":
+ "e0347dd6f3f3a27d2f5e76d382e8a4a68e2e92f5f6a10e54ef65c7b14b44d0e8",
+ "openblas64_-v0.3.10-manylinux2014_ppc64le.tar.gz":
+ "4b96a51ac767ec0aabb821c61bcd3420e82e987fc93f7e1f85aebb2a845694eb",
+ "openblas64_-v0.3.10-manylinux2010_x86_64.tar.gz":
+ "f68fea21fbc73d06b7566057cad2ed8c7c0eb71fabf9ed8a609f86e5bc60ce5e",
+ "openblas64_-v0.3.10-manylinux2014_aarch64.tar.gz":
+ "15e6eed8cb0df8b88e52baa136ffe1769c517e9de7bcdfd81ec56420ae1069e9",
+ "openblas64_-v0.3.10-win_amd64-gcc_7_1_0.zip":
+ "aea3f9c8bdfe0b837f0d2739a6c755b12b6838f6c983e4ede71b4e1b576e6e77",
}
IS_32BIT = sys.maxsize < 2**32
+
def get_arch():
if platform.system() == 'Windows':
ret = 'windows'
@@ -53,10 +102,11 @@ def get_arch():
# What do 32 bit machines report?
# If they are a docker, they can report x86_64
if 'x86' in ret and IS_32BIT:
- arch = 'i686'
+ ret = 'i686'
assert ret in ARCHITECTURES, f'invalid architecture {ret}'
return ret
+
def get_ilp64():
if os.environ.get("NPY_USE_BLAS_ILP64", "0") == "0":
return None
@@ -64,6 +114,7 @@ def get_ilp64():
raise RuntimeError("NPY_USE_BLAS_ILP64 set on 32-bit arch")
return "64_"
+
def get_manylinux(arch):
if arch in ('x86_64', 'i686'):
default = '2010'
@@ -75,13 +126,13 @@ def get_manylinux(arch):
return ret
-def download_openblas(target, arch, ilp64):
+def download_openblas(target, arch, ilp64, is_32bit):
ml_ver = get_manylinux(arch)
fnsuffix = {None: "", "64_": "64_"}[ilp64]
filename = ''
- headers = {'User-Agent': ('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 ; '
- '(KHTML, like Gecko) Chrome/41.0.2228.0 '
- 'Safari/537.3')}
+ headers = {'User-Agent':
+ ('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 ; '
+ '(KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.3')}
if arch in ('aarch64', 'ppc64le', 's390x', 'x86_64', 'i686'):
suffix = f'manylinux{ml_ver}_{arch}.tar.gz'
filename = f'{BASEURL}/openblas{fnsuffix}-{OPENBLAS_LONG}-{suffix}'
@@ -91,16 +142,20 @@ def download_openblas(target, arch, ilp64):
filename = f'{BASEURL}/openblas{fnsuffix}-{OPENBLAS_LONG}-{suffix}'
typ = 'tar.gz'
elif arch == 'windows':
- if IS_32BIT:
- suffix = 'win32-gcc_7_1_0.zip'
+ if is_32bit:
+ suffix = 'win32-gcc_8_1_0.zip'
else:
- suffix = 'win_amd64-gcc_7_1_0.zip'
+ suffix = 'win_amd64-gcc_8_1_0.zip'
filename = f'{BASEURL}/openblas{fnsuffix}-{OPENBLAS_LONG}-{suffix}'
typ = 'zip'
if not filename:
return None
req = Request(url=filename, headers=headers)
- response = urlopen(req)
+ try:
+ response = urlopen(req)
+ except HTTPError:
+ print(f'Could not download "{filename}"', file=sys.stderr)
+ raise
length = response.getheader('content-length')
if response.status != 200:
print(f'Could not download "{filename}"', file=sys.stderr)
@@ -110,18 +165,21 @@ def download_openblas(target, arch, ilp64):
# Verify hash
key = os.path.basename(filename)
sha256_returned = hashlib.sha256(data).hexdigest()
- if key not in sha256_vals:
- raise ValueError(
- f'key "{key}" with hash "{sha256_returned}" not in sha256_vals')
- sha256_expected = sha256_vals[key]
- if sha256_returned != sha256_expected:
- raise ValueError(f'sha256 hash mismatch for filename {filename}')
+ if 0:
+ if key not in sha256_vals:
+ raise ValueError(
+ f'\nkey "{key}" with hash "{sha256_returned}" not in sha256_vals\n')
+ sha256_expected = sha256_vals[key]
+ if sha256_returned != sha256_expected:
+ # print(f'\nkey "{key}" with hash "{sha256_returned}" mismatch\n')
+ raise ValueError(f'sha256 hash mismatch for filename {filename}')
print("Saving to file", file=sys.stderr)
with open(target, 'wb') as fid:
fid.write(data)
return typ
-def setup_openblas(arch=get_arch(), ilp64=get_ilp64()):
+
+def setup_openblas(arch=get_arch(), ilp64=get_ilp64(), is_32bit=IS_32BIT):
'''
Download and setup an openblas library for building. If successful,
the configuration script will find it automatically.
@@ -135,24 +193,25 @@ def setup_openblas(arch=get_arch(), ilp64=get_ilp64()):
_, tmp = mkstemp()
if not arch:
raise ValueError('unknown architecture')
- typ = download_openblas(tmp, arch, ilp64)
+ typ = download_openblas(tmp, arch, ilp64, is_32bit)
if not typ:
return ''
if arch == 'windows':
if not typ == 'zip':
- return 'expecting to download zipfile on windows, not %s' % str(typ)
+ return f'expecting to download zipfile on windows, not {typ}'
return unpack_windows_zip(tmp)
else:
if not typ == 'tar.gz':
return 'expecting to download tar.gz, not %s' % str(typ)
return unpack_targz(tmp)
+
def unpack_windows_zip(fname):
with zipfile.ZipFile(fname, 'r') as zf:
# Get the openblas.a file, but not openblas.dll.a nor openblas.dev.a
lib = [x for x in zf.namelist() if OPENBLAS_LONG in x and
- x.endswith('a') and not x.endswith('dll.a') and
- not x.endswith('dev.a')]
+ x.endswith('a') and not x.endswith('dll.a') and
+ not x.endswith('dev.a')]
if not lib:
return 'could not find libopenblas_%s*.a ' \
'in downloaded zipfile' % OPENBLAS_LONG
@@ -161,6 +220,7 @@ def unpack_windows_zip(fname):
fid.write(zf.read(lib[0]))
return target
+
def unpack_targz(fname):
target = os.path.join(gettempdir(), 'openblas')
if not os.path.exists(target):
@@ -171,6 +231,7 @@ def unpack_targz(fname):
extract_tarfile_to(zf, target, prefix)
return target
+
def extract_tarfile_to(tarfileobj, target_path, archive_path):
"""Extract TarFile contents under archive_path/ to target_path/"""
@@ -194,6 +255,7 @@ def extract_tarfile_to(tarfileobj, target_path, archive_path):
tarfileobj.extractall(target_path, members=get_members())
+
def make_init(dirname):
'''
Create a _distributor_init.py file for OpenBlas
@@ -228,33 +290,51 @@ def make_init(dirname):
DLL_filenames.append(filename)
if len(DLL_filenames) > 1:
import warnings
- warnings.warn("loaded more than 1 DLL from .libs:\\n%s" %
- "\\n".join(DLL_filenames),
+ warnings.warn("loaded more than 1 DLL from .libs:"
+ "\\n%s" % "\\n".join(DLL_filenames),
stacklevel=1)
"""))
+
def test_setup(arches):
'''
Make sure all the downloadable files exist and can be opened
'''
def items():
+ """ yields all combinations of arch, ilp64, is_32bit
+ """
for arch in arches:
- yield arch, None
- if arch not in ('i686'):
- yield arch, '64_'
+ yield arch, None, False
+ if arch not in ('i686',):
+ yield arch, '64_', False
+ if arch in ('windows',):
+ yield arch, None, True
+ if arch in ('i686', 'x86_64'):
+ oldval = os.environ.get('MB_ML_VER', None)
+ os.environ['MB_ML_VER'] = '1'
+ yield arch, None, False
+ # Once we create x86_64 and i686 manylinux2014 wheels...
+ # os.environ['MB_ML_VER'] = '2014'
+ # yield arch, None, False
+ if oldval:
+ os.environ['MB_ML_VER'] = oldval
+ else:
+ os.environ.pop('MB_ML_VER')
errs = []
- for arch, ilp64 in items():
+ for arch, ilp64, is_32bit in items():
if arch == '':
continue
-
+ if arch not in arches:
+ continue
target = None
try:
try:
- target = setup_openblas(arch, ilp64)
+ target = setup_openblas(arch, ilp64, is_32bit)
except Exception as e:
- print(f'Could not setup {arch}:')
- print(str(e))
+ print(f'Could not setup {arch} with ilp64 {ilp64}, '
+ f'32bit {is_32bit}:')
+ print(e)
errs.append(e)
continue
if not target:
@@ -290,28 +370,31 @@ def test_version(expected_version, ilp64=get_ilp64()):
get_config = dll.openblas_get_config64_
else:
get_config = dll.openblas_get_config
- get_config.restype=ctypes.c_char_p
+ get_config.restype = ctypes.c_char_p
res = get_config()
print('OpenBLAS get_config returned', str(res))
if not expected_version:
expected_version = OPENBLAS_V
check_str = b'OpenBLAS %s' % expected_version.encode()
print(check_str)
- assert check_str in res, '%s not found in %s' %(expected_version, res)
+ assert check_str in res, f'{expected_version} not found in {res}'
if ilp64:
assert b"USE64BITINT" in res
else:
assert b"USE64BITINT" not in res
+
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
- description='Download and expand an OpenBLAS archive for this ' \
+ description='Download and expand an OpenBLAS archive for this '
'architecture')
parser.add_argument('--test', nargs='*', default=None,
- help='Test different architectures. "all", or any of %s' % ARCHITECTURES)
+ help='Test different architectures. "all", or any of '
+ f'{ARCHITECTURES}')
parser.add_argument('--check_version', nargs='?', default='',
- help='Check provided OpenBLAS version string against available OpenBLAS')
+ help='Check provided OpenBLAS version string '
+ 'against available OpenBLAS')
args = parser.parse_args()
if args.check_version != '':
test_version(args.check_version)
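For reference, azure-steps-windows.yml above consumes this module by running "python tools/openblas_support.py" and capturing the printed path. A minimal sketch of calling it directly from a checkout (assumes network access to the anaconda.org staging bucket; illustrative only):

    # Run from the NumPy source tree: download the OpenBLAS build matching
    # this platform and print where the unpacked library ended up.
    import sys
    sys.path.insert(0, 'tools')

    import openblas_support

    print(openblas_support.setup_openblas())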