scipy: enable python versions 3.8 and 3.9. ndimage import fix in stats module (#6148)

Co-authored-by: Aleksei Gerasimov <aleksei.gerasimov@vutbr.cz>
This commit is contained in:
iam-git
2021-09-11 09:17:06 +02:00
committed by GitHub
parent f3a4fe952d
commit cb9092f891
2 changed files with 104 additions and 23 deletions

View File

@@ -47,3 +47,85 @@ index 7e44565..3b0ead2 100644
--
2.30.2
From 0450b42c56cb1ed5d80a81447a766a4cfa757e63 Mon Sep 17 00:00:00 2001
From: Aleksei Gerasimov <aleksei.gerasimov@vutbr.cz>
Date: Tue, 24 Aug 2021 15:19:52 +0200
Subject: comment out ndimage import. Only one function (_threshold_mgc_map) is
directly affected.
diff --git a/scipy/stats/stats.py b/scipy/stats/stats.py
index 2b7dac8..c460637 100644
--- a/scipy/stats/stats.py
+++ b/scipy/stats/stats.py
@@ -172,7 +172,7 @@ import numpy as np
from numpy import array, asarray, ma
from scipy.spatial.distance import cdist
-from scipy.ndimage import measurements
+#from scipy.ndimage import measurements
from scipy._lib._util import (_lazywhere, check_random_state, MapWrapper,
rng_integers, float_factorial)
import scipy.special as special
@@ -5208,32 +5208,33 @@ def _threshold_mgc_map(stat_mgc_map, samp_size):
sig_connect : ndarray
A binary matrix with 1's indicating the significant region.
"""
- m, n = stat_mgc_map.shape
-
- # 0.02 is simply an empirical threshold, this can be set to 0.01 or 0.05
- # with varying levels of performance. Threshold is based on a beta
- # approximation.
- per_sig = 1 - (0.02 / samp_size) # Percentile to consider as significant
- threshold = samp_size * (samp_size - 3)/4 - 1/2 # Beta approximation
- threshold = distributions.beta.ppf(per_sig, threshold, threshold) * 2 - 1
-
- # the global scale at is the statistic calculated at maximial nearest
- # neighbors. Threshold is the maximium on the global and local scales
- threshold = max(threshold, stat_mgc_map[m - 1][n - 1])
-
- # find the largest connected component of significant correlations
- sig_connect = stat_mgc_map > threshold
- if np.sum(sig_connect) > 0:
- sig_connect, _ = measurements.label(sig_connect)
- _, label_counts = np.unique(sig_connect, return_counts=True)
-
- # skip the first element in label_counts, as it is count(zeros)
- max_label = np.argmax(label_counts[1:]) + 1
- sig_connect = sig_connect == max_label
- else:
- sig_connect = np.array([[False]])
-
- return sig_connect
+ raise ImportError("Haiku's package of scipy does not contain ndimage module")
+# m, n = stat_mgc_map.shape
+#
+# # 0.02 is simply an empirical threshold, this can be set to 0.01 or 0.05
+# # with varying levels of performance. Threshold is based on a beta
+# # approximation.
+# per_sig = 1 - (0.02 / samp_size) # Percentile to consider as significant
+# threshold = samp_size * (samp_size - 3)/4 - 1/2 # Beta approximation
+# threshold = distributions.beta.ppf(per_sig, threshold, threshold) * 2 - 1
+#
+# # the global scale at is the statistic calculated at maximial nearest
+# # neighbors. Threshold is the maximium on the global and local scales
+# threshold = max(threshold, stat_mgc_map[m - 1][n - 1])
+#
+# # find the largest connected component of significant correlations
+# sig_connect = stat_mgc_map > threshold
+# if np.sum(sig_connect) > 0:
+# sig_connect, _ = measurements.label(sig_connect)
+# _, label_counts = np.unique(sig_connect, return_counts=True)
+#
+# # skip the first element in label_counts, as it is count(zeros)
+# max_label = np.argmax(label_counts[1:]) + 1
+# sig_connect = sig_connect == max_label
+# else:
+# sig_connect = np.array([[False]])
+#
+# return sig_connect
def _smooth_mgc_map(sig_connect, stat_mgc_map):
--
2.30.2

View File

@@ -11,16 +11,13 @@ HOMEPAGE="https://www.scipy.org/"
COPYRIGHT=" 2001-2002 Enthought, Inc.
2003-2021 SciPy Developers"
LICENSE="BSD (3-clause)"
REVISION="1"
REVISION="2"
SOURCE_URI="https://github.com/scipy/scipy/releases/download/v$portVersion/scipy-$portVersion.tar.xz"
CHECKSUM_SHA256="3851fdcb1e6877241c3377aa971c85af0d44f90c57f4dd4e54e1b2bbd742635e"
SOURCE_DIR="scipy-$portVersion"
PATCHES="scipy-$portVersion.patchset"
ARCHITECTURES="!x86_gcc2 !x86_64"
# scipy can be build directly on x86
# but I was unable to make recipe do
# in the same way as "setarch x86" do
ARCHITECTURES="!x86_gcc2 x86_64"
SECONDARY_ARCHITECTURES="x86"
PROVIDES="
@@ -37,8 +34,19 @@ REQUIRES="
numpy$secondaryArchSuffix
"
PYTHON_PACKAGES=(python3)
PYTHON_VERSIONS=(3.7)
BUILD_REQUIRES="
haiku${secondaryArchSuffix}_devel
devel:libcblas$secondaryArchSuffix
devel:libarpack$secondaryArchSuffix
devel:libblis$secondaryArchSuffix
devel:liblapack$secondaryArchSuffix
devel:libopenblas$secondaryArchSuffix
devel:libumfpack$secondaryArchSuffix
"
PYTHON_PACKAGES=(python3 python38 python39)
PYTHON_VERSIONS=(3.7 3.8 3.9)
for i in "${!PYTHON_PACKAGES[@]}"; do
pythonPackage=${PYTHON_PACKAGES[i]}
pythonVersion=${PYTHON_VERSIONS[$i]}
@@ -51,29 +59,20 @@ REQUIRES_$pythonPackage=\"\
cmd:f2py$pythonVersion\n\
cmd:python$pythonVersion\
\""
done
BUILD_REQUIRES="
haiku${secondaryArchSuffix}_devel
devel:libcblas$secondaryArchSuffix
devel:libarpack$secondaryArchSuffix
devel:libblis$secondaryArchSuffix
devel:liblapack$secondaryArchSuffix
devel:libopenblas$secondaryArchSuffix
devel:libumfpack$secondaryArchSuffix
pybind11${secondaryArchSuffix}_python3
numpy${secondaryArchSuffix}_python3
setuptools_python3
BUILD_REQUIRES="$BUILD_REQUIRES
pybind11${secondaryArchSuffix}_$pythonPackage
numpy${secondaryArchSuffix}_$pythonPackage
setuptools_$pythonPackage
"
BUILD_PREREQUIRES="
BUILD_PREREQUIRES="$BUILD_PREREQUIRES
cmd:g++$secondaryArchSuffix
cmd:gfortran$secondaryArchSuffix
cmd:make
cmd:pkg_config$secondaryArchSuffix
cmd:python3.7
cmd:python$pythonVersion
cmd:swig # not sure if actually used
"
done
INSTALL()