
Commit 61a9953

Fix some test warnings
1 parent d733683 commit 61a9953


9 files changed, +46 -45 lines changed


CHANGELOG

Lines changed: 1 addition & 0 deletions
@@ -13,6 +13,7 @@ The rules for CHANGELOG file:
 
 Unreleased
 ----------
+- Fix a couple of ``DeprecationWarnings`` and ``UserWarnings`` (#280)
 - Fix PCovC scaling (#270)
 - Refactor of reconstruction measures(#275)
 - Code cleanup of Base classes (#264)

pyproject.toml

Lines changed: 4 additions & 1 deletion
@@ -83,13 +83,16 @@ include = [
 output = 'tests/coverage.xml'
 
 [tool.pytest.ini_options]
-testpaths = ["tests"]
 addopts = [
     "--cov",
     "--cov-append",
     "--cov-report=",
     "--import-mode=append",
 ]
+# filterwarnings = [
+#     "error",
+# ]
+testpaths = ["tests"]
 
 [tool.ruff]
 exclude = ["docs/src/examples/", "src/torchpme/_version.py"]
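
The commented-out filterwarnings entry stages a stricter configuration: once enabled, pytest escalates every warning raised during the test run into a test failure, which is presumably why the warnings above needed fixing first. A minimal sketch of that escalation using only the standard library (the warning text is illustrative, not from skmatter):

    import warnings

    # Roughly what pytest's filterwarnings = ["error"] does once uncommented:
    # an "error" filter turns any warning raised inside a test into an exception.
    with warnings.catch_warnings():
        warnings.simplefilter("error")  # the filter must be installed before the warning fires
        try:
            warnings.warn("something is deprecated", DeprecationWarning)
        except DeprecationWarning as exc:
            print(f"warning escalated to an exception: {exc}")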

src/skmatter/_selection.py

Lines changed: 2 additions & 2 deletions
@@ -1019,7 +1019,7 @@ def _update_hausdorff(self, X, y, last_selected):
         )
 
         # update in-place the Hausdorff distance list
-        np.minimum(self.hausdorff_, new_dist, self.hausdorff_)
+        np.minimum(self.hausdorff_, new_dist, out=self.hausdorff_)
 
     def _update_post_selection(self, X, y, last_selected):
         """
@@ -1163,7 +1163,7 @@ def _update_hausdorff(self, X, y, last_selected):
         )
 
         # update in-place the Hausdorff distance list
-        np.minimum(self.hausdorff_, new_dist, self.hausdorff_)
+        np.minimum(self.hausdorff_, new_dist, out=self.hausdorff_)
 
     def _update_post_selection(self, X, y, last_selected):
         """Saves the most recent selections, increments the counter, and, recomputes

src/skmatter/decomposition/_kpcov.py

Lines changed: 2 additions & 2 deletions
@@ -12,7 +12,7 @@
 from sklearn.decomposition._pca import _infer_dimension
 from sklearn.utils import check_random_state
 from sklearn.utils._arpack import _init_arpack_v0
-from sklearn.utils.extmath import randomized_svd, stable_cumsum, svd_flip
+from sklearn.utils.extmath import randomized_svd, svd_flip
 from sklearn.utils.validation import check_is_fitted
 from sklearn.utils.validation import validate_data
 from sklearn.metrics.pairwise import pairwise_kernels
@@ -258,7 +258,7 @@ def _decompose_full(self, mat):
             # side='right' ensures that number of features selected
             # their variance is always greater than self.n_components_ float
             # passed. More discussion in issue: #15669
-            ratio_cumsum = stable_cumsum(explained_variance_ratio_)
+            ratio_cumsum = np.cumulative_sum(explained_variance_ratio_)
             self.n_components_ = (
                 np.searchsorted(ratio_cumsum, self.n_components_, side="right") + 1
             )

src/skmatter/decomposition/_pcov.py

Lines changed: 2 additions & 2 deletions
@@ -14,7 +14,7 @@
 from sklearn.linear_model._base import LinearModel
 from sklearn.utils import check_random_state
 from sklearn.utils._arpack import _init_arpack_v0
-from sklearn.utils.extmath import randomized_svd, stable_cumsum, svd_flip
+from sklearn.utils.extmath import randomized_svd, svd_flip
 from sklearn.utils.validation import check_is_fitted
 
 from skmatter.utils import pcovr_covariance, pcovr_kernel
@@ -288,7 +288,7 @@ def _decompose_full(self, mat):
             # side='right' ensures that number of features selected
             # their variance is always greater than self.n_components_ float
             # passed. More discussion in issue: #15669
-            ratio_cumsum = stable_cumsum(explained_variance_ratio_)
+            ratio_cumsum = np.cumulative_sum(explained_variance_ratio_)
             self.n_components_ = (
                 np.searchsorted(ratio_cumsum, self.n_components_, side="right") + 1
             )
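
Both decomposition modules (_kpcov.py and _pcov.py) drop sklearn's private stable_cumsum helper in favour of NumPy's cumulative sum; np.cumulative_sum is the name introduced in NumPy 2.0, with np.cumsum as the long-standing equivalent. The surrounding logic, choosing the smallest number of components whose cumulative explained variance exceeds a float n_components, is unchanged. A short sketch with made-up numbers:

    import numpy as np

    # Toy explained-variance ratios; in the library these come from the fitted SVD.
    explained_variance_ratio = np.array([0.6, 0.25, 0.1, 0.05])
    target = 0.9  # keep enough components to explain more than 90% of the variance

    ratio_cumsum = np.cumulative_sum(explained_variance_ratio)  # [0.6, 0.85, 0.95, 1.0]

    # side="right" ensures the selected components explain strictly more than `target`.
    n_components = np.searchsorted(ratio_cumsum, target, side="right") + 1
    print(n_components)  # 3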

src/skmatter/sample_selection/_voronoi_fps.py

Lines changed: 1 addition & 1 deletion
@@ -312,7 +312,7 @@ def _update_post_selection(self, X, y, last_selected):
 
             updated_points = np.where(self.new_dist_ < self.hausdorff_)[0]
             np.minimum(
-                self.hausdorff_, self.new_dist_, self.hausdorff_, casting="unsafe"
+                self.hausdorff_, self.new_dist_, out=self.hausdorff_, casting="unsafe"
             )
         else:
             updated_points = np.array([])
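
Here and in _selection.py above, the behaviour is unchanged: the output buffer is simply passed to np.minimum by keyword instead of positionally, and the element-wise minimum is still written into self.hausdorff_ in place. A minimal sketch of the pattern, with made-up arrays standing in for the selector's state:

    import numpy as np

    hausdorff = np.array([3.0, 1.5, 2.0])  # current shortest distance to any selected point
    new_dist = np.array([2.5, 2.0, 0.5])   # distances to the newly selected point

    # Element-wise minimum written back into `hausdorff`; the explicit out= keyword
    # avoids allocating a new array and makes the output argument unambiguous.
    np.minimum(hausdorff, new_dist, out=hausdorff)
    print(hausdorff)  # [2.5 1.5 0.5]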

tests/test_kernel_pcovc.py

Lines changed: 3 additions & 5 deletions
@@ -10,6 +10,7 @@
 from sklearn.preprocessing import StandardScaler
 from sklearn.linear_model import LogisticRegression, RidgeClassifier
 from sklearn.metrics.pairwise import pairwise_kernels
+import pytest
 
 from skmatter.decomposition import KernelPCovC
 
@@ -337,6 +338,7 @@ def test_scale_z_parameter(self):
 
         kpcovc_unscaled = self.model(scale_z=False)
         kpcovc_unscaled.fit(self.X, self.Y)
+
         assert not np.allclose(kpcovc_scaled.pkt_, kpcovc_unscaled.pkt_)
 
     def test_z_scaling(self):
@@ -345,11 +347,7 @@ def test_z_scaling(self):
         if it is.
         """
         kpcovc = self.model(n_components=2, scale_z=True)
-
-        with warnings.catch_warnings():
-            kpcovc.fit(self.X, self.Y)
-            warnings.simplefilter("error")
-            self.assertEqual(1 + 1, 2)
+        kpcovc.fit(self.X, self.Y)
 
         kpcovc = self.model(n_components=2, scale_z=False, z_mean_tol=0, z_var_tol=0)
 
tests/test_pcovc.py

Lines changed: 16 additions & 16 deletions
@@ -10,6 +10,7 @@
 from sklearn.naive_bayes import GaussianNB
 from sklearn.preprocessing import StandardScaler
 from sklearn.utils.validation import check_X_y
+import pytest
 
 from skmatter.decomposition import PCovC
 
@@ -186,9 +187,10 @@ def test_select_sample_space(self):
         n_samples = 2
 
         # select range where there are at least 2 classes in Y
-        pcovc.fit(self.X[49 : 49 + n_samples], self.Y[49 : 49 + n_samples])
+        with pytest.warns(match="class does not automatically center data"):
+            pcovc.fit(self.X[49 : 49 + n_samples], self.Y[49 : 49 + n_samples])
 
-        self.assertTrue(pcovc.space_ == "sample")
+        assert pcovc.space_ == "sample"
 
     def test_bad_space(self):
         """
@@ -397,25 +399,20 @@ def test_centering(self):
         """
         pcovc = self.model(n_components=2, tol=1e-12)
         X = self.X.copy() + np.random.uniform(-1, 1, self.X.shape[1])
-        with warnings.catch_warnings(record=True) as w:
+        m = (
+            "This class does not automatically center data, and your data mean is "
+            "greater than the supplied tolerance."
+        )
+        with pytest.warns(match=m):
             pcovc.fit(X, self.Y)
-        self.assertEqual(
-            str(w[0].message),
-            "This class does not automatically center data, and your data "
-            "mean is greater than the supplied tolerance.",
-        )
 
     def test_z_scaling(self):
         """
         Check that PCovC raises a warning if Z is not of scale, and does not
         if it is.
         """
         pcovc = self.model(n_components=2, scale_z=True)
-
-        with warnings.catch_warnings():
-            pcovc.fit(self.X, self.Y)
-            warnings.simplefilter("error")
-            self.assertEqual(1 + 1, 2)
+        pcovc.fit(self.X, self.Y)
 
         pcovc = self.model(n_components=2, scale_z=False, z_mean_tol=0, z_var_tol=0)
 
@@ -577,9 +574,11 @@ def test_incompatible_classifier(self):
     def test_none_classifier(self):
         pcovc = PCovC(mixing=0.5, classifier=None)
 
-        pcovc.fit(self.X, self.Y)
-        self.assertTrue(pcovc.classifier is None)
-        self.assertTrue(pcovc.classifier_ is not None)
+        with pytest.warns(match="class does not automatically scale Z"):
+            pcovc.fit(self.X, self.Y)
+
+        assert pcovc.classifier is None
+        assert pcovc.classifier_ is not None
 
     def test_incompatible_coef_shape(self):
         cl_multi = LogisticRegression()
@@ -617,6 +616,7 @@ def test_scale_z_parameter(self):
 
         pcovc_unscaled = self.model(scale_z=False)
         pcovc_unscaled.fit(self.X, self.Y)
+
         assert not np.allclose(
             pcovc_scaled.singular_values_, pcovc_unscaled.singular_values_
         )
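
The rewritten assertions rely on pytest.warns(match=...) instead of manually recording warnings with warnings.catch_warnings; match is treated as a regular expression searched in the warning message, so a distinctive substring is enough. A self-contained sketch of the pattern, using a toy function rather than PCovC itself:

    import warnings

    import pytest

    def fit_without_centering():
        # Stand-in for an estimator that warns when the data are not centered.
        warnings.warn(
            "This class does not automatically center data, and your data mean is "
            "greater than the supplied tolerance."
        )

    def test_centering_warning():
        # The block fails if no warning matching the pattern is raised inside it.
        with pytest.warns(UserWarning, match="does not automatically center data"):
            fit_without_centering()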

tests/test_pcovr.py

Lines changed: 15 additions & 16 deletions
@@ -1,5 +1,4 @@
 import unittest
-import warnings
 
 import numpy as np
 from sklearn import exceptions
@@ -9,6 +8,7 @@
 from sklearn.linear_model import Ridge
 from sklearn.preprocessing import StandardScaler
 from sklearn.utils.validation import check_X_y
+import pytest
 
 from skmatter.decomposition import PCovR
 
@@ -167,9 +167,11 @@ def test_select_sample_space(self):
         pcovr = self.model(n_components=2, tol=1e-12)
 
         n_samples = self.X.shape[1] - 1
-        pcovr.fit(self.X[:n_samples], self.Y[:n_samples])
 
-        self.assertTrue(pcovr.space_ == "sample")
+        with pytest.warns(match="class does not automatically center data"):
+            pcovr.fit(self.X[:n_samples], self.Y[:n_samples])
+
+        assert pcovr.space_ == "sample"
 
     def test_bad_space(self):
         """
@@ -280,13 +282,11 @@ def test_good_n_components(self):
 
     def test_bad_n_components(self):
         """Check that PCovR will not work with any prohibited values of n_components."""
-        with self.assertRaises(ValueError) as cm:
-            pcovr = self.model(n_components="mle", svd_solver="full")
-            pcovr.fit(self.X[:2], self.Y[:2])
-        self.assertEqual(
-            str(cm.exception),
-            "n_components='mle' is only supported if n_samples >= n_features",
-        )
+        pcovr = self.model(n_components="mle", svd_solver="full")
+        m = "n_components='mle' is only supported if n_samples >= n_features"
+        with pytest.raises(ValueError, match=m):
+            with pytest.warns(match="class does not automatically center data"):
+                pcovr.fit(self.X[:2], self.Y[:2])
 
         with self.subTest(type="negative_ncomponents"):
             with self.assertRaises(ValueError) as cm:
@@ -376,13 +376,12 @@ def test_centering(self):
         """
         pcovr = self.model(n_components=2, tol=1e-12)
         X = self.X.copy() + np.random.uniform(-1, 1, self.X.shape[1])
-        with warnings.catch_warnings(record=True) as w:
+        m = (
+            "This class does not automatically center data, and your data mean is "
+            "greater than the supplied tolerance."
+        )
+        with pytest.warns(match=m):
             pcovr.fit(X, self.Y)
-        self.assertEqual(
-            str(w[0].message),
-            "This class does not automatically center data, and your data mean is "
-            "greater than the supplied tolerance.",
-        )
 
     def test_T_shape(self):
         """Check that PCovR returns a latent space projection consistent with the shape

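test_bad_n_components now nests pytest.warns inside pytest.raises so that a single fit call can be checked for both the centering warning and the ValueError. A hedged sketch of how the two context managers compose, using a toy function in place of PCovR:

    import warnings

    import pytest

    def fit_small_dataset():
        # Stand-in that warns and then fails, roughly like fitting with
        # n_components="mle" on fewer samples than features.
        warnings.warn("class does not automatically center data")
        raise ValueError("n_components='mle' is only supported if n_samples >= n_features")

    def test_error_and_warning():
        # The outer context checks the exception, the inner one the warning;
        # both match arguments are regular expressions searched in the messages.
        with pytest.raises(ValueError, match="only supported if n_samples >= n_features"):
            with pytest.warns(UserWarning, match="does not automatically center data"):
                fit_small_dataset()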