Commit 1bf03b6

Anton Björklund (Aggrathon) authored and committed
fix some test errors and warnings
1 parent e634be0 commit 1bf03b6

File tree

5 files changed: +90 -81 lines


setup.cfg

+3 -3

@@ -1,12 +1,12 @@
 [metadata]
-name = SLISE
-version = 1.1.1
+name = slise
+version = 1.1.2
 author = Anton Björklund
 author_email = [email protected]
 description = The SLISE algorithm for robust regression and explanations of black box models
 long_description = file: README.md
 long_description_content_type = text/markdown
-url = https://github.com/edahelsini/pyslise
+url = https://github.com/edahelsinki/pyslise
 classifiers =
     Programming Language :: Python :: 3
     License :: OSI Approved :: MIT License

slise/optimisation.py

+1 -1

@@ -188,7 +188,7 @@ def p(x, g, fx, xnorm, gnorm, step, k, num_eval, *args):
             raise error
         else:
             warn(
-                "An optimisation step reaches the maximum number of iterations.",
+                "LBFGS optimisation reaches the maximum number of iterations.",
                 SliseWarning,
             )
     return x0
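
Note: the renamed message is emitted through Python's warnings machinery rather than raised, so a run that hits the LBFGS iteration cap still returns its current estimate. A minimal sketch of that warn-instead-of-raise pattern (the SliseWarning class and the loop below are illustrative stand-ins, not the package's code):

from warnings import warn

class SliseWarning(RuntimeWarning):
    """Stand-in for the warning category defined in the slise package."""

def run_lbfgs(max_iterations: int = 200) -> None:
    for _ in range(max_iterations):
        pass  # an optimisation step would go here
    # reaching the cap is reported as a warning, not an error
    warn("LBFGS optimisation reaches the maximum number of iterations.", SliseWarning)

run_lbfgs()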

slise/plot.py

+4 -1

@@ -377,7 +377,10 @@ def fill_density(ax, X, x, n):
     if np.var(X) == 0:
         X = np.random.normal(X[0], 1e-8, len(X))
     kde1 = gaussian_kde(X, 0.2)
-    kde2 = gaussian_kde(X[subset], 0.2)
+    if np.any(subset):
+        kde2 = gaussian_kde(X[subset], 0.2)
+    else:
+        kde2 = lambda x: x * 0
     lim = extended_limits(X, 0.1, 100)
     ax.plot(lim, kde1(lim), color="black", label="Dataset")
     ax.plot(
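
Note: scipy's gaussian_kde cannot be fitted to an empty sample, so an all-False subset mask would previously make fill_density raise. A minimal sketch of the failure mode and the new fallback, assuming subset is a boolean mask over the rows of X as in the plotting code:

import numpy as np
from scipy.stats import gaussian_kde

X = np.random.normal(size=100)
subset = np.zeros(100, dtype=bool)  # no points selected; gaussian_kde(X[subset], 0.2) would raise

if np.any(subset):
    kde2 = gaussian_kde(X[subset], 0.2)
else:
    kde2 = lambda x: x * 0  # zero density everywhere

lim = np.linspace(X.min(), X.max(), 50)
print(kde2(lim).max())  # 0.0 instead of an exception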

tests/test_plot.py

+9 -9

@@ -9,17 +9,17 @@
 def test_plot2d():
     print("Testing 2D plots")
     X, Y, mod = data_create2(40, 1)
-    reg = regression(X, Y, 0.1, lambda1=0.01, lambda2=0.01, intercept=False)
+    reg = regression(X, Y, 0.1, lambda1=1e-4, lambda2=1e-4, intercept=False)
     reg.plot_2d(fig=plt.figure())
     reg = regression(
-        X, Y, 0.1, lambda1=0.01, lambda2=0.01, intercept=True, normalise=True
+        X, Y, 0.1, lambda1=1e-4, lambda2=1e-4, intercept=True, normalise=True
     )
     reg.plot_2d(fig=plt.figure())
-    exp = explain(X, Y, 0.1, 5, lambda1=0.01, lambda2=0.01)
+    exp = explain(X, Y, 0.1, 5, lambda1=1e-4, lambda2=1e-4)
     exp.plot_2d(fig=plt.figure())
     Y -= Y.min() - 0.01
     Y /= Y.max() + 0.01
-    exp = explain(X, Y, 1.0, 5, lambda1=0.01, lambda2=0.01, logit=True)
+    exp = explain(X, Y, 1.0, 5, lambda1=1e-4, lambda2=1e-4, logit=True)
     exp.plot_2d(fig=plt.figure())
     # plt.show()
     plt.close("all")
@@ -28,17 +28,17 @@ def test_plot2d():
 def test_dist():
     print("Testing dist plots")
     X, Y, mod = data_create2(40, 5)
-    reg = regression(X, Y, 0.1, lambda1=0.01, lambda2=0.01, intercept=False)
+    reg = regression(X, Y, 0.1, lambda1=1e-4, lambda2=1e-4, intercept=False)
     reg.plot_dist(fig=plt.figure())
     reg = regression(
-        X, Y, 0.1, lambda1=0.01, lambda2=0.01, intercept=True, normalise=True
+        X, Y, 0.1, lambda1=1e-4, lambda2=1e-4, intercept=True, normalise=True
     )
     reg.plot_dist(fig=plt.figure())
-    exp = explain(X, Y, 0.1, 5, lambda1=0.01, lambda2=0.01)
+    exp = explain(X, Y, 0.1, 5, lambda1=1e-4, lambda2=1e-4)
     exp.plot_dist(fig=plt.figure())
     Y = Y - Y.min() - 0.01
     Y /= Y.max() + 0.01
-    exp = explain(X, Y, 1.0, 5, lambda1=0.01, lambda2=0.01, normalise=True, logit=True)
+    exp = explain(X, Y, 1.0, 5, lambda1=1e-4, lambda2=1e-4, normalise=True, logit=True)
     exp.plot_dist(fig=plt.figure())
     reg.plot_subset(fig=plt.figure())
     exp.plot_subset(fig=plt.figure())
@@ -50,7 +50,7 @@ def test_img():
     print("Testing image plots")
     X, Y, mod = data_create2(200, 16)
     X[:, 6] = X[:, 9] = X[:, 11] = 0
-    exp = explain(X, Y, 0.1, 5, lambda1=0.01, lambda2=0.01)
+    exp = explain(X, Y, 0.1, 5, lambda1=1e-4, lambda2=1e-4)
     exp.plot_image(4, 4, fig=plt.figure())
     # plt.show()
     plt.close("all")
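
Note: the only substantive change in these tests is dropping the regularisation weights from 0.01 to 1e-4. In SLISE, lambda1 and lambda2 weigh L1 (sparsity) and L2 (ridge) penalties on the linear coefficients, so the smaller values shrink the test models less. A rough sketch of how such penalty terms combine, for illustration only (the package's actual objective is implemented in slise.optimisation.loss_smooth):

import numpy as np

def regularisation(alpha: np.ndarray, lambda1: float, lambda2: float) -> float:
    # lambda1 weighs a LASSO-style L1 penalty, lambda2 a ridge-style L2 penalty
    return lambda1 * np.sum(np.abs(alpha)) + lambda2 * np.sum(alpha ** 2)

alpha = np.array([0.5, -2.0, 0.0])
print(regularisation(alpha, 0.01, 0.01))  # old test setting
print(regularisation(alpha, 1e-4, 1e-4))  # new setting, 100x weaker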

tests/test_slise.py

+73 -67

@@ -1,3 +1,4 @@
+from warnings import catch_warnings
 import numpy as np
 from slise.optimisation import loss_smooth
 from slise.data import add_intercept_column, scale_same
@@ -29,80 +30,82 @@ def test_initialise_old():


 def test_initialise():
-    print("Testing initialisation")
-    X, Y = data_create(20, 5)
-    zero = np.zeros(5)
-    alpha, beta = initialise_candidates(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
-    X, Y = data_create(20, 12)
-    zero = np.zeros(12)
-    alpha, beta = initialise_candidates(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
-    X, Y = data_create(20, 11)
-    X = add_intercept_column(X)
-    zero = np.zeros(12)
-    alpha, beta = initialise_candidates(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
-    X, Y = data_create(20, 12)
-    x = np.random.normal(size=12)
-    X = X - x[None, :]
-    zero = np.zeros(12)
-    alpha, beta = initialise_candidates(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
+    with catch_warnings(record=True) as w:
+        print("Testing initialisation")
+        X, Y = data_create(20, 5)
+        zero = np.zeros(5)
+        alpha, beta = initialise_candidates(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )
+        X, Y = data_create(20, 12)
+        zero = np.zeros(12)
+        alpha, beta = initialise_candidates(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )
+        X, Y = data_create(20, 11)
+        X = add_intercept_column(X)
+        zero = np.zeros(12)
+        alpha, beta = initialise_candidates(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )
+        X, Y = data_create(20, 12)
+        x = np.random.normal(size=12)
+        X = X - x[None, :]
+        zero = np.zeros(12)
+        alpha, beta = initialise_candidates(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )


 def test_initialise2():
-    print("Testing initialisation2")
-    X, Y = data_create(20, 5)
-    zero = np.zeros(5)
-    alpha, beta = initialise_candidates2(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
-    X, Y = data_create(20, 12)
-    zero = np.zeros(12)
-    alpha, beta = initialise_candidates2(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
-    X, Y = data_create(20, 11)
-    X = add_intercept_column(X)
-    zero = np.zeros(12)
-    alpha, beta = initialise_candidates2(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
-    X, Y = data_create(20, 12)
-    x = np.random.normal(size=12)
-    X = X - x[None, :]
-    zero = np.zeros(12)
-    alpha, beta = initialise_candidates2(X, Y, 0.1)
-    assert beta > 0
-    assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
-        zero, X, Y, 0.1, beta=beta
-    )
+    with catch_warnings(record=True) as w:
+        print("Testing initialisation2")
+        X, Y = data_create(20, 5)
+        zero = np.zeros(5)
+        alpha, beta = initialise_candidates2(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )
+        X, Y = data_create(20, 12)
+        zero = np.zeros(12)
+        alpha, beta = initialise_candidates2(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )
+        X, Y = data_create(20, 11)
+        X = add_intercept_column(X)
+        zero = np.zeros(12)
+        alpha, beta = initialise_candidates2(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )
+        X, Y = data_create(20, 12)
+        x = np.random.normal(size=12)
+        X = X - x[None, :]
+        zero = np.zeros(12)
+        alpha, beta = initialise_candidates2(X, Y, 0.1)
+        assert beta > 0
+        assert loss_smooth(alpha, X, Y, 0.1, beta=beta) <= loss_smooth(
+            zero, X, Y, 0.1, beta=beta
+        )


 def test_slise_reg():
     print("Testing slise regression")
     X, Y, mod = data_create2(40, 5)
     reg1 = regression(
-        X, Y, epsilon=0.1, lambda1=0.01, lambda2=0.01, intercept=True, normalise=True,
+        X, Y, epsilon=0.1, lambda1=1e-4, lambda2=1e-4, intercept=True, normalise=True,
     )
     reg1.print()
     Yp = mat_mul_inter(X, reg1.coefficients)
@@ -119,7 +122,7 @@ def test_slise_reg():
     ), f"SLISE loss should be negative ({reg1.score():.2f}, {reg1.subset().mean():.2f})"
     assert 1.0 >= reg1.subset().mean() > 0.75
     reg2 = regression(
-        X, Y, epsilon=0.1, lambda1=0.01, lambda2=0.01, intercept=True, normalise=False,
+        X, Y, epsilon=0.1, lambda1=1e-4, lambda2=1e-4, intercept=True, normalise=False,
     )
     reg2.print()
     assert (
@@ -141,7 +144,7 @@ def test_slise_exp():
     X, Y, mod = data_create2(100, 5)
     x = np.random.normal(size=5)
     y = np.random.normal()
-    reg = explain(X, Y, 0.1, x, y, lambda1=0.01, lambda2=0.01, normalise=True)
+    reg = explain(X, Y, 0.1, x, y, lambda1=1e-4, lambda2=1e-4, normalise=True)
     reg.print()
     assert reg.score() <= 0, f"Slise loss should usually be <=0 ({reg.score():.2f})"
     assert 1.0 >= reg.subset().mean() > 0.0
@@ -153,11 +156,14 @@ def test_slise_exp():
     assert reg.score() <= 0, f"Slise loss should usually be <=0 ({reg.score():.2f})"
     assert 1.0 >= reg.subset().mean() > 0.0
     reg = explain(X, Y, 0.1, x, y, lambda1=0, lambda2=0, normalise=False)
+    reg.print()
     assert reg.score() <= 0, f"Slise loss should usually be <=0 ({reg.score():.2f})"
     assert 1.0 >= reg.subset().mean() > 0.0
     reg = explain(X, Y, 0.1, 19, lambda1=0.01, lambda2=0.01, normalise=False)
+    reg.print()
     assert reg.score() <= 0, f"Slise loss should usually be <=0 ({reg.score():.2f})"
     assert 1.0 >= reg.subset().mean() > 0.0
     reg = explain(X, Y, 0.1, 19, lambda1=0, lambda2=0, normalise=False)
+    reg.print()
     assert reg.score() <= 0, f"Slise loss should usually be <=0 ({reg.score():.2f})"
     assert 1.0 >= reg.subset().mean() > 0.0
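
Note: the initialisation tests are now wrapped in catch_warnings(record=True), which routes any warnings raised inside the block (such as the SliseWarning above) into the list w instead of printing them, keeping the test output clean. A minimal sketch of the standard-library mechanism, independent of the slise package:

from warnings import catch_warnings, simplefilter, warn

with catch_warnings(record=True) as w:
    simplefilter("always")  # deliver every warning to the recorder
    warn("LBFGS optimisation reaches the maximum number of iterations.", RuntimeWarning)
    # nothing is printed; the warning object is appended to w instead
    assert len(w) == 1 and "maximum number" in str(w[0].message)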
