Skip to content

Commit 4faaed3

Browse files
authored
Merge pull request #129 from mtsousa/feature_ridge
Implements ridge regression
2 parents 4e9fbd6 + 23c0356 commit 4faaed3

File tree

2 files changed

+49
-4
lines changed

2 files changed

+49
-4
lines changed

sysidentpy/model_structure_selection/forward_regression_orthogonal_least_squares.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ class FROLS(Estimators, BaseMSS):
8989
eps : float, default=np.finfo(np.float64).eps
9090
Normalization factor of the normalized filters.
9191
ridge_param : float, default=np.finfo(np.float64).eps
92-
Regularization parameter used in ridge regression
92+
Regularization parameter used in ridge regression.
9393
gama : float, default=0.2
9494
The leakage factor of the Leaky LMS method.
9595
weight : float, default=0.02

sysidentpy/parameter_estimation/estimators.py

+48-3
Original file line numberDiff line numberDiff line change
@@ -23,13 +23,13 @@ def __init__(
2323
offset_covariance=0.2,
2424
mu=0.01,
2525
eps=np.finfo(np.float64).eps,
26-
ridge_param=np.finfo(np.float64).eps, # for regularized ridge regression
26+
ridge_param=np.finfo(np.float64).eps,
2727
gama=0.2,
2828
weight=0.02,
2929
basis_function=None,
3030
):
3131
self.eps = eps
32-
self.ridge_param = ridge_param # for regularized ridge regression
32+
self.ridge_param = ridge_param
3333
self.mu = mu
3434
self.offset_covariance = offset_covariance
3535
self.max_lag = max_lag
@@ -54,6 +54,7 @@ def _validate_params(self):
5454
"ridge_param": self.ridge_param,
5555
"gama": self.gama,
5656
"weight": self.weight,
57+
"ridge_param": self.ridge_param,
5758
}
5859
for attribute, value in attributes.items():
5960
if not isinstance(value, (np.integer, int, float)):
@@ -118,7 +119,7 @@ def least_squares(self, psi, y):
118119
theta = np.linalg.lstsq(psi, y, rcond=None)[0]
119120
return theta
120121

121-
def ridge_regression(self, psi, y):
122+
def ridge_regression_classic(self, psi, y):
122123
"""Estimate the model parameters using the regularized least squares method
123124
known as ridge regression. Based on the least_squares module and uses
124125
the same data format but you need to pass ridge_param in the call to
@@ -936,3 +937,47 @@ def least_mean_squares_mixed_norm(self, psi, y):
936937
theta[:, i] = tmp_list.flatten()
937938

938939
return theta[:, -1].reshape(-1, 1)
940+
941+
def ridge_regression(self, psi, y):
    """Estimate the model parameters using SVD and the ridge regression method.

    Solves the regularized least squares problem through the singular value
    decomposition of the information matrix:

        theta = V (D^2 + ridge_param * I)^-1 D U^T y

    If the SVD does not converge, falls back to the classic (normal-equation)
    ridge regression implementation.

    Parameters
    ----------
    psi : ndarray of floats
        The information matrix of the model.
    y : array-like of shape = y_training
        The data used to train the model.

    Returns
    -------
    theta : array-like of shape = number_of_model_elements
        The estimated parameters of the model.

    References
    ----------
    - Manuscript: Hoerl, A. E.; Kennard, R. W. Ridge regression:
       applications to nonorthogonal problems. Technometrics,
       Taylor & Francis, v. 12, n. 1, p. 69-82, 1970.
    - StackExchange: whuber. The proof of shrinking coefficients using ridge
       regression through "spectral decomposition".
       Cross Validated, accessed 21 September 2023,
       https://stats.stackexchange.com/q/220324
    """
    self._check_linear_dependence_rows(psi)

    try:
        U, d, Vt = np.linalg.svd(psi, full_matrices=False)
        D = np.diag(d)
        # `eye` instead of `I`: single-letter `I` is ambiguous (PEP 8 E741).
        eye = np.identity(len(d))

        # Discard the initial conditions (first max_lag samples) so the
        # target vector matches the rows of the information matrix. Bind to
        # a new name instead of rebinding `y`, so the fallback below still
        # sees the caller's original, unsliced vector.
        y_reduced = y[self.max_lag :, 0].reshape(-1, 1)

        theta = (
            Vt.T
            @ np.linalg.inv(D**2 + self.ridge_param * eye)
            @ D
            @ U.T
            @ y_reduced
        )
    except np.linalg.LinAlgError:
        # Catch only SVD non-convergence (what np.linalg.svd raises); a bare
        # `except:` would also swallow KeyboardInterrupt and unrelated bugs.
        warnings.warn(
            "The SVD computation does not converge. Value calculated with the classic algorithm",
            stacklevel=2,
        )
        # Pass the original `y`: the classic routine is assumed to slice off
        # the first max_lag samples itself (the convention used throughout
        # this class), so handing it the already-reduced vector would drop
        # those samples twice. NOTE(review): confirm against
        # ridge_regression_classic's body.
        theta = self.ridge_regression_classic(psi, y)

    return theta

0 commit comments

Comments
 (0)