Skip to content
This repository was archived by the owner on Nov 10, 2025. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
f349f84
multinomial regression integrated.
elibol Oct 6, 2021
0b8ef7a
revert changes for autoscaler.
elibol Oct 6, 2021
42c5abc
add sklearn to testing requirements.
elibol Oct 6, 2021
5f51309
pylint.
elibol Oct 6, 2021
807567e
black
elibol Oct 6, 2021
984038c
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 6, 2021
ab775b6
lint.
elibol Oct 6, 2021
446d28f
Merge branch 'multinomial-lr' of github.com:nums-project/nums into multinomial-lr
elibol Oct 6, 2021
146f946
Merge branch 'master' into multinomial-lr
elibol Oct 6, 2021
076763d
Merge branch 'master' into multinomial-lr
elibol Oct 7, 2021
8880dfb
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 13, 2021
2fd36a7
make default penalty "l2"
vinamrabenara Oct 13, 2021
0269d5f
minor fix
vinamrabenara Oct 13, 2021
081cd47
add tests with sklearn
vinamrabenara Oct 13, 2021
0264fd6
fix for l2 penalty
vinamrabenara Oct 14, 2021
c0196a1
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 14, 2021
20347d4
drop this commit
vinamrabenara Oct 14, 2021
a587c08
Merge branch 'multinomial-lr' of https://github.com/nums-project/nums into multinomial-lr
vinamrabenara Oct 14, 2021
5c6103d
Revert "drop this commit"
vinamrabenara Oct 14, 2021
b35bf8c
fix comments
elibol Oct 15, 2021
e1e6979
Merge branch 'multinomial-lr' of github.com:nums-project/nums into multinomial-lr
elibol Oct 15, 2021
2e5c96f
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 19, 2021
e36868b
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 19, 2021
847443c
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 21, 2021
2c1c8f1
fix
vinamrabenara Oct 22, 2021
37a7a4d
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 22, 2021
f8d2fda
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 22, 2021
69519da
skip long tests
vinamrabenara Oct 22, 2021
c71067d
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 22, 2021
a4e932f
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 25, 2021
70d0f48
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 28, 2021
482dcba
misc
vinamrabenara Oct 28, 2021
353b015
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 28, 2021
8866026
Merge branch 'master' into multinomial-lr
vinamrabenara Oct 31, 2021
774e2a2
Merge branch 'master' into multinomial-lr
vinamrabenara Nov 1, 2021
238f8de
Merge branch 'master' into multinomial-lr
vinamrabenara Nov 4, 2021
e124a49
Merge branch 'master' into multinomial-lr
vinamrabenara Nov 10, 2021
04daece
Merge branch 'master' into multinomial-lr
vinamrabenara Nov 21, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
55 changes: 30 additions & 25 deletions nums/models/glms.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
#
# The link function is expressed as follows.
# E(Y | X) = mu
# Define the linear predictor eta = X.T @ beta
# Define the linear predictor eta = X @ beta
# Define g as the link function, so that g(mu) = eta
# E(Y | X) = g^{-1}(eta)
#
Expand Down Expand Up @@ -62,7 +62,35 @@
# g(mu) = (b')^{-1}(mu) = ln(mu/(1-mu)) = ln(p/(1-p)) = theta(p)


class GLM(object):
class Model(object):
    """Abstract interface for models driven by iterative optimizers.

    A concrete subclass (e.g. GLM) supplies the forward pass together with
    the training objective and its first and second derivatives with respect
    to the parameter vector beta. Optimizers such as LBFGS program against
    this interface only.
    """

    def forward(self, X, beta=None):
        """Compute model predictions for inputs X under parameters beta."""
        raise NotImplementedError

    def objective(
        self,
        X: BlockArray,
        y: BlockArray,
        beta: BlockArray = None,
        mu: BlockArray = None,
    ):
        """Evaluate the training objective at beta (or at a cached mean mu)."""
        raise NotImplementedError

    def gradient(
        self,
        X: BlockArray,
        y: BlockArray,
        mu: BlockArray = None,
        beta: BlockArray = None,
    ):
        """Gradient of the objective with respect to beta."""
        raise NotImplementedError

    def hessian(self, X: BlockArray, y: BlockArray, mu: BlockArray = None):
        """Hessian of the objective with respect to beta."""
        raise NotImplementedError


class GLM(Model):
def __init__(
self,
penalty="none",
Expand Down Expand Up @@ -207,29 +235,6 @@ def predict(self, X):
def link_inv(self, eta: BlockArray):
raise NotImplementedError()

def objective(
self,
X: BlockArray,
y: BlockArray,
beta: BlockArray = None,
mu: BlockArray = None,
):
raise NotImplementedError()

def gradient(
self,
X: BlockArray,
y: BlockArray,
mu: BlockArray = None,
beta: BlockArray = None,
):
# gradient w.r.t. beta.
raise NotImplementedError()

def hessian(self, X: BlockArray, y: BlockArray, mu: BlockArray = None):
# Hessian w.r.t. beta.
raise NotImplementedError()

def deviance(self, y, y_pred):
raise NotImplementedError()

Expand Down
8 changes: 4 additions & 4 deletions nums/models/lbfgs.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,14 @@
import numpy as np
from nums.core.array.application import ArrayApplication
from nums.core.application_manager import instance as _instance
from nums.models.glms import GLM
from nums.models.glms import Model


# Based on Nocedal and Wright, chapters 2, 3, 6 and 7.


class BackTrackingLineSearch(object):
def __init__(self, model: GLM):
def __init__(self, model: Model):
self.app = _instance()
self.model = model

Expand Down Expand Up @@ -64,9 +64,9 @@ def __init__(self, k, s, y):


class LBFGS(object):
def __init__(self, model: GLM, m=10, max_iter=100, thresh=1e-4, dtype=np.float64):
def __init__(self, model: Model, m=10, max_iter=100, thresh=1e-4, dtype=np.float64):
self.app: ArrayApplication = _instance()
self.model: GLM = model
self.model: Model = model
self.m = m
self.max_iter = max_iter
self.thresh = thresh
Expand Down
Loading