Skip to content

Commit 4459557

Browse files
authored
Merge pull request #33 from bcbi/dpa/internal
Add the private `Internal` submodule
2 parents 3cd3305 + 7a78c91 commit 4459557

10 files changed

Lines changed: 163 additions & 8 deletions

Project.toml

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,12 @@
11
name = "MaximumLikelihoodProblems"
22
uuid = "e149cee4-a9c8-4a03-a7b6-d91720412337"
33
authors = ["Dilum Aluthge"]
4-
version = "0.1.4"
4+
version = "0.1.5"
55

66
[deps]
7+
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
8+
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
9+
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
710
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
811
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
912
TransformVariables = "84d833dd-6860-57f9-a1a7-6da5db126cff"
@@ -25,7 +28,6 @@ julia = "1.3"
2528
[extras]
2629
CategoricalArrays = "324d7699-5711-5eae-9e2f-1d82baa6b597"
2730
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
28-
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
2931
Econometrics = "4d6a76a9-bfbc-5492-8924-cf6ed7875f06"
3032
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
3133
GLM = "38e38edf-8417-5370-95a0-9cbb8c7f171a"
@@ -36,4 +38,4 @@ StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c"
3638
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
3739

3840
[targets]
39-
test = ["CategoricalArrays", "DataFrames", "Distributions", "Econometrics", "ForwardDiff", "GLM", "NNlib", "Random", "Statistics", "StatsFuns", "Test"]
41+
test = ["CategoricalArrays", "DataFrames", "Econometrics", "ForwardDiff", "GLM", "NNlib", "Random", "Statistics", "StatsFuns", "Test"]

examples/multinomial_logistic_regression.jl

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,10 +57,12 @@ transformed_gradient_problem = LogDensityProblems.ADgradient(:ForwardDiff,
5757
θ_hat_initial = (; β = β_hat_initial_guess)
5858

5959
# θ_hat:
60+
# (I recommend changing the value of `show_progress_meter` to `true` so that
61+
# you can see the progress.)
6062

6163
θ_hat = MaximumLikelihoodProblems.fit(transformed_gradient_problem,
6264
θ_hat_initial;
63-
show_progress_meter = false)
65+
show_progress_meter = false) # change this `false` to `true`
6466

6567
# β_hat:
6668

src/Internal.jl

Lines changed: 109 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,109 @@
1+
module Internal
2+
3+
import ForwardDiff # TODO: remove the dependency on ForwardDiff.jl
4+
import LogDensityProblems
5+
import LinearAlgebra
6+
import TransformVariables
7+
8+
# Default margin used by the eigenvalue-sign checks below: eigenvalues must
# clear zero by at least `abs(fuzz_factor)` to count as strictly positive or
# strictly negative.
const _default_fuzz_factor = 0.01
9+
10+
"""
    gradient_vector(transformed_gradient_problem, theta)

Return the gradient of the log density at `theta`.

`theta` is first mapped through `TransformVariables.inverse` using the
transformation stored on the wrapped log-density problem, and the gradient
is then computed at the inverse-transformed point via `_gradient_vector`.
"""
function gradient_vector(transformed_gradient_problem, theta)
    # `parent` unwraps the AD-gradient wrapper to reach the transformed
    # log density, which carries the transformation.
    transformation = parent(transformed_gradient_problem).transformation
    raw_theta = TransformVariables.inverse(transformation, theta)
    return _gradient_vector(transformed_gradient_problem, raw_theta)
end
20+
21+
# Kernel: compute the gradient at `raw_theta`, which is assumed to already be
# in the inverse-transformed (raw) coordinates. The log-density value returned
# alongside the gradient is discarded.
function _gradient_vector(transformed_gradient_problem, raw_theta)
    _, gradient =
        LogDensityProblems.logdensity_and_gradient(transformed_gradient_problem,
                                                   raw_theta)
    return gradient
end
27+
28+
"""
    hessian_matrix(transformed_gradient_problem, theta)

Return the Hessian of the log density at `theta`.

`theta` is first mapped through `TransformVariables.inverse` using the
transformation stored on the wrapped log-density problem, and the Hessian
is then computed at the inverse-transformed point via `_hessian_matrix`.
"""
function hessian_matrix(transformed_gradient_problem, theta)
    # Same unwrapping pattern as `gradient_vector`: `parent` exposes the
    # transformed log density, which carries the transformation.
    transformation = parent(transformed_gradient_problem).transformation
    raw_theta = TransformVariables.inverse(transformation, theta)
    return _hessian_matrix(transformed_gradient_problem, raw_theta)
end
38+
39+
# Kernel: compute the Hessian at `theta` (raw coordinates) with ForwardDiff.
#
# NOTE(review): this reaches into LogDensityProblems internals — the `ℓ` field
# of `ForwardDiffLogDensity` and the private `_logdensity_closure` helper —
# which is why only ForwardDiff-backed problems are accepted; see also the
# module-level TODO about removing the ForwardDiff dependency.
function _hessian_matrix(transformed_gradient_problem,
                         theta)
    # Explicit validation instead of `@assert` (asserts may be disabled at
    # higher optimization levels).
    transformed_gradient_problem isa LogDensityProblems.ForwardDiffLogDensity ||
        throw(ArgumentError("`transformed_gradient_problem` must be a `LogDensityProblems.ForwardDiffLogDensity`"))
    # Unwrap the underlying log density `ℓ` from the AD wrapper. (This line
    # was garbled in transit; the access is the wrapper's `ℓ` field, which
    # the next line consumes.)
    ℓ = transformed_gradient_problem.ℓ
    logdensity_closure = LogDensityProblems._logdensity_closure(ℓ)
    hessian = ForwardDiff.hessian(logdensity_closure,
                                  theta)
    return hessian
end
48+
49+
# Return `true` when every entry of `a` has absolute value strictly below
# `tolerance` (vacuously `true` for an empty array).
function _is_approximately_zero(a::AbstractArray;
                                tolerance = 1e-4)
    return all(x -> abs(x) < tolerance, a)
end
54+
55+
# Return `true` when the square matrix `m` equals its adjoint entrywise to
# within `tolerance`. Non-square input is rejected by `checksquare`.
function _is_approximately_hermitian(m::AbstractMatrix;
                                     tolerance = 1e-4)
    LinearAlgebra.checksquare(m)
    # Inlined equivalent of `_is_approximately_zero(m - adjoint(m))`:
    # every entry of the difference must be within `tolerance` of zero.
    difference = m - LinearAlgebra.adjoint(m)
    return all(x -> abs(x) < tolerance, difference)
end
62+
63+
# Return `true` when every eigenvalue of `m` lies strictly below
# `-abs(fuzz_factor)`, i.e. is negative with a safety margin.
#
# Note: the `all(λ .< -abs(fuzz_factor))` condition already implies
# `all(λ .< 0)` because `-abs(fuzz_factor) <= 0`, so the original's extra
# plain-negativity check was redundant and is dropped.
function _all_eigenvalues_are_negative(m::AbstractMatrix;
                                       fuzz_factor = _default_fuzz_factor)
    # Local name avoids shadowing `LinearAlgebra.eigvals`.
    eigenvalues = LinearAlgebra.eigvals(m)
    return all(eigenvalues .< -abs(fuzz_factor))
end
71+
72+
# Return `true` when every eigenvalue of `m` lies strictly above
# `abs(fuzz_factor)`, i.e. is positive with a safety margin.
#
# Note: the `all(λ .> abs(fuzz_factor))` condition already implies
# `all(λ .> 0)` because `abs(fuzz_factor) >= 0`, so the original's extra
# plain-positivity check was redundant and is dropped.
function _all_eigenvalues_are_positive(m::AbstractMatrix;
                                       fuzz_factor = _default_fuzz_factor)
    # Local name avoids shadowing `LinearAlgebra.eigvals`.
    eigenvalues = LinearAlgebra.eigvals(m)
    return all(eigenvalues .> abs(fuzz_factor))
end
80+
81+
"""
    _is_approximately_negative_definite(m; tolerance = 1e-4, fuzz_factor = _default_fuzz_factor)

Return `true` when `m` is approximately Hermitian (to within `tolerance`)
and all of its eigenvalues are negative (with margin `fuzz_factor`).

A Hermitian matrix is negative definite if and only if all of its
eigenvalues are negative.
"""
function _is_approximately_negative_definite(m::AbstractMatrix;
                                             tolerance = 1e-4,
                                             fuzz_factor = _default_fuzz_factor)
    if !_is_approximately_hermitian(m; tolerance = tolerance)
        return false
    end
    return _all_eigenvalues_are_negative(m; fuzz_factor = fuzz_factor)
end
94+
95+
"""
    _is_approximately_positive_definite(m; tolerance = 1e-4, fuzz_factor = _default_fuzz_factor)

Return `true` when `m` is approximately Hermitian (to within `tolerance`)
and all of its eigenvalues are positive (with margin `fuzz_factor`).

A Hermitian matrix is positive definite if and only if all of its
eigenvalues are positive.
"""
function _is_approximately_positive_definite(m::AbstractMatrix;
                                             tolerance = 1e-4,
                                             fuzz_factor = _default_fuzz_factor)
    if !_is_approximately_hermitian(m; tolerance = tolerance)
        return false
    end
    return _all_eigenvalues_are_positive(m; fuzz_factor = fuzz_factor)
end
108+
109+
end # module

src/MaximumLikelihoodProblems.jl

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,4 +7,6 @@ include("types.jl")
77
include("fit.jl")
88
include("loglikelihood.jl")
99

10+
include("Internal.jl")
11+
1012
end # module

src/loglikelihood.jl

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,13 @@ import LogDensityProblems
22
import TransformVariables
33

44
"""
5-
loglikelihood(transformed_gradient_problem, theta_hat)
5+
loglikelihood(transformed_gradient_problem, theta)
66
7-
Return the value of the log likelihood function evaluated at `theta_hat`.
7+
Return the value of the log likelihood function evaluated at `theta`.
88
99
# Arguments
1010
- `transformed_gradient_problem`
11-
- `theta_hat`
11+
- `theta`
1212
"""
1313
function loglikelihood(transformed_gradient_problem,
1414
theta)

src/types.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
struct ConvergenceException
1+
struct ConvergenceException <: Exception
22
end

test/examples.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ Test.@testset "examples" begin
1717

1818
Test.@testset "multinomial logistic regression" begin
1919
Random.seed!(123)
20+
@info("This next test may go five to fifteen minutes without producing any output")
2021
include(joinpath(examples_directory, "multinomial_logistic_regression.jl"))
2122
include(joinpath(test_directory, "examples", "multinomial_logistic_regression.jl"))
2223
end

test/examples/linear_regression.jl

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import MaximumLikelihoodProblems
2+
13
import GLM
24
import Statistics
35
import Test
@@ -38,6 +40,17 @@ Test.@test Statistics.mean(square_error) < 1e-3
3840
Test.@test Statistics.mean(absolute_error_proportional) < 0.007
3941
Test.@test Statistics.mean(square_error_proportional) < 1e-3
4042

43+
gradient_vector_at_θ_hat = MaximumLikelihoodProblems.Internal.gradient_vector(transformed_gradient_problem,
44+
θ_hat)
45+
hessian_matrix_at_θ_hat = MaximumLikelihoodProblems.Internal.hessian_matrix(transformed_gradient_problem,
46+
θ_hat)
47+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_zero(gradient_vector_at_θ_hat)
48+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_hermitian(hessian_matrix_at_θ_hat)
49+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_negative_definite(hessian_matrix_at_θ_hat;
50+
fuzz_factor = 10)
51+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_positive_definite(-hessian_matrix_at_θ_hat;
52+
fuzz_factor = 10)
53+
4154
n, p = size(X)
4255
beta_hat_ols = ( X' * X )\( X' * y )
4356
beta_hat_mle = beta_hat_ols
@@ -52,6 +65,7 @@ Test.@test isapprox(σ_hat, sigma_hat_ols; atol = 1e-4)
5265
Test.@test isapprox(σ_hat, sigma_hat_mle; atol = 1e-4)
5366
Test.@test isapprox(β_hat, beta_hat_ols)
5467
Test.@test isapprox(β_hat, beta_hat_mle)
68+
5569
external_model_glm = GLM.lm(X, y)
5670
sigma_hat_external_model_glm = GLM.dispersion(external_model_glm)
5771
Test.@test isapprox(σ_hat, sigma_hat_external_model_glm; atol = 1e-4)

test/examples/logistic_regression.jl

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import MaximumLikelihoodProblems
2+
13
import GLM
24
import Statistics
35
import Test
@@ -26,6 +28,18 @@ Test.@test Statistics.mean(absolute_error) < 0.070
2628
Test.@test Statistics.mean(square_error) < 0.007
2729
Test.@test Statistics.mean(absolute_error_proportional) < 0.050
2830
Test.@test Statistics.mean(square_error_proportional) < 0.010
31+
32+
gradient_vector_at_θ_hat = MaximumLikelihoodProblems.Internal.gradient_vector(transformed_gradient_problem,
33+
θ_hat)
34+
hessian_matrix_at_θ_hat = MaximumLikelihoodProblems.Internal.hessian_matrix(transformed_gradient_problem,
35+
θ_hat)
36+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_zero(gradient_vector_at_θ_hat)
37+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_hermitian(hessian_matrix_at_θ_hat)
38+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_negative_definite(hessian_matrix_at_θ_hat;
39+
fuzz_factor = 10)
40+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_positive_definite(-hessian_matrix_at_θ_hat;
41+
fuzz_factor = 10)
42+
2943
external_model_glm = GLM.glm(X, y, GLM.Binomial(), GLM.LogitLink())
3044
beta_hat_external_model_glm = GLM.coef(external_model_glm)
3145
Test.@test isapprox(β_hat, beta_hat_external_model_glm)

test/examples/multinomial_logistic_regression.jl

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,17 @@ Test.@test Statistics.mean(square_error) < 0.0095
3131
Test.@test Statistics.mean(absolute_error_proportional) < 0.05
3232
Test.@test Statistics.mean(square_error_proportional) < 0.0055
3333

34+
gradient_vector_at_θ_hat = MaximumLikelihoodProblems.Internal.gradient_vector(transformed_gradient_problem,
35+
θ_hat)
36+
hessian_matrix_at_θ_hat = MaximumLikelihoodProblems.Internal.hessian_matrix(transformed_gradient_problem,
37+
θ_hat)
38+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_zero(gradient_vector_at_θ_hat)
39+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_hermitian(hessian_matrix_at_θ_hat)
40+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_negative_definite(hessian_matrix_at_θ_hat;
41+
fuzz_factor = 10)
42+
Test.@test MaximumLikelihoodProblems.Internal._is_approximately_positive_definite(-hessian_matrix_at_θ_hat;
43+
fuzz_factor = 10)
44+
3445
external_df = DataFrames.DataFrame()
3546
external_df[!, :X_2] = X[:, 2]
3647
n = size(X, 1)

0 commit comments

Comments
 (0)