Skip to content

Commit 3cd3305

Browse files
authored
Merge pull request #32 from bcbi/dpa/loglikelihood
Add the `loglikelihood` function
2 parents 7325315 + 52d3c37 commit 3cd3305

11 files changed

Lines changed: 79 additions & 19 deletions

.gitignore

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,14 @@
22
*.jl.cov
33
*.jl.mem
44
.DS_Store
5+
56
/Manifest.toml
7+
68
/dev/
9+
10+
/docs/Manifest.toml
711
/docs/build/
812
/docs/site/
9-
/docs/Manifest.toml
13+
/docs/src/examples/
14+
15+
/test/Manifest.toml

Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "MaximumLikelihoodProblems"
22
uuid = "e149cee4-a9c8-4a03-a7b6-d91720412337"
33
authors = ["Dilum Aluthge"]
4-
version = "0.1.3"
4+
version = "0.1.4"
55

66
[deps]
77
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"

docs/make.jl

Lines changed: 18 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -5,34 +5,38 @@ import Literate
55
import Random
66

77
_this_filename = @__FILE__
8-
_docs_directory = dirname(_this_filename)
9-
root_directory = dirname(_docs_directory)
10-
docs_directory = joinpath(root_directory, "docs")
11-
docs_source_directory = joinpath(root_directory, "docs", "src")
12-
examples_directory = joinpath(root_directory, "examples")
8+
_docs = dirname(_this_filename)
9+
root = dirname(_docs)
10+
docs = joinpath(root, "docs")
11+
docs_src = joinpath(root, "docs", "src")
12+
docs_src_examples = joinpath(root, "docs", "src", "examples")
13+
examples_directory = joinpath(root, "examples")
1314

1415
examples_list = ["linear_regression",
1516
"logistic_regression",
1617
"multinomial_logistic_regression"]
1718

1819
for example in examples_list
19-
example_filename = joinpath(examples_directory, "$(example).jl")
20+
example_filename = joinpath(examples_directory,
21+
"$(example).jl")
2022
Literate.markdown(example_filename,
21-
docs_source_directory)
23+
docs_src_examples)
2224
end
2325

2426
Random.seed!(123)
2527

28+
generated_examples = [joinpath("examples", "$(example).md") for example in examples_list]
29+
2630
Documenter.makedocs(;
27-
root=docs_directory,
28-
modules=[MaximumLikelihoodProblems],
29-
format=Documenter.HTML(),
31+
root = docs,
32+
modules = [MaximumLikelihoodProblems],
33+
format = Documenter.HTML(),
3034
pages = vcat(["Home" => "index.md",],
31-
["$(example).md" for example in examples_list],
35+
generated_examples,
3236
["api_public.md",]),
33-
repo="https://github.com/bcbi/MaximumLikelihoodProblems.jl/blob/{commit}{path}#L{line}",
34-
sitename="MaximumLikelihoodProblems.jl",
35-
authors="Dilum P. Aluthge",
37+
repo = "https://github.com/bcbi/MaximumLikelihoodProblems.jl/blob/{commit}{path}#L{line}",
38+
sitename = "MaximumLikelihoodProblems.jl",
39+
authors = "Dilum P. Aluthge",
3640
)
3741

3842
Documenter.deploydocs(;

docs/src/api_public.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,10 @@
11
# Public API
22

3+
```@index
4+
Pages = ["api_public.md"]
5+
```
6+
37
```@docs
48
MaximumLikelihoodProblems.fit
9+
MaximumLikelihoodProblems.loglikelihood
510
```

examples/linear_regression.jl

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -75,3 +75,7 @@ transformed_gradient_problem = LogDensityProblems.ADgradient(:ForwardDiff,
7575
# σ_hat:
7676

7777
σ_hat = θ_hat[]
78+
79+
# Value of the log likelihood function evaluated at θ_hat:
80+
81+
MaximumLikelihoodProblems.loglikelihood(transformed_gradient_problem, θ_hat)

examples/logistic_regression.jl

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -54,3 +54,7 @@ transformed_gradient_problem = LogDensityProblems.ADgradient(:ForwardDiff,
5454
# β_hat:
5555

5656
β_hat = θ_hat[]
57+
58+
# Value of the log likelihood function evaluated at θ_hat:
59+
60+
MaximumLikelihoodProblems.loglikelihood(transformed_gradient_problem, θ_hat)

examples/multinomial_logistic_regression.jl

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,3 +70,7 @@ transformed_gradient_problem = LogDensityProblems.ADgradient(:ForwardDiff,
7070

7171
num_covariates = size(β_hat, 1)
7272
β_hat_with_base_class = hcat(zeros(num_covariates), β_hat)
73+
74+
# Value of the log likelihood function evaluated at θ_hat:
75+
76+
MaximumLikelihoodProblems.loglikelihood(transformed_gradient_problem, θ_hat)

src/MaximumLikelihoodProblems.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,5 +5,6 @@ include("public.jl")
55
include("types.jl")
66

77
include("fit.jl")
8+
include("loglikelihood.jl")
89

910
end # module

src/fit.jl

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,8 @@ const default_tolerance = 1e-10
1111
"""
1212
fit(transformed_gradient_problem, theta_hat_initial; kwargs...)
1313
14+
Find the maximum likelihood estimator for the parameters `theta`.
15+
1416
# Arguments
1517
- `transformed_gradient_problem`
1618
- `theta_hat_initial`
@@ -62,14 +64,14 @@ function _fit(transformed_gradient_problem,
6264
end
6365
for iter = 1:max_iterations
6466
theta_hat_old = theta_hat_new
65-
loglikelihood, gradient = LogDensityProblems.logdensity_and_gradient(transformed_gradient_problem,
66-
theta_hat_old)
67+
log_likelihood_value, gradient = LogDensityProblems.logdensity_and_gradient(transformed_gradient_problem,
68+
theta_hat_old)
6769
update = learning_rate * gradient
6870
theta_hat_new = theta_hat_old + update
6971
update_norm = sum(abs, update)
7072
if show_progress_meter
7173
showvalues = [(:iteration, iter),
72-
(:loglikelihood, loglikelihood)]
74+
(:loglikelihood, log_likelihood_value)]
7375
ProgressMeter.update!(progress,
7476
update_norm;
7577
showvalues = showvalues)

src/loglikelihood.jl

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
import LogDensityProblems
2+
import TransformVariables
3+
4+
"""
    loglikelihood(transformed_gradient_problem, theta)

Return the value of the log likelihood function evaluated at `theta`.

`theta` is given in the transformed (constrained) parameter space; it is
mapped back to the unconstrained representation with
`TransformVariables.inverse` before the log density is evaluated.

# Arguments
- `transformed_gradient_problem`: a gradient-capable log density problem
  (e.g. the result of `LogDensityProblems.ADgradient`) wrapping a
  transformed log density that carries a `transformation` field.
- `theta`: the parameter value at which to evaluate the log likelihood.
"""
function loglikelihood(transformed_gradient_problem,
                       theta)
    # `parent` unwraps the AD wrapper to reach the transformed log density,
    # which carries the variable transformation.
    transformed_log_density = parent(transformed_gradient_problem)
    transformation = transformed_log_density.transformation
    # Map `theta` back to the unconstrained space expected by
    # `logdensity_and_gradient`.
    theta_inversetransformed = TransformVariables.inverse(transformation,
                                                          theta)
    result = _loglikelihood(transformed_gradient_problem,
                            theta_inversetransformed)
    return result
end
23+
24+
# Internal helper: evaluate the log density at `theta` (already in the
# unconstrained space) and return only the log likelihood value, discarding
# the gradient that `logdensity_and_gradient` also computes.
function _loglikelihood(transformed_gradient_problem,
                        theta)
    evaluation = LogDensityProblems.logdensity_and_gradient(transformed_gradient_problem,
                                                            theta)
    return first(evaluation)
end

0 commit comments

Comments
 (0)