#'
#' @details
#' Uses \code{\link[stats]{optim}} to maximize the log-likelihood. The score
#' function (gradient) is computed from the same \code{\link{loglik}} closure
#' via \code{\link[numDeriv]{grad}}, and the Hessian at the MLE via
#' \code{\link[numDeriv]{hessian}}. One-parameter problems auto-upgrade from
#' Nelder-Mead to BFGS, because Nelder-Mead is unreliable in one dimension; a
#' warning is issued only when Nelder-Mead was explicitly requested.
#'
#' @examples
#' \donttest{
#' model <- dfr_series_md(components = list(
#'   dfr_exponential(0.1), dfr_exponential(0.2)
#' ))
#' set.seed(1)
#' df <- rdata(model)(theta = c(0.1, 0.2), n = 200, tau = 10, p = 0)
#' solver <- fit(model)
#' result <- solver(df, par = c(0.15, 0.15))
#' coef(result)
#' }
#' @importFrom generics fit
#' @importFrom likelihood.model fisher_mle
#' @importFrom stats optim
#' @export
fit.dfr_series_md <- function(object, ...) {
  # Build the log-likelihood closure once; the returned solver reuses it for
  # the objective, the numerical gradient, and the numerical Hessian, so all
  # three are guaranteed to agree on the same likelihood surface.
  ll_fn <- loglik(object)

  # Returns a solver: df is the observed data, par the starting values,
  # method the optim() algorithm, and ... is forwarded to optim() (e.g.
  # lower/upper bounds for "L-BFGS-B" and "Brent").
  function(df, par,
           method = c("Nelder-Mead", "BFGS", "SANN", "CG",
                      "L-BFGS-B", "Brent"),
           ..., control = list()) {
    stopifnot(!is.null(par))
    # Record whether the caller chose a method before match.arg() collapses
    # the default choice vector down to its first element.
    user_picked_method <- !missing(method)
    method <- match.arg(method)
    # fnscale = -1 makes optim() maximize; caller-supplied control entries
    # override this default via modifyList().
    control <- modifyList(list(fnscale = -1), control)

    # Nelder-Mead is unreliable in one dimension: silently upgrade when it
    # was only the default, but warn when the caller explicitly asked for it.
    if (length(par) == 1 && method == "Nelder-Mead") {
      if (user_picked_method) {
        warning("Nelder-Mead is unreliable for 1-parameter problems; ",
                "switching to BFGS.", call. = FALSE)
      }
      method <- "BFGS"
    }

    obj <- function(p) ll_fn(df, p)
    gr <- function(p) numDeriv::grad(obj, p)

    sol <- optim(
      par = par, fn = obj,
      # SANN ignores gradients, so skip the (costly) numerical gradient.
      gr = if (method == "SANN") NULL else gr,
      hessian = FALSE, method = method, control = control,
      # Forward extra solver arguments (e.g. lower/upper); previously ...
      # was accepted by the closure but silently dropped.
      ...
    )

    fisher_mle(
      par = sol$par,
      loglik_val = sol$value,
      # Numerical curvature and score at the MLE, for variance-covariance
      # estimation and convergence diagnostics.
      hessian = numDeriv::hessian(obj, sol$par),
      score_val = numDeriv::grad(obj, sol$par),
      nobs = nrow(df),
      converged = (sol$convergence == 0)
    )
  }
}
|
0 commit comments