7
7
#' @param pattern String to search for in the forecaster name.
8
8
#'
9
9
#' @export
10
- forecaster_lookup <- function (pattern ) {
11
- if (! exists(" g_forecaster_params_grid" )) {
12
- cli :: cli_warn(" Reading `forecaster_params_grid` target. If it's not up to date, results will be off.
13
- Update with `tar_make(g_forecaster_params_grid)`." )
14
- forecaster_params_grid <- tar_read_raw(" forecaster_params_grid" )
15
- } else {
16
- forecaster_params_grid <- g_forecaster_params_grid
10
+ forecaster_lookup <- function (pattern , forecaster_params_grid = NULL ) {
11
+ if (is.null(forecaster_params_grid )) {
12
+ if (! exists(" g_forecaster_params_grid" )) {
13
+ cli :: cli_warn(
14
+ " Reading `forecaster_params_grid` target. If it's not up to date, results will be off.
15
+ Update with `tar_make(g_forecaster_params_grid)`."
16
+ )
17
+ forecaster_params_grid <- tar_read_raw(" forecaster_params_grid" )
18
+ } else {
19
+ forecaster_params_grid <- forecaster_params_grid %|| % g_forecaster_params_grid
20
+ }
17
21
}
18
22
19
23
# Remove common prefix for convenience.
@@ -24,10 +28,10 @@ forecaster_lookup <- function(pattern) {
24
28
pattern <- gsub(" forecaster_" , " " , pattern )
25
29
}
26
30
27
- out <- forecaster_params_grid %> % filter(.data $ id == pattern )
31
+ out <- forecaster_params_grid %> % filter(grepl( pattern , .data $ id ) )
28
32
if (nrow(out ) > 0 ) {
29
33
out %> % glimpse()
30
- return (invisible ( out ) )
34
+ return (out )
31
35
}
32
36
}
33
37
@@ -84,11 +88,12 @@ make_forecaster_grid <- function(tib, family) {
84
88
unname() %> %
85
89
lapply(as.list )
86
90
# for whatever reason, trainer ends up being a list of lists, which we do not want
87
- params_list %<> % lapply(function (x ) {
88
- x $ trainer <- x $ trainer [[1 ]]
89
- x $ lags <- x $ lags [[1 ]]
90
- x
91
- })
91
+ params_list %<> %
92
+ lapply(function (x ) {
93
+ x $ trainer <- x $ trainer [[1 ]]
94
+ x $ lags <- x $ lags [[1 ]]
95
+ x
96
+ })
92
97
93
98
if (length(params_list ) == 0 ) {
94
99
out <- tibble(
@@ -144,9 +149,10 @@ make_ensemble_grid <- function(tib) {
144
149
# '
145
150
# ' @export
146
151
get_exclusions <- function (
147
- date ,
148
- forecaster ,
149
- exclusions_json = here :: here(" scripts" , " geo_exclusions.json" )) {
152
+ date ,
153
+ forecaster ,
154
+ exclusions_json = here :: here(" scripts" , " geo_exclusions.json" )
155
+ ) {
150
156
if (! file.exists(exclusions_json )) {
151
157
return (" " )
152
158
}
@@ -182,8 +188,14 @@ data_substitutions <- function(dataset, substitutions_path, forecast_generation_
182
188
parse_prod_weights <- function (filename , forecast_date_int , forecaster_fn_names ) {
183
189
forecast_date_val <- as.Date(forecast_date_int )
184
190
all_states <- c(
185
- unique(readr :: read_csv(" https://raw.githubusercontent.com/cmu-delphi/covidcast-indicators/refs/heads/main/_delphi_utils_python/delphi_utils/data/2020/state_pop.csv" , show_col_types = FALSE )$ state_id ),
186
- " usa" , " us"
191
+ unique(
192
+ readr :: read_csv(
193
+ " https://raw.githubusercontent.com/cmu-delphi/covidcast-indicators/refs/heads/main/_delphi_utils_python/delphi_utils/data/2020/state_pop.csv" ,
194
+ show_col_types = FALSE
195
+ )$ state_id
196
+ ),
197
+ " usa" ,
198
+ " us"
187
199
)
188
200
all_prod_weights <- readr :: read_csv(filename , comment = " #" , show_col_types = FALSE )
189
201
# if we haven't set specific weights, use the overall defaults
@@ -227,7 +239,10 @@ exclude_geos <- function(geo_forecasters_weights) {
227
239
# Negated set membership: returns TRUE for each element of `x`
# that does NOT occur in `y` (vectorized complement of `%in%`).
`%nin%` <- function(x, y) {
  !(x %in% y)
}
228
240
229
241
get_population_data <- function () {
230
- readr :: read_csv(" https://raw.githubusercontent.com/cmu-delphi/covidcast-indicators/refs/heads/main/_delphi_utils_python/delphi_utils/data/2020/state_pop.csv" , show_col_types = FALSE ) %> %
242
+ readr :: read_csv(
243
+ " https://raw.githubusercontent.com/cmu-delphi/covidcast-indicators/refs/heads/main/_delphi_utils_python/delphi_utils/data/2020/state_pop.csv" ,
244
+ show_col_types = FALSE
245
+ ) %> %
231
246
rename(population = pop ) %> %
232
247
# Add a row for the United States
233
248
bind_rows(
@@ -244,7 +259,11 @@ filter_forecast_geos <- function(forecasts, truth_data) {
244
259
# 1. Filter out forecasts that trend down
245
260
tibble(
246
261
geo_value = subset_geos ,
247
- trend_down = map(subset_geos , ~ lm(value ~ target_end_date , data = forecasts %> % filter(geo_value == .x ))$ coefficients [2 ] < 0 ) %> % unlist()
262
+ trend_down = map(
263
+ subset_geos ,
264
+ ~ lm(value ~ target_end_date , data = forecasts %> % filter(geo_value == .x ))$ coefficients [2 ] < 0
265
+ ) %> %
266
+ unlist()
248
267
) %> %
249
268
filter(trend_down ) %> %
250
269
pull(geo_value ),
@@ -267,7 +286,11 @@ filter_forecast_geos <- function(forecasts, truth_data) {
267
286
geo_value = subset_geos
268
287
) %> %
269
288
left_join(
270
- forecasts %> % filter(near(quantile , 0.75 ), target_end_date == MMWRweek2Date(epiyear(forecast_date ), epiweek(forecast_date )) + 6 ),
289
+ forecasts %> %
290
+ filter(
291
+ near(quantile , 0.75 ),
292
+ target_end_date == MMWRweek2Date(epiyear(forecast_date ), epiweek(forecast_date )) + 6
293
+ ),
271
294
by = " geo_value"
272
295
) %> %
273
296
left_join(
@@ -276,7 +299,8 @@ filter_forecast_geos <- function(forecasts, truth_data) {
276
299
) %> %
277
300
filter(value > = pp ) %> %
278
301
pull(geo_value )
279
- ) %> % unique()
302
+ ) %> %
303
+ unique()
280
304
}
281
305
282
306
# ' Write a submission file. pred is assumed to be in the correct submission format.
@@ -359,7 +383,13 @@ update_site <- function(sync_to_s3 = TRUE) {
359
383
disease <- file_parts [2 ]
360
384
generation_date <- file_parts [5 ]
361
385
362
- report_link <- sprintf(" - [%s Forecasts %s, Rendered %s](%s)" , str_to_title(disease ), date , generation_date , file_name )
386
+ report_link <- sprintf(
387
+ " - [%s Forecasts %s, Rendered %s](%s)" ,
388
+ str_to_title(disease ),
389
+ date ,
390
+ generation_date ,
391
+ file_name
392
+ )
363
393
364
394
# Insert into Production Reports section, skipping a line
365
395
prod_reports_index <- which(grepl(" ## Production Reports" , report_md_content )) + 1
@@ -401,7 +431,9 @@ update_site <- function(sync_to_s3 = TRUE) {
401
431
writeLines(report_md_content , report_md_path )
402
432
403
433
# Convert the markdown file to HTML
404
- system(" pandoc reports/report.md -s -o reports/index.html --css=reports/style.css --mathjax='https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js' --metadata pagetitle='Delphi Reports'" )
434
+ system(
435
+ " pandoc reports/report.md -s -o reports/index.html --css=reports/style.css --mathjax='https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js' --metadata pagetitle='Delphi Reports'"
436
+ )
405
437
}
406
438
407
439
# ' Delete unused reports from the S3 bucket.
0 commit comments