@@ -9,7 +9,7 @@ truth_data_date <- "2023-09-01"
 # needed to create the aux data targets
 end_date <- Sys.Date()
 # Generically set the generation date to the next Wednesday (or today if it's Wednesday)
-forecast_generation_date <- Sys.Date()
+forecast_generation_date <- seq.Date(as.Date("2024-11-20"), Sys.Date(), by = 7L)
 very_latent_locations <- list(list(
   c("source"),
   c("flusurv", "ILI+")
@@ -50,12 +50,6 @@ forecaster_fns <- list2(
       mutate(target_end_date = target_end_date + 3)
   },
 )
-geo_forecasters_weights <- parse_prod_weights(here::here("flu_geo_exclusions.csv"), forecast_generation_date)
-geo_exclusions <- exclude_geos(geo_forecasters_weights)
-if (nrow(geo_forecasters_weights %>% filter(forecast_date == forecast_generation_date)) == 0) {
-  geo_forecasters_weights
-  cli_abort("there are no weights for the forecast date {forecast_generation_date}")
-}

 # This is needed to build the data archive
 ref_time_values_ <- seq.Date(as.Date("2023-10-04"), as.Date("2024-04-24"), by = 7L)
@@ -107,6 +101,23 @@ rlang::list2(
   tar_map(
     values = tidyr::expand_grid(tibble(forecast_generation_date = forecast_generation_date)),
     names = "forecast_generation_date",
+    tar_target(
+      name = geo_forecasters_weights,
+      command = {
+        geo_forecasters_weights <- parse_prod_weights(here::here("flu_geo_exclusions.csv"), forecast_generation_date)
+        if (nrow(geo_forecasters_weights %>% filter(forecast_date == forecast_generation_date)) == 0) {
+          cli_abort("there are no weights for the forecast date {forecast_generation_date}")
+        }
+        geo_forecasters_weights
+      },
+      cue = tar_cue(mode = "always")
+    ),
+    tar_target(
+      name = geo_exclusions,
+      command = {
+        exclude_geos(geo_forecasters_weights)
+      }
+    ),
     tar_target(
       forecast_res,
       command = {
@@ -162,6 +173,23 @@ rlang::list2(
       },
       cue = tar_cue(mode = "always")
     ),
+    tar_target(
+      name = make_climate_submission_csv,
+      command = {
+        forecasts <- forecast_res
+        forecasts %>%
+          filter(forecaster %in% c("climate_base", "climate_geo_agged")) %>%
+          group_by(geo_value, target_end_date, quantile) %>%
+          summarize(forecast_date = first(forecast_date), value = mean(value, na.rm = TRUE), .groups = "drop") %>%
+          ungroup() %>%
+          format_flusight(disease = "flu") %>%
+          write_submission_file(
+            get_forecast_reference_date(as.Date(forecast_generation_date)),
+            file.path(submission_directory, "model-output/CMU-climatological-baseline")
+          )
+      },
+      cue = tar_cue(mode = "always")
+    ),
     tar_target(
       name = validate_result,
       command = {
@@ -179,6 +207,22 @@ rlang::list2(
       },
       cue = tar_cue(mode = "always")
     ),
+    tar_target(
+      name = validate_climate_result,
+      command = {
+        make_climate_submission_csv
+        # only validate if we're saving the result to a hub
+        if (submission_directory != "cache") {
+          validation <- validate_submission(
+            submission_directory,
+            file_path = sprintf("CMU-climatological-baseline/%s-CMU-climatological-baseline.csv", get_forecast_reference_date(as.Date(forecast_generation_date))))
+        } else {
+          validation <- "not validating when there is no hub (set submission_directory)"
+        }
+        validation
+      },
+      cue = tar_cue(mode = "always")
+    ),
     tar_target(
       name = truth_data,
       command = {
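
A minimal standalone sketch of the averaging step that the new make_climate_submission_csv target performs: forecasts from the two climate forecasters are pooled and their quantile values averaged per location, target date, and quantile level. The toy table, the "other_forecaster" name, and the numbers are made up for illustration; only {dplyr} and {tibble} are assumed, and the repo-specific format_flusight()/write_submission_file() steps are omitted.

library(dplyr)
library(tibble)

# Toy forecast table shaped like forecast_res (values are made up).
forecasts <- tribble(
  ~forecaster,         ~geo_value, ~target_end_date,      ~quantile, ~forecast_date,        ~value,
  "climate_base",      "ca",       as.Date("2024-12-07"), 0.5,       as.Date("2024-11-20"), 100,
  "climate_geo_agged", "ca",       as.Date("2024-12-07"), 0.5,       as.Date("2024-11-20"), 120,
  "other_forecaster",  "ca",       as.Date("2024-12-07"), 0.5,       as.Date("2024-11-20"), 90
)

# Keep only the two climate forecasters and average their quantile values,
# carrying forecast_date along; "other_forecaster" is dropped by the filter.
forecasts %>%
  filter(forecaster %in% c("climate_base", "climate_geo_agged")) %>%
  group_by(geo_value, target_end_date, quantile) %>%
  summarize(forecast_date = first(forecast_date), value = mean(value, na.rm = TRUE), .groups = "drop")
# -> one row per (geo_value, target_end_date, quantile), here with value = 110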