1717from .pull_api import GoogleHealthTrends , get_counts_states , get_counts_dma
1818from .map_values import derived_counts_from_dma
1919from .export import export_csv
20- from .constants import SIGNALS , RAW , SMOOTHED , MSA , HRR , STATE , DMA
20+ from .constants import (SIGNALS , RAW , SMOOTHED ,
21+ MSA , HRR , STATE , DMA ,
22+ PULL_START_DATE )
2123
2224
2325def run_module ():
@@ -39,12 +41,12 @@ def run_module():
3941 wip_signal = params ["wip_signal" ]
4042 cache_dir = params ["cache_dir" ]
4143
42- arch_diff = S3ArchiveDiffer (
43- cache_dir , export_dir ,
44- params ["bucket_name" ], "ght" ,
45- params ["aws_credentials" ])
46- arch_diff .update_cache ()
47- print (arch_diff )
44+ # arch_diff = S3ArchiveDiffer(
45+ # cache_dir, export_dir,
46+ # params["bucket_name"], "ght",
47+ # params["aws_credentials"])
48+ # arch_diff.update_cache()
49+ # print(arch_diff)
4850 # if missing start_date, set to today (GMT) minus 5 days
4951 if start_date == "" :
5052 now = datetime .datetime .now (datetime .timezone .utc )
@@ -69,10 +71,10 @@ def run_module():
6971
7072 # read data frame version of the data
7173 df_state = get_counts_states (
72- ght , start_date , end_date , static_dir = static_dir , data_dir = data_dir
74+ ght , PULL_START_DATE , end_date , static_dir = static_dir , data_dir = data_dir
7375 )
7476 df_dma = get_counts_dma (
75- ght , start_date , end_date , static_dir = static_dir , data_dir = data_dir
77+ ght , PULL_START_DATE , end_date , static_dir = static_dir , data_dir = data_dir
7678 )
7779 df_hrr , df_msa = derived_counts_from_dma (df_dma , static_dir = static_dir )
7880
@@ -81,27 +83,35 @@ def run_module():
8183 for signal in signal_names :
8284 if signal .endswith (SMOOTHED ):
8385 # export each geographic region, with both smoothed and unsmoothed data
84- export_csv (df_state , STATE , signal , smooth = True , receiving_dir = export_dir )
85- export_csv (df_dma , DMA , signal , smooth = True , receiving_dir = export_dir )
86- export_csv (df_hrr , HRR , signal , smooth = True , receiving_dir = export_dir )
87- export_csv (df_msa , MSA , signal , smooth = True , receiving_dir = export_dir )
86+ export_csv (df_state , STATE , signal , smooth = True ,
87+ start_date = start_date , receiving_dir = export_dir )
88+ export_csv (df_dma , DMA , signal , smooth = True ,
89+ start_date = start_date , receiving_dir = export_dir )
90+ export_csv (df_hrr , HRR , signal , smooth = True ,
91+ start_date = start_date , receiving_dir = export_dir )
92+ export_csv (df_msa , MSA , signal , smooth = True ,
93+ start_date = start_date , receiving_dir = export_dir )
8894 elif signal .endswith (RAW ):
89- export_csv (df_state , STATE , signal , smooth = False , receiving_dir = export_dir )
90- export_csv (df_dma , DMA , signal , smooth = False , receiving_dir = export_dir )
91- export_csv (df_hrr , HRR , signal , smooth = False , receiving_dir = export_dir )
92- export_csv (df_msa , MSA , signal , smooth = False , receiving_dir = export_dir )
93- # Diff exports, and make incremental versions
94- _ , common_diffs , new_files = arch_diff .diff_exports ()
95-
96- # Archive changed and new files only
97- to_archive = [f for f , diff in common_diffs .items () if diff is not None ]
98- to_archive += new_files
99- _ , fails = arch_diff .archive_exports (to_archive )
100-
101- # Filter existing exports to exclude those that failed to archive
102- succ_common_diffs = {f : diff for f , diff in common_diffs .items () if f not in fails }
103- arch_diff .filter_exports (succ_common_diffs )
104-
105- # Report failures: someone should probably look at them
106- for exported_file in fails :
107- print (f"Failed to archive '{ exported_file } '" )
95+ export_csv (df_state , STATE , signal , smooth = False ,
96+ start_date = start_date , receiving_dir = export_dir )
97+ export_csv (df_dma , DMA , signal , smooth = False ,
98+ start_date = start_date , receiving_dir = export_dir )
99+ export_csv (df_hrr , HRR , signal , smooth = False ,
100+ start_date = start_date , receiving_dir = export_dir )
101+ export_csv (df_msa , MSA , signal , smooth = False ,
102+ start_date = start_date , receiving_dir = export_dir )
103+ # # Diff exports, and make incremental versions
104+ # _, common_diffs, new_files = arch_diff.diff_exports()
105+ #
106+ # # Archive changed and new files only
107+ # to_archive = [f for f, diff in common_diffs.items() if diff is not None]
108+ # to_archive += new_files
109+ # _, fails = arch_diff.archive_exports(to_archive)
110+ #
111+ # # Filter existing exports to exclude those that failed to archive
112+ # succ_common_diffs = {f: diff for f, diff in common_diffs.items() if f not in fails}
113+ # arch_diff.filter_exports(succ_common_diffs)
114+ #
115+ # # Report failures: someone should probably look at them
116+ # for exported_file in fails:
117+ # print(f"Failed to archive '{exported_file}'")