@@ -45,6 +45,30 @@ def _epiweek_label(w: Week) -> str:
4545 return f"{ w .year } -W{ w .week :02d} "
4646
4747
48+ def _day_key (d : datetime .date ) -> int :
49+ # Matches API time_value format YYYYMMDD, e.g. 20240115
50+ return d .year * 10000 + d .month * 100 + d .day
51+
52+
53+ def _day_label (d : datetime .date ) -> str :
54+ return d .strftime ("%Y-%m-%d" )
55+
56+
def days_in_date_range(start_date_str: str, end_date_str: str):
    """Generate all days in the date range.

    Both endpoints are '%Y-%m-%d' strings and are inclusive; if they arrive
    in reverse order they are swapped, so the result is always ascending.
    Returns a list of ``datetime.date`` objects.
    """
    first = datetime.strptime(start_date_str, "%Y-%m-%d").date()
    last = datetime.strptime(end_date_str, "%Y-%m-%d").date()
    if last < first:
        first, last = last, first
    span = (last - first).days + 1
    return [first + timedelta(days=offset) for offset in range(span)]
70+
71+
4872def get_available_geos (indicators ):
4973 geo_values = []
5074 grouped_indicators = group_by_property (indicators , "data_source" )
@@ -117,16 +141,60 @@ def prepare_chart_series_multi(
117141 start_date : str ,
118142 end_date : str ,
119143 series_by : Union [str , Iterable [str ]] = "signal" ,
144+ time_type : str = None ,
120145):
121146 """
122- api_rows: list of dicts with at least 'time_value' (YYYYWW) and 'value'
147+ api_rows: list of dicts with at least 'time_value' (YYYYWW or YYYYMMDD ) and 'value'
123148 series_by: a field name (e.g., 'signal' or 'geo_value') or an iterable of fields (e.g., ('signal','geo_value'))
124- returns: { labels: [...], datasets: [{ label, data }, ...] }
149+ time_type: 'week' or 'day' - determines how to interpret time_value
150+ returns: { labels: [...], dayLabels: [...], timePositions: [...], datasets: [{ label, data, timeType }, ...] }
125151 """
126- # 1) Build aligned epiweek axis
152+ # 1) Build unified timeline with both days and weeks
153+ days = days_in_date_range (start_date , end_date )
127154 weeks = epiweeks_in_date_range (start_date , end_date )
128- labels = [_epiweek_label (w ) for w in weeks ]
129- keys = [_epiweek_key (w ) for w in weeks ]
155+
156+ # Create a unified timeline: each position can be either a day or a week
157+ # We'll use day positions as the base, and mark week positions
158+ day_keys = [_day_key (d ) for d in days ]
159+ week_keys = [_epiweek_key (w ) for w in weeks ]
160+
161+ # Create mapping: week_key -> list of day_keys in that week
162+ week_to_days = {}
163+ for w in weeks :
164+ week_start = w .startdate ()
165+ week_end = w .enddate ()
166+ week_key = _epiweek_key (w )
167+ week_to_days [week_key ] = []
168+ for d in days :
169+ if week_start <= d <= week_end :
170+ week_to_days [week_key ].append (_day_key (d ))
171+
172+ # Build labels and time positions
173+ # timePositions will indicate: 'day' or 'week' for each position
174+ labels = [] # Primary labels (weeks)
175+ day_labels = [] # Secondary labels (days)
176+ time_positions = [] # 'day' or 'week' for each position
177+
178+ # Use days as the base timeline
179+ for d in days :
180+ day_key = _day_key (d )
181+ day_labels .append (_day_label (d ))
182+
183+ # Check if this day is the start of a week
184+ w = Week .fromdate (d )
185+ week_key = _epiweek_key (w )
186+ if week_key in week_keys and d == w .startdate ():
187+ labels .append (_epiweek_label (w ))
188+ time_positions .append ("week" )
189+ else :
190+ # Check if any week contains this day
191+ is_in_week = any (day_key in week_to_days .get (wk , []) for wk in week_keys )
192+ if is_in_week :
193+ labels .append ("" ) # Empty label for days within weeks
194+ time_positions .append ("day" )
195+ else :
196+ labels .append ("" )
197+ time_positions .append ("day" )
130198
131199 # 2) Group rows by series key
132200 if isinstance (series_by , (list , tuple )):
@@ -145,37 +213,114 @@ def series_key_of(row):
145213 def series_label_of (key ):
146214 return str (key )
147215
216+ # 3) Process data based on time_type
148217 series_to_values : dict [object , dict [int , float ]] = {}
218+ detected_time_type = time_type
219+
149220 for row in api_rows :
150221 tv = row .get ("time_value" )
151- # If the API returned daily values (YYYYMMDD), convert to epiweek key (YYYYWW)
152- if tv is not None and (row .get ("time_type" ) == "day" ):
222+ row_time_type = row .get ("time_type" ) or time_type
223+
224+ if tv is None :
225+ continue
226+
227+ # Determine time_type if not provided
228+ if detected_time_type is None :
229+ # Try to detect from time_value format
230+ tv_str = str (tv )
231+ if len (tv_str ) == 8 : # YYYYMMDD format
232+ detected_time_type = "day"
233+ elif len (tv_str ) == 6 : # YYYYWW format
234+ detected_time_type = "week"
235+ else :
236+ detected_time_type = row_time_type or "week"
237+
238+ # Use row's time_type if available, otherwise use detected
239+ actual_time_type = row_time_type or detected_time_type
240+
241+ # Convert time_value to appropriate key
242+ if actual_time_type == "day" :
153243 try :
154244 tv_str = str (tv )
155- year = int (tv_str [0 :4 ])
156- month = int (tv_str [4 :6 ])
157- day = int (tv_str [6 :8 ])
158- d = datetime (year , month , day ).date ()
159- w = Week .fromdate (d )
160- tv = _epiweek_key (w )
245+ if len (tv_str ) == 8 :
246+ year = int (tv_str [0 :4 ])
247+ month = int (tv_str [4 :6 ])
248+ day = int (tv_str [6 :8 ])
249+ d = datetime (year , month , day ).date ()
250+ tv = _day_key (d )
251+ else :
252+ continue
161253 except Exception :
162- # Skip malformed dates
163- tv = None
164- if tv is None :
165- continue
254+ continue
255+ else : # week
256+ try :
257+ tv_str = str (tv )
258+ if len (tv_str ) == 6 :
259+ year = int (tv_str [0 :4 ])
260+ week = int (tv_str [4 :6 ])
261+ w = Week (year , week )
262+ tv = _epiweek_key (w )
263+ elif len (tv_str ) == 8 :
264+ # Convert day to week
265+ year = int (tv_str [0 :4 ])
266+ month = int (tv_str [4 :6 ])
267+ day = int (tv_str [6 :8 ])
268+ d = datetime (year , month , day ).date ()
269+ w = Week .fromdate (d )
270+ tv = _epiweek_key (w )
271+ else :
272+ continue
273+ except Exception :
274+ continue
275+
166276 skey = series_key_of (row )
167277 if skey not in series_to_values :
168278 series_to_values [skey ] = {}
169279 # last one wins if duplicates
170280 series_to_values [skey ][tv ] = row .get ("value" , None )
171281
172- # 3 ) Align each series to the epiweek axis, filling with None
282+ # 4 ) Align each series to the unified timeline (day-based)
173283 datasets = []
174284 for skey , tv_map in series_to_values .items ():
175- data = [tv_map .get (k , None ) for k in keys ]
176- datasets .append ({"label" : series_label_of (skey ), "data" : data })
285+ data = []
286+ # Determine if this series is weekly or daily based on its keys
287+ series_keys = list (tv_map .keys ())
288+ series_time_type = detected_time_type or "week"
289+
290+ if series_keys :
291+ # Check if keys match day format (8 digits) or week format (6 digits)
292+ first_key = series_keys [0 ]
293+ if first_key >= 10000000 : # Day key (YYYYMMDD >= 10000000)
294+ series_time_type = "day"
295+ # Map directly to day positions
296+ for day_key in day_keys :
297+ data .append (tv_map .get (day_key , None ))
298+ else : # Week key (YYYYWW < 10000000)
299+ series_time_type = "week"
300+ # Map week values to day positions
301+ # For each day, check if it's the start of a week that has data
302+ for d in days :
303+ w = Week .fromdate (d )
304+ week_key = _epiweek_key (w )
305+ # If this is the start of the week and we have data for this week
306+ if d == w .startdate () and week_key in tv_map :
307+ data .append (tv_map .get (week_key , None ))
308+ else :
309+ # For other days in the week, use None
310+ data .append (None )
311+ else :
312+ data = [None ] * len (day_keys )
313+
314+ datasets .append (
315+ {"label" : series_label_of (skey ), "data" : data , "timeType" : series_time_type }
316+ )
177317
178- return {"labels" : labels , "datasets" : datasets }
318+ return {
319+ "labels" : labels ,
320+ "dayLabels" : day_labels ,
321+ "timePositions" : time_positions ,
322+ "datasets" : datasets ,
323+ }
179324
180325
181326def normalize_dataset (data ):
@@ -184,46 +329,78 @@ def normalize_dataset(data):
184329 Preserves None values for missing data.
185330 """
186331 # Filter out None values for min/max calculation
187- numeric_values = [v for v in data if v is not None and not (isinstance (v , float ) and (v != v or v in (float ('inf' ), float ('-inf' ))))]
188-
332+ numeric_values = [
333+ v
334+ for v in data
335+ if v is not None
336+ and not (
337+ isinstance (v , float ) and (v != v or v in (float ("inf" ), float ("-inf" )))
338+ )
339+ ]
340+
189341 if not numeric_values :
190342 return data # Return as-is if no valid numeric values
191-
343+
192344 min_val = min (numeric_values )
193345 max_val = max (numeric_values )
194346 range_val = (max_val - min_val ) or 1 # Avoid division by zero
195-
347+
196348 # Normalize each value
197349 normalized = []
198350 for value in data :
199351 if value is None :
200352 normalized .append (None )
201- elif isinstance (value , float ) and (value != value or value in (float ('inf' ), float ('-inf' ))):
353+ elif isinstance (value , float ) and (
354+ value != value or value in (float ("inf" ), float ("-inf" ))
355+ ):
202356 normalized .append (None )
203357 else :
204358 normalized .append (((value - min_val ) / range_val ) * 100 )
205-
359+
206360 return normalized
207361
208362
209363def get_chart_data (indicators , geography ):
210- chart_data = {"labels" : [], "datasets" : []}
364+ chart_data = {"labels" : [], "dayLabels" : [], "timePositions" : [], "datasets" : []}
211365 geo_type , geo_value = geography .split (":" )
212366 geo_display_name = GeographyUnit .objects .get (
213367 geo_level__name = geo_type , geo_id = geo_value
214368 ).display_name
369+
370+ # Calculate initial view range: last two years (730 days) from today; the data itself is fetched from 2010 below
371+ today = datetime .now ().date ()
372+ two_years_ago = today - timedelta (days = 730 )
373+ # Format dates as strings
374+ end_date = today .strftime ("%Y-%m-%d" )
375+ start_date = two_years_ago .strftime ("%Y-%m-%d" )
376+
377+ # Store the initial view range (last two years)
378+ chart_data ["initialViewStart" ] = start_date
379+ chart_data ["initialViewEnd" ] = end_date
380+
381+ # Fetch data from a wider range (2010 to today) for scrolling
382+ data_start_date = "2010-01-01"
383+ data_end_date = today .strftime ("%Y-%m-%d" )
384+
215385 for indicator in indicators :
216386 title = generate_epivis_custom_title (indicator , geo_display_name )
217387 color = generate_random_color ()
388+ indicator_time_type = indicator .get ("time_type" , "week" )
218389 data = get_covidcast_data (
219- indicator , "2010-01-01" , "2025-01-31" , geography , settings .EPIDATA_API_KEY
390+ indicator ,
391+ data_start_date ,
392+ data_end_date ,
393+ geography ,
394+ settings .EPIDATA_API_KEY ,
220395 )
221396 if data :
397+ # Prepare series with full data range for scrolling
222398 series = prepare_chart_series_multi (
223399 data ,
224- "2020-01-01" ,
225- "2025-01-31" ,
400+ data_start_date ,
401+ data_end_date ,
226402 series_by = "signal" , # label per indicator (adjust to ("signal","geo_value") if needed)
403+ time_type = indicator_time_type ,
227404 )
228405 # Apply readable label, color, and normalize data for each dataset
229406 for ds in series ["datasets" ]:
@@ -236,5 +413,7 @@ def get_chart_data(indicators, geography):
236413 # Initialize labels once; assume same date range for all
237414 if not chart_data ["labels" ]:
238415 chart_data ["labels" ] = series ["labels" ]
416+ chart_data ["dayLabels" ] = series ["dayLabels" ]
417+ chart_data ["timePositions" ] = series ["timePositions" ]
239418 chart_data ["datasets" ].extend (series ["datasets" ])
240419 return chart_data
0 commit comments