Commit 9d64746

Author: Dmytro Trotsko (committed)

Added fluview endpoint support

1 parent f7ff02d commit 9d64746

File tree

9 files changed: +468 -30 lines changed

Lines changed: 64 additions & 0 deletions

@@ -0,0 +1,64 @@
covidcast_fluview_locations_mapping = {
    "nation:US": "nat",
    "hhs:1": "hhs1",
    "hhs:2": "hhs2",
    "hhs:3": "hhs3",
    "hhs:4": "hhs4",
    "hhs:5": "hhs5",
    "hhs:6": "hhs6",
    "hhs:7": "hhs7",
    "hhs:8": "hhs8",
    "hhs:9": "hhs9",
    "hhs:10": "hhs10",
    "state:AK": "AK",
    "state:AL": "AL",
    "state:AR": "AR",
    "state:AZ": "AZ",
    "state:CA": "CA",
    "state:CO": "CO",
    "state:CT": "CT",
    "state:DC": "DC",
    "state:DE": "DE",
    "state:FL": "FL",
    "state:GA": "GA",
    "state:HI": "HI",
    "state:IA": "IA",
    "state:ID": "ID",
    "state:IL": "IL",
    "state:IN": "IN",
    "state:KS": "KS",
    "state:KY": "KY",
    "state:LA": "LA",
    "state:MA": "MA",
    "state:MD": "MD",
    "state:ME": "ME",
    "state:MI": "MI",
    "state:MN": "MN",
    "state:MO": "MO",
    "state:MS": "MS",
    "state:MT": "MT",
    "state:NC": "NC",
    "state:ND": "ND",
    "state:NE": "NE",
    "state:NH": "NH",
    "state:NJ": "NJ",
    "state:NM": "NM",
    "state:NV": "NV",
    "state:NY": "NY",
    "state:OH": "OH",
    "state:OK": "OK",
    "state:OR": "OR",
    "state:PA": "PA",
    "state:RI": "RI",
    "state:SC": "SC",
    "state:SD": "SD",
    "state:TN": "TN",
    "state:TX": "TX",
    "state:UT": "UT",
    "state:VA": "VA",
    "state:VT": "VT",
    "state:WA": "WA",
    "state:WI": "WI",
    "state:WV": "WV",
    "state:WY": "WY",
}
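
For orientation (not part of the commit): downstream code translates UI-style geo identifiers such as "nation:US" or "hhs:3" into the region codes the fluview endpoint expects, falling back to the part after the colon when a key is missing from the table. A minimal sketch of that lookup, with illustrative values:

def to_fluview_region(geo: str) -> str:
    # Known keys map to fluview region codes; unknown identifiers fall back
    # to whatever follows the colon.
    try:
        return covidcast_fluview_locations_mapping[geo]
    except KeyError:
        return geo.split(":")[1]

# Illustrative values, not taken from the commit:
assert to_fluview_region("nation:US") == "nat"
assert to_fluview_region("hhs:3") == "hhs3"
assert to_fluview_region("census-region:cen1") == "cen1"  # fallback path
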

src/alternative_interface/utils.py

Lines changed: 92 additions & 28 deletions
@@ -12,6 +12,7 @@
     get_epiweek,
     group_by_property,
 )
+from alternative_interface.helper import covidcast_fluview_locations_mapping


 def epiweeks_in_date_range(start_date_str: str, end_date_str: str):
@@ -72,6 +73,7 @@ def days_in_date_range(start_date_str: str, end_date_str: str):
 def get_available_geos(indicators):
     geo_values = []
     grouped_indicators = group_by_property(indicators, "data_source")
+    sources = grouped_indicators.keys()
     for data_source, indicators in grouped_indicators.items():
         indicators_str = ",".join(indicator["name"] for indicator in indicators)
         response = requests.get(
@@ -98,6 +100,27 @@
         .prefetch_related("geo_level")
         .order_by("level")
     ]
+    if "fluview" in sources:
+        geographic_granularities.extend(
+            [
+                {
+                    "id": f"{geo_unit.geo_level.name}:{geo_unit.geo_id}",
+                    "geoType": geo_unit.geo_level.name,
+                    "text": geo_unit.display_name,
+                    "geoTypeDisplayName": geo_unit.geo_level.display_name,
+                }
+                for geo_unit in GeographyUnit.objects.filter(
+                    geo_level__name__in=[
+                        "census-region",
+                        "us-territory",
+                        "us-city",
+                        "ny_minus_jfk",
+                    ]
+                )
+                .prefetch_related("geo_level")
+                .order_by("level")
+            ]
+        )
     grouped_geographic_granularities = group_by_property(
         geographic_granularities, "geoTypeDisplayName"
     )
@@ -113,26 +136,57 @@


 def get_covidcast_data(indicator, start_date, end_date, geo, api_key):
-    if indicator["_endpoint"] == "covidcast":
-        time_values = f"{start_date}--{end_date}"
-        if indicator["time_type"] == "week":
-            start_day, end_day = get_epiweek(start_date, end_date)
-            time_values = f"{start_day}-{end_day}"
-        geo_type, geo_value = geo.split(":")
-        params = {
-            "time_type": indicator["time_type"],
-            "time_values": time_values,
-            "data_source": indicator["data_source"],
-            "signal": indicator["name"],
-            "geo_type": geo_type,
-            "geo_values": geo_value.lower(),
-            "api_key": api_key if api_key else settings.EPIDATA_API_KEY,
-        }
-        response = requests.get(f"{settings.EPIDATA_URL}covidcast", params=params)
-        if response.status_code == 200:
-            response_data = response.json()
-            if len(response_data["epidata"]):
-                return response_data["epidata"]
+    time_values = f"{start_date}--{end_date}"
+    if indicator["time_type"] == "week":
+        start_day, end_day = get_epiweek(start_date, end_date)
+        time_values = f"{start_day}-{end_day}"
+    geo_type, geo_value = geo.split(":")
+    params = {
+        "time_type": indicator["time_type"],
+        "time_values": time_values,
+        "data_source": indicator["data_source"],
+        "signal": indicator["name"],
+        "geo_type": geo_type,
+        "geo_values": geo_value.lower(),
+        "api_key": api_key if api_key else settings.EPIDATA_API_KEY,
+    }
+    response = requests.get(f"{settings.EPIDATA_URL}covidcast", params=params)
+    if response.status_code == 200:
+        response_data = response.json()
+        if len(response_data["epidata"]):
+            return response_data["epidata"]
+    return []
+
+
+def get_fluview_data(indicator, geo, start_date, end_date, api_key):
+    region = None
+    try:
+        region = covidcast_fluview_locations_mapping[geo]
+    except KeyError:
+        region = geo.split(":")[1]
+    time_values = f"{start_date}--{end_date}"
+    if indicator["time_type"] == "week":
+        start_day, end_day = get_epiweek(start_date, end_date)
+        time_values = f"{start_day}-{end_day}"
+    params = {
+        "regions": region,
+        "epiweeks": time_values,
+        "api_key": api_key if api_key else settings.EPIDATA_API_KEY,
+    }
+    print(indicator)
+    response = requests.get(f"{settings.EPIDATA_URL}{indicator['data_source']}", params=params)
+    if response.status_code == 200:
+        data = response.json()
+        if len(data["epidata"]):
+            return [
+                {
+                    "time_value": el["epiweek"],
+                    "value": el[indicator["name"]],
+                    "signal": indicator["name"],
+                    "time_type": indicator["time_type"],
+                }
+                for el in data["epidata"]
+            ]
     return []

@@ -379,20 +433,30 @@ def get_chart_data(indicators, geography):
     chart_data["initialViewEnd"] = end_date

     # Fetch data from a wider range (2020 to today) for scrolling
-    data_start_date = "2010-01-01"
+    data_start_date = "1990-01-01"
     data_end_date = today.strftime("%Y-%m-%d")

     for indicator in indicators:
         title = generate_epivis_custom_title(indicator, geo_display_name)
         color = generate_random_color()
         indicator_time_type = indicator.get("time_type", "week")
-        data = get_covidcast_data(
-            indicator,
-            data_start_date,
-            data_end_date,
-            geography,
-            settings.EPIDATA_API_KEY,
-        )
+        data = None
+        if indicator["_endpoint"] == "covidcast":
+            data = get_covidcast_data(
+                indicator,
+                data_start_date,
+                data_end_date,
+                geography,
+                settings.EPIDATA_API_KEY,
+            )
+        elif indicator["data_source"] in ["fluview", "fluview_clinical"]:
+            data = get_fluview_data(
+                indicator,
+                geography,
+                data_start_date,
+                data_end_date,
+                settings.EPIDATA_API_KEY,
+            )
         if data:
             # Prepare series with full data range for scrolling
             series = prepare_chart_series_multi(
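
A hedged sketch of how the new fluview branch is exercised (not from the commit; the indicator field values below, such as the "wili" signal name and the dates, are assumptions chosen for illustration):

fluview_indicator = {
    "_endpoint": "fluview",     # anything other than "covidcast" bypasses get_covidcast_data
    "data_source": "fluview",   # routes to get_fluview_data and selects the Epidata endpoint
    "name": "wili",             # column read from each returned epidata row
    "time_type": "week",        # triggers the epiweek conversion
}

data = get_fluview_data(
    fluview_indicator,
    "state:CA",        # translated to region "CA" via covidcast_fluview_locations_mapping
    "1990-01-01",
    "2024-12-31",
    None,              # falls back to settings.EPIDATA_API_KEY
)
# Each element looks like:
# {"time_value": <epiweek>, "value": <wili value>, "signal": "wili", "time_type": "week"}
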
Lines changed: 89 additions & 0 deletions

@@ -0,0 +1,89 @@
from django.core.management.base import BaseCommand
from django.apps import apps
from django.db import connection


class Command(BaseCommand):
    help = "Reset AUTO_INCREMENT values for all database tables based on current max IDs"

    def add_arguments(self, parser):
        parser.add_argument(
            '--dry-run',
            action='store_true',
            help='Show what would be done without actually making changes',
        )

    def handle(self, *args, **options):
        dry_run = options['dry_run']

        if dry_run:
            self.stdout.write(self.style.WARNING("DRY RUN MODE - No changes will be made\n"))

        models_processed = 0
        models_skipped = 0

        # Get all models from installed apps
        for app_config in apps.get_app_configs():
            for model in app_config.get_models():
                # Skip proxy models (they use the same table as their parent)
                if model._meta.proxy:
                    continue

                # Skip models without a primary key or with non-auto-increment PK
                pk_field = model._meta.pk
                if not pk_field or not pk_field.auto_created:
                    continue

                table_name = model._meta.db_table

                try:
                    with connection.cursor() as cursor:
                        # Get the current max ID
                        cursor.execute(f"SELECT MAX(`{pk_field.column}`) FROM `{table_name}`")
                        result = cursor.fetchone()
                        max_id = result[0] if result[0] is not None else 0

                        # Calculate the next AUTO_INCREMENT value
                        next_auto_increment = max_id + 1 if max_id > 0 else 1

                        if dry_run:
                            self.stdout.write(
                                f"Would reset {model._meta.label}: "
                                f"table `{table_name}` AUTO_INCREMENT to {next_auto_increment} "
                                f"(current max ID: {max_id})"
                            )
                        else:
                            # Reset AUTO_INCREMENT
                            cursor.execute(
                                f"ALTER TABLE `{table_name}` AUTO_INCREMENT = {next_auto_increment}"
                            )
                            self.stdout.write(
                                self.style.SUCCESS(
                                    f"Reset {model._meta.label}: "
                                    f"table `{table_name}` AUTO_INCREMENT to {next_auto_increment} "
                                    f"(current max ID: {max_id})"
                                )
                            )

                        models_processed += 1

                except Exception as e:
                    self.stdout.write(
                        self.style.ERROR(
                            f"Error processing {model._meta.label} ({table_name}): {str(e)}"
                        )
                    )
                    models_skipped += 1

        self.stdout.write(
            self.style.SUCCESS(
                f"\nCompleted: {models_processed} tables processed"
            )
        )
        if models_skipped > 0:
            self.stdout.write(
                self.style.WARNING(
                    f"{models_skipped} tables skipped due to errors"
                )
            )
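
The management command is invoked by the name of the file it was added under, which is not visible in this excerpt, so the command name below is a placeholder. A hedged sketch of driving it from code with Django's call_command; note that the backtick quoting and AUTO_INCREMENT statement assume a MySQL/MariaDB backend:

from django.core.management import call_command

# "reset_auto_increment" is a placeholder for the actual command/file name.
call_command("reset_auto_increment", dry_run=True)  # preview only, no ALTER TABLE issued
call_command("reset_auto_increment")                # actually resets AUTO_INCREMENT values
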
