Skip to content
19 changes: 0 additions & 19 deletions docs/contributing/contributing.md
Original file line number Diff line number Diff line change
Expand Up @@ -98,25 +98,6 @@ More information about pre-commit hooks can be found [here](https://pre-commit.c

Install Black:

We use ruff to enforce the code style and code formatting. You can run it with:

```bash
pipenv run ruff check .
pipenv run ruff format .
```

To ensure that the code is formatted correctly, we use a pre-commit hook that runs Ruff before every commit.
Run the following once to enable hooks in your local repo:

```bash
pipenv run pre-commit install
# optional: run on all files
pipenv run pre-commit run --all-files
```

Hence, you will need to make sure that the code is formatted correctly before committing your changes; otherwise, the commit will fail.
More information about pre-commit hooks can be found [here](https://pre-commit.com/).

```bash
pipenv install black
```
Expand Down
10 changes: 3 additions & 7 deletions pages/lib/charts_data_explorer.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
from math import ceil, floor

import numpy as np
import math
import plotly.express as px
import plotly.graph_objects as go
from pages.lib.utils import get_max_min_value
from pages.lib.global_scheme import template, mapping_dictionary, month_lst
from pages.lib.global_column_names import ColNames

Expand Down Expand Up @@ -43,8 +41,7 @@ def custom_heatmap(df, global_local, var, time_filter_info, data_filter_info, si
range_z = var_range
else:
# Set maximum and minimum according to data
data_max = 5 * ceil(df[var].max() / 5)
data_min = 5 * floor(df[var].min() / 5)
data_max, data_min = get_max_min_value(df[var])
range_z = [data_min, data_max]

title = var_name + " (" + var_unit + ")"
Expand Down Expand Up @@ -121,8 +118,7 @@ def three_var_graph(

if global_local != "global":
# Set maximum and minimum according to data
data_max = 5 * math.ceil(df[var].max() / 5)
data_min = 5 * math.floor(df[var].min() / 5)
data_max, data_min = get_max_min_value(df[var])
var_range = [data_min, data_max]

color_scale = var_color
Expand Down
9 changes: 4 additions & 5 deletions pages/lib/charts_sun.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
from datetime import timedelta
from math import ceil, cos, floor, radians
from math import cos, radians

import numpy as np
import pandas as pd
import plotly.graph_objects as go

from config import UnitSystem
from pages.lib.utils import get_max_min_value
from pages.lib.global_scheme import (
template,
mapping_dictionary,
Expand Down Expand Up @@ -135,8 +136,7 @@ def polar_graph(df, meta, global_local, var, si_ip):
range_z = var_range
else:
# Set maximum and minimum according to data
data_max = 5 * ceil(solpos[var].max() / 5)
data_min = 5 * floor(solpos[var].min() / 5)
data_max, data_min = get_max_min_value(solpos[var])
range_z = [data_min, data_max]

tz = "UTC"
Expand Down Expand Up @@ -348,8 +348,7 @@ def custom_cartesian_solar(df, meta, global_local, var, si_ip):
range_z = var_range
else:
# Set maximum and minimum according to data
data_max = 5 * ceil(df[var].max() / 5)
data_min = 5 * floor(df[var].min() / 5)
data_max, data_min = get_max_min_value(df[var])
range_z = [data_min, data_max]

if var == "None":
Expand Down
151 changes: 101 additions & 50 deletions pages/lib/extract_df.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,66 @@ def get_location_info(lst, file_name):
return location_info


# ==== Unified UTCI computation and binning ====
# Bin edges in degrees; -999/999 are open-ended sentinels for the extreme bins.
# NOTE(review): the interior edges (-40, -27, -13, 0, 9, 26, 32, 38, 46) look
# like the standard UTCI thermal-stress category boundaries — confirm against
# the project's UTCI reference before relying on that interpretation.
UTCI_BINS = [-999, -40, -27, -13, 0, 9, 26, 32, 38, 46, 999]
# One integer label per bin (ten bins from eleven edges), from -5 to 4.
UTCI_LABELS = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]


def utci_calc(
    df: pd.DataFrame,
    t_air_col: str,
    t_rad_col: str,
    wind_col: str,
    rh_col: str = ColNames.RH,
) -> pd.Series:
    """Evaluate utci() on the named columns of *df*.

    Args:
        df: DataFrame holding the weather columns.
        t_air_col: Column name for air temperature.
        t_rad_col: Column name for radiant temperature.
        wind_col: Column name for wind speed.
        rh_col: Column name for relative humidity (default: ColNames.RH).

    Returns:
        The result of utci() applied to the four selected columns.
    """
    selected = (df[column] for column in (t_air_col, t_rad_col, wind_col, rh_col))
    return utci(*selected)


def add_utci_variants(df: pd.DataFrame) -> pd.DataFrame:
    """Compute the four UTCI variant columns on *df* and return it.

    Air temperature is always DBT; the variants differ only in the radiant
    temperature column (DBT vs MRT) and the wind-speed column used:
      - UTCI_NO_SUN_WIND    : radiant=DBT, wind=WIND_SPEED_UTCI
      - UTCI_NO_SUN_NO_WIND : radiant=DBT, wind=WIND_SPEED_UTCI_0
      - UTCI_SUN_WIND       : radiant=MRT, wind=WIND_SPEED_UTCI
      - UTCI_SUN_NO_WIND    : radiant=MRT, wind=WIND_SPEED_UTCI_0

    Args:
        df: DataFrame with DBT, MRT, WIND_SPEED_UTCI(_0) and RH columns.

    Returns:
        The same DataFrame with the four UTCI columns added in-place.
    """
    # (target column, radiant-temperature column, wind-speed column),
    # in the same order the columns were originally assigned.
    variants = (
        (ColNames.UTCI_NO_SUN_WIND, ColNames.DBT, ColNames.WIND_SPEED_UTCI),
        (ColNames.UTCI_NO_SUN_NO_WIND, ColNames.DBT, ColNames.WIND_SPEED_UTCI_0),
        (ColNames.UTCI_SUN_WIND, ColNames.MRT, ColNames.WIND_SPEED_UTCI),
        (ColNames.UTCI_SUN_NO_WIND, ColNames.MRT, ColNames.WIND_SPEED_UTCI_0),
    )
    for target, t_rad, wind in variants:
        df[target] = utci_calc(df, ColNames.DBT, t_rad, wind)
    return df


def add_utci_categories(df: pd.DataFrame) -> pd.DataFrame:
    """Bin each of the four UTCI columns into its category column.

    Args:
        df: DataFrame already containing the four UTCI variant columns.

    Returns:
        The same DataFrame with the four category columns added in-place,
        using the module-level UTCI_BINS edges and UTCI_LABELS labels.
    """
    # (source UTCI column, target category column) pairs.
    pairs = (
        (ColNames.UTCI_NO_SUN_WIND, ColNames.UTCI_NOSUN_WIND_CATEGORIES),
        (ColNames.UTCI_NO_SUN_NO_WIND, ColNames.UTCI_NOSUN_NOWIND_CATEGORIES),
        (ColNames.UTCI_SUN_WIND, ColNames.UTCI_SUN_WIND_CATEGORIES),
        (ColNames.UTCI_SUN_NO_WIND, ColNames.UTCI_SUN_NOWIND_CATEGORIES),
    )
    for source, target in pairs:
        df[target] = pd.cut(df[source], bins=UTCI_BINS, labels=UTCI_LABELS)
    return df


@code_timer
def create_df(lst, file_name):
"""Extract and clean the data. Return a pandas data from a url."""
Expand Down Expand Up @@ -241,14 +301,14 @@ def create_df(lst, file_name):

# Add in UTCI
sol_altitude = epw_df[ColNames.ELEVATION].mask(epw_df[ColNames.ELEVATION] <= 0, 0)
sharp = [45] * 8760
sharp = expand_to_hours(45)
sol_radiation_dir = epw_df[ColNames.DIR_NOR_RAD]
sol_transmittance = [1] * 8760 # CHECK VALUE
f_svv = [1] * 8760 # CHECK VALUE
f_bes = [1] * 8760 # CHECK VALUE
asw = [0.7] * 8760 # CHECK VALUE
posture = ["standing"] * 8760
floor_reflectance = [0.6] * 8760 # EXPOSE AS A VARIABLE?
sol_transmittance = expand_to_hours(1) # CHECK VALUE
f_svv = expand_to_hours(1) # CHECK VALUE
f_bes = expand_to_hours(1) # CHECK VALUE
asw = expand_to_hours(0.7) # CHECK VALUE
posture = expand_to_hours("standing")
floor_reflectance = expand_to_hours(0.6) # EXPOSE AS A VARIABLE?

mrt = np.vectorize(solar_gain)(
sol_altitude,
Expand Down Expand Up @@ -280,45 +340,10 @@ def create_df(lst, file_name):
epw_df[ColNames.WIND_SPEED_UTCI_0] = epw_df[ColNames.WIND_SPEED_UTCI].mask(
epw_df[ColNames.WIND_SPEED_UTCI] >= 0, 0.5
)
epw_df[ColNames.UTCI_NO_SUN_WIND] = utci(
epw_df[ColNames.DBT],
epw_df[ColNames.DBT],
epw_df[ColNames.WIND_SPEED_UTCI],
epw_df[ColNames.RH],
)
epw_df[ColNames.UTCI_NO_SUN_NO_WIND] = utci(
epw_df[ColNames.DBT],
epw_df[ColNames.DBT],
epw_df[ColNames.WIND_SPEED_UTCI_0],
epw_df[ColNames.RH],
)
epw_df[ColNames.UTCI_SUN_WIND] = utci(
epw_df[ColNames.DBT],
epw_df[ColNames.MRT],
epw_df[ColNames.WIND_SPEED_UTCI],
epw_df[ColNames.RH],
)
epw_df[ColNames.UTCI_SUN_NO_WIND] = utci(
epw_df[ColNames.DBT],
epw_df[ColNames.MRT],
epw_df[ColNames.WIND_SPEED_UTCI_0],
epw_df[ColNames.RH],
)

utci_bins = [-999, -40, -27, -13, 0, 9, 26, 32, 38, 46, 999]
utci_labels = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
epw_df[ColNames.UTCI_NOSUN_WIND_CATEGORIES] = pd.cut(
x=epw_df[ColNames.UTCI_NO_SUN_WIND], bins=utci_bins, labels=utci_labels
)
epw_df[ColNames.UTCI_NOSUN_NOWIND_CATEGORIES] = pd.cut(
x=epw_df[ColNames.UTCI_NO_SUN_NO_WIND], bins=utci_bins, labels=utci_labels
)
epw_df[ColNames.UTCI_SUN_WIND_CATEGORIES] = pd.cut(
x=epw_df[ColNames.UTCI_SUN_WIND], bins=utci_bins, labels=utci_labels
)
epw_df[ColNames.UTCI_SUN_NOWIND_CATEGORIES] = pd.cut(
x=epw_df[ColNames.UTCI_SUN_NO_WIND], bins=utci_bins, labels=utci_labels
)
epw_df = add_utci_variants(epw_df)

epw_df = add_utci_categories(epw_df)

# Add psy values
ta_rh = np.vectorize(psy.psy_ta_rh)(epw_df[ColNames.DBT], epw_df[ColNames.RH])
Expand Down Expand Up @@ -407,11 +432,11 @@ def enthalpy(df, name):


def convert_data(df, mapping_json):
df[ColNames.ADAPTIVE_COMFORT] = df[ColNames.ADAPTIVE_COMFORT] * 1.8 + 32
df[ColNames.ADAPTIVE_CMF_80_LOW] = df[ColNames.ADAPTIVE_CMF_80_LOW] * 1.8 + 32
df[ColNames.ADAPTIVE_CMF_80_UP] = df[ColNames.ADAPTIVE_CMF_80_UP] * 1.8 + 32
df[ColNames.ADAPTIVE_CMF_90_LOW] = df[ColNames.ADAPTIVE_CMF_90_LOW] * 1.8 + 32
df[ColNames.ADAPTIVE_CMF_90_UP] = df[ColNames.ADAPTIVE_CMF_90_UP] * 1.8 + 32
convert_t_to_f(df, ColNames.ADAPTIVE_COMFORT)
convert_t_to_f(df, ColNames.ADAPTIVE_CMF_80_LOW)
convert_t_to_f(df, ColNames.ADAPTIVE_CMF_80_UP)
convert_t_to_f(df, ColNames.ADAPTIVE_CMF_90_LOW)
convert_t_to_f(df, ColNames.ADAPTIVE_CMF_90_UP)

mapping_dict = json.loads(mapping_json)
for key in json.loads(mapping_json):
Expand All @@ -423,6 +448,32 @@ def convert_data(df, mapping_json):
return json.dumps(mapping_dict)


def convert_t_to_f(df: pd.DataFrame, name: str):
    """Convert a temperature column from Celsius to Fahrenheit in-place.

    Args:
        df: DataFrame holding the temperature column.
        name: Label of the Celsius column to overwrite.

    Returns:
        None. `df[name]` is replaced with the Fahrenheit values.
    """
    celsius = df[name]
    df[name] = celsius.mul(1.8).add(32)


def expand_to_hours(value: object, hours: int = 8760) -> list[object]:
    """Return a list with *value* repeated once per hour.

    The original annotations used the builtin function ``any`` as if it were
    a type (``value: any``, ``list[any]``); ``object`` is the correct
    "any value" annotation and needs no extra import.

    Args:
        value: The value to repeat.
        hours: Number of repetitions. Defaults to 8760 (hours in a
            non-leap year).

    Returns:
        A new list containing `value` repeated `hours` times (empty when
        `hours` is zero or negative).
    """
    return [value] * hours


if __name__ == "__main__":
# fmt: off
test_url = "https://www.energyplus.net/weather-download/europe_wmo_region_6/ITA//ITA_Bologna-Borgo.Panigale.161400_IGDG/all"
Expand Down
Loading