diff --git a/.vscode/extensions.json b/.vscode/extensions.json index e3fb08b..6bbc0da 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -8,10 +8,10 @@ "ms-toolsai.jupyter-renderers", "davidanson.vscode-markdownlint", "bierner.markdown-mermaid", - "ms-python.vscode-pylance", "ms-python.python", "ms-python.debugpy", "charliermarsh.ruff", - "mhutchie.git-graph" + "mhutchie.git-graph", + "astral-sh.ty" ] } diff --git a/README.md b/README.md index 3971f73..b64565d 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,60 @@ # UMEP Core -## Setup +## Installation + +```bash +pip install umep +``` + +Or with uv: + +```bash +uv add umep +``` + +## Troubleshooting + +If you encounter DLL or import errors (common on Windows), run the diagnostic tool: + +```bash +umep-doctor +``` + +### Common Issues + +**OSGeo4W / QGIS Users** + +Do NOT pip install into the OSGeo4W Python environment. The pre-installed GDAL binaries will conflict with rasterio's bundled DLLs, causing errors like: + +``` +ImportError: DLL load failed while importing _base: The specified procedure could not be found. +``` + +Instead, create a separate virtual environment: + +```bash +uv venv --python 3.12 +.venv\Scripts\activate # Windows +uv pip install umep +``` + +**Conda Alternative** + +If you prefer conda, use conda-forge for the geospatial dependencies: + +```bash +conda create -n umep -c conda-forge python=3.12 rasterio geopandas pyproj shapely +conda activate umep +pip install umep +``` + +## Development Setup -- Make sure you have a Python installation on your system -- Install `vscode` and `github` apps. - Install `uv` package manager (e.g. `pip install uv`). - Clone repo. -- Run `uv sync` from the directory where `pyproject.toml` in located to install `.venv` and packages. -- Select `.venv` Python environment. -- FYI: Recommended settings and extensions are included in the repo. Proceed if prompted to install extensions. -- Develop and commit to Github often! +- Run `uv sync` from the directory where `pyproject.toml` is located to install `.venv` and packages. +- Select `.venv` Python environment in your IDE. +- FYI: Recommended VS Code settings and extensions are included in the repo. 
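The troubleshooting section above boils down to one failure mode: a pip-installed rasterio loading DLLs from a foreign GDAL install. A quick manual check along these lines mirrors what the `umep-doctor` entry point is intended to automate (illustrative sketch only, not the actual `umep.doctor` implementation):

```python
# Illustrative only -- a manual check similar in spirit to what `umep-doctor`
# automates; this is not the actual umep.doctor implementation.
import sys


def check_geospatial_imports() -> None:
    for mod in ("rasterio", "pyproj", "shapely", "osgeo.gdal"):
        try:
            __import__(mod)
            print(f"OK      {mod}")
        except (ImportError, OSError) as exc:  # OSError covers Windows DLL load failures
            print(f"FAILED  {mod}: {exc}")
    print(f"Python executable: {sys.executable}")


if __name__ == "__main__":
    check_geospatial_imports()
```

If `rasterio` fails with a DLL-related `OSError` while `sys.executable` points inside an OSGeo4W or QGIS directory, that is exactly the conflict described above; use a separate virtual environment or the GDAL backend instead.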
## Demo diff --git a/demos/athens-demo.py b/demos/athens-demo.py index 818e1b4..24682b8 100644 --- a/demos/athens-demo.py +++ b/demos/athens-demo.py @@ -42,6 +42,7 @@ cdsm_rast, cdsm_transf.to_gdal(), CRS.from_epsg(working_crs).to_wkt(), + coerce_f64_to_f32=True, ) # %% # wall info for SOLWEIG @@ -60,6 +61,8 @@ dem_path=input_path_str + "/DEM.tif", cdsm_path=output_folder_path_str + "/CDSM.tif", trans_veg_perc=3, + use_tiled_loading=False, + tile_size=200, ) # %% @@ -68,3 +71,28 @@ "demos/data/athens/parametersforsolweig.json", ) SRC.run() + +# %% +# skyview factor for SOLWEIG - tiled +skyviewfactor_algorithm.generate_svf( + dsm_path=input_path_str + "/DSM.tif", + bbox=total_extents, + out_dir=output_folder_path_str + "/svf_tiled", + dem_path=input_path_str + "/DEM.tif", + cdsm_path=output_folder_path_str + "/CDSM.tif", + trans_veg_perc=3, + use_tiled_loading=True, + tile_size=200, +) + +# %% +# Tiled +SRC = solweig_runner_core.SolweigRunCore( + "demos/data/athens/configsolweig_tiled.ini", + "demos/data/athens/parametersforsolweig.json", + use_tiled_loading=True, + tile_size=200, +) +SRC.run() + +# %% diff --git a/demos/data/athens/configsolweig_tiled.ini b/demos/data/athens/configsolweig_tiled.ini new file mode 100644 index 0000000..7034552 --- /dev/null +++ b/demos/data/athens/configsolweig_tiled.ini @@ -0,0 +1,87 @@ +[DEFAULT] + +#--------------------------------------------------------------------------------------------------------- +####### Control File ####### +#--------------------------------------------------------------------------------------------------------- +####### INPUTS ####### +# output path +output_dir=temp/athens/output_tiled/ +# working dir +working_dir=temp/athens/working/ +# Input ground and building dsm +dsm_path=demos/data/athens/DSM.tif +# Input vegetation dsm +cdsm_path=temp/athens/CDSM.tif +# Input trunkzone vegetation dsm +tdsm_path= +# Input Digital Elevation Model +dem_path=demos/data/athens/DEM.tif +# Input Land cover dataset +lc_path= +# Input wall height raster +wh_path=temp/athens/walls/wall_hts.tif +# Input wall aspect raster +wa_path=temp/athens/walls/wall_aspects.tif +# Skyview factor files +svf_path=temp/athens/svf_tiled/svfs.zip +# Input file for anisotrophic sky +aniso_path=temp/athens/svf_tiled/shadowmats.npz +# Point of Interest file for ground +poi_path= +poi_field= +plot_poi_patches=0 +# Input file for wall temperature scheme (Wallenberg et al. 2025) +wall_path= +# Point of Interest file for walls +woi_path= +woi_field= +# input meteorological file (i.e. forcing file) +met_path= + +## input settings ## +# estimate diffuse and global shortwave radiation from global radiation (1) +only_global=0 +# use vegetation scheme (lindberg and grimmond, 2011, TAAC) +use_veg_dem=1 +# consider leaf on trees full year (1) +conifer=0 +# consider person as a cylinder (1) or a box (0) +person_cylinder=1 +# utc time given in meteorological forcing file +utc=+3 +# land cover scheme activated (1) (Lindberg et al. 2016 UC) +use_landcover=0 +# use dem and dsm to identify building pixels (1) else, land cover will be used (0) +use_dem_for_buildings=1 +# use anisotropic sky (Wallenberg et al. XXXX and Wallenberg et al. XXXX) +use_aniso=1 +# use wall surface temperature scheme (Wallenberg et al. 
2025, GMD) +use_wall_scheme=0 +# If building materials is not included in lc, then this is used for all buildings (Wood, Brick or Concrete) +wall_type=Concrete +# output settings +output_tmrt=1 +output_kup=0 +output_kdown=0 +output_lup=0 +output_ldown=0 +output_sh=0 +save_buildings=0 +output_kdiff=0 +output_tree_planter=0 +wall_netcdf=0 + +#--------------------------------------------------------------------------------------------------------- +# dates - used if an EPW-file is used +#--------------------------------------------------------------------------------------------------------- +# Set to 1 if an EPW file is used as meteorological forcing data +use_epw_file=1 +# Path to EPW file +epw_path=demos/data/athens/athens_2023.epw +# year,month,day,hour ## the start date/time for period of interest +epw_start_date=2023,01,01,00 +# year,month,day,hour # end date/time +epw_end_date=2023,01,03,23 +# hours to use (comma separated, optional) +epw_hours= +###################### diff --git a/pyproject.toml b/pyproject.toml index f1299b5..fe4e158 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "umep" -version = "0.0.1b47" +version = "0.0.1a20" description = "urban multi-scale environmental predictor" readme = "README.md" requires-python = ">=3.9, <3.14" @@ -60,6 +60,9 @@ dev = [ "pytest>=8.4.1", ] +[project.scripts] +umep-doctor = "umep.doctor:main" + [project.urls] homepage = "https://github.com/UMEP-dev/umep-core" documentation = "https://github.com/UMEP-dev/umep-core" diff --git a/tests/test_common.py b/tests/test_common.py index 1d84c37..bdad12d 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -2,7 +2,7 @@ import pyproj import pytest -from umep.common import load_raster, save_raster, xy_to_lnglat +from umep.common import load_raster, save_raster, shrink_bbox_to_pixel_grid, xy_to_lnglat def _make_gt(width, height, pixel_size=1): @@ -17,8 +17,8 @@ def test_save_and_load_raster_roundtrip(tmp_path): crs_wkt = pyproj.CRS.from_epsg(4326).to_wkt() # save and load - save_raster(str(out), data, trf, crs_wkt, no_data_val=-9999) - rast, trf_out, crs_out, nodata = load_raster(str(out)) + save_raster(str(out), data, trf, crs_wkt, no_data_val=-9999, coerce_f64_to_f32=True) + rast, trf_out, crs_out, nodata = load_raster(str(out), coerce_f64_to_f32=True) np.testing.assert_array_equal(rast, data) assert isinstance(trf_out, list) and len(trf_out) == 6 @@ -34,12 +34,12 @@ def test_load_raster_with_bbox(tmp_path): data = np.arange(100, dtype=np.float32).reshape(10, 10) trf = _make_gt(10, 10) crs_wkt = pyproj.CRS.from_epsg(4326).to_wkt() - save_raster(str(out), data, trf, crs_wkt, -9999) + save_raster(str(out), data, trf, crs_wkt, -9999, coerce_f64_to_f32=True) # bbox in spatial coords: [minx, miny, maxx, maxy] # For our geotransform bounds = [0,0,10,10] bbox = [2, 2, 5, 5] - rast_crop, trf_crop, crs_crop, nd = load_raster(str(out), bbox=bbox) + rast_crop, trf_crop, crs_crop, nd = load_raster(str(out), bbox=bbox, coerce_f64_to_f32=True) # Expected slice computed from implementation mapping assert rast_crop.shape == (3, 3) @@ -66,3 +66,48 @@ def test_xy_to_lnglat_scalar_and_array(): lons, lats = xy_to_lnglat(crs_wkt, xa, ya) assert np.array_equal(lons, np.array([0.0, 30.0])) assert np.array_equal(lats, np.array([0.0, -15.0])) + + +def test_shrink_bbox_to_pixel_grid(): + """Test bbox snapping to pixel grid by shrinking to nearest whole pixels.""" + # Grid with origin at (0, 0), 1m pixels + origin_x, origin_y = 0.0, 0.0 + pixel_width, pixel_height = 1.0, 1.0 + + # 
Bbox that aligns exactly with grid + bbox = (2.0, 3.0, 5.0, 7.0) + result = shrink_bbox_to_pixel_grid(bbox, origin_x, origin_y, pixel_width, pixel_height) + assert result == (2.0, 3.0, 5.0, 7.0), "Aligned bbox should not change" + + # Bbox that needs inward snapping + bbox = (2.3, 3.7, 5.9, 7.2) + result = shrink_bbox_to_pixel_grid(bbox, origin_x, origin_y, pixel_width, pixel_height) + # Should snap to: minx=ceil(2.3)=3, miny=ceil(3.7)=4, maxx=floor(5.9)=5, maxy=floor(7.2)=7 + assert result == (3.0, 4.0, 5.0, 7.0), "Bbox should shrink to nearest whole pixels" + + # Grid with non-zero origin + origin_x, origin_y = 10.0, 20.0 + bbox = (12.5, 23.2, 15.8, 26.9) + result = shrink_bbox_to_pixel_grid(bbox, origin_x, origin_y, pixel_width, pixel_height) + # Relative to origin: (2.5, 3.2, 5.8, 6.9) -> ceil/floor -> (3, 4, 5, 6) -> absolute (13, 24, 15, 26) + assert result == (13.0, 24.0, 15.0, 26.0), "Non-zero origin should be handled correctly" + + # Negative pixel height (north-up raster) + origin_x, origin_y = 0.0, 100.0 + pixel_width, pixel_height = 1.0, -1.0 + bbox = (2.3, 93.1, 5.7, 96.8) + result = shrink_bbox_to_pixel_grid(bbox, origin_x, origin_y, pixel_width, pixel_height) + # For y-axis with negative pixel size: + # miny=93.1 -> origin_y - y = 100 - 93.1 = 6.9 -> floor(6.9) = 6 -> y = 100 - 6 = 94 + # maxy=96.8 -> origin_y - y = 100 - 96.8 = 3.2 -> ceil(3.2) = 4 -> y = 100 - 4 = 96 + # Expected: (3.0, 94.0, 5.0, 96.0) + assert result == (3.0, 94.0, 5.0, 96.0), "Negative pixel height should work correctly" + + # Sub-pixel bbox that would collapse + bbox = (2.1, 3.2, 2.8, 3.7) + with pytest.raises(ValueError, match="collapsed"): + shrink_bbox_to_pixel_grid(bbox, origin_x, origin_y, pixel_width, pixel_height) + + # Invalid bbox (min >= max) + with pytest.raises(ValueError, match="invalid"): + shrink_bbox_to_pixel_grid((5.0, 3.0, 2.0, 7.0), origin_x, origin_y, pixel_width, pixel_height) diff --git a/tests/test_solweig_rasters.py b/tests/test_solweig_rasters.py new file mode 100644 index 0000000..edb7817 --- /dev/null +++ b/tests/test_solweig_rasters.py @@ -0,0 +1,63 @@ +from pathlib import Path +from typing import Optional + +import numpy as np + +from umep.class_configs import SolweigConfig +from umep.functions.SOLWEIGpython.solweig_runner_core import SolweigRunCore + +PROJECT_ROOT = Path(__file__).resolve().parents[1] +CONFIG_PATH = PROJECT_ROOT / "demos/data/athens/configsolweig.ini" +PARAMS_PATH = PROJECT_ROOT / "demos/data/athens/parametersforsolweig.json" + + +def _prepare_temp_config(tmp_path: Path) -> Path: + """Return a config file that writes outputs into a temp dir.""" + config = SolweigConfig() + config.from_file(str(CONFIG_PATH)) + + output_dir = tmp_path / "output" + working_dir = tmp_path / "working" + + config.output_dir = str(output_dir) + config.working_dir = str(working_dir) + + path_overrides = { + "dsm_path": PROJECT_ROOT / "demos/data/athens/DSM.tif", + "cdsm_path": PROJECT_ROOT / "temp/athens/CDSM.tif", + "dem_path": PROJECT_ROOT / "demos/data/athens/DEM.tif", + "wh_path": PROJECT_ROOT / "temp/athens/walls/wall_hts.tif", + "wa_path": PROJECT_ROOT / "temp/athens/walls/wall_aspects.tif", + "svf_path": PROJECT_ROOT / "temp/athens/svf/svfs.zip", + "aniso_path": PROJECT_ROOT / "temp/athens/svf/shadowmats.npz", + "epw_path": PROJECT_ROOT / "demos/data/athens/athens_2023.epw", + } + for attr, path in path_overrides.items(): + setattr(config, attr, str(path)) + + tmp_config = tmp_path / "configsolweig.ini" + config.to_file(str(tmp_config)) + return tmp_config + + +def 
_assert_float32(array: Optional[np.ndarray], name: str): + if array is not None: + assert array.dtype == np.float32, f"{name} dtype was {array.dtype}, expected float32" + + +def test_athens_runner_rasters_are_float32(tmp_path): + tmp_config = _prepare_temp_config(tmp_path) + runner = SolweigRunCore(str(tmp_config), str(PARAMS_PATH)) + raster_data = runner.raster_data + + _assert_float32(raster_data.dsm, "dsm") + _assert_float32(raster_data.wallheight, "wallheight") + _assert_float32(raster_data.wallaspect, "wallaspect") + _assert_float32(raster_data.dem, "dem") + _assert_float32(raster_data.cdsm, "cdsm") + _assert_float32(raster_data.tdsm, "tdsm") + _assert_float32(raster_data.bush, "bush") + _assert_float32(raster_data.svfbuveg, "svfbuveg") + _assert_float32(raster_data.buildings, "buildings") + + runner.test_hook() diff --git a/umep/class_configs.py b/umep/class_configs.py index 49267ea..b164660 100644 --- a/umep/class_configs.py +++ b/umep/class_configs.py @@ -2,7 +2,7 @@ import logging import zipfile from dataclasses import dataclass -from typing import Any, Optional, Tuple, Union +from typing import Any, Optional, Union import numpy as np import scipy.ndimage as ndi @@ -19,6 +19,7 @@ from . import common from .functions import wallalgorithms as wa from .functions.SOLWEIGpython.wall_surface_temperature import load_walls +from .tile_manager import TileManager, TileSpec from .util.SEBESOLWEIGCommonFiles import sun_position as sp logger = logging.getLogger(__name__) @@ -74,14 +75,19 @@ class SolweigConfig: plot_poi_patches: bool = False def to_file(self, file_path: str): - """Save configuration to a file.""" + """ + Save configuration to a file. + + Note: use_tiled_loading enables memory-efficient processing of large datasets + by loading tiles on-demand with overlaps calculated from amaxvalue and pixel size. 
+ """ logger.info("Saving configuration to %s", file_path) with open(file_path, "w") as f: for key in type(self).__annotations__: value = getattr(self, key) if value is None: value = "" # Default to empty string if None - if type(self).__annotations__[key] == bool: + if type(self).__annotations__[key] == bool or type(self).__annotations__[key] == int: f.write(f"{key}={int(value)}\n") else: f.write(f"{key}={value}\n") @@ -234,9 +240,9 @@ def __init__( data_len = len(self.YYYY) self.dectime = self.DOY + self.hours / 24 + self.minu / (60 * 24.0) if data_len == 1: - halftimestepdec = 0 + halftimestepdec = 0.0 else: - halftimestepdec = (self.dectime[1] - self.dectime[0]) / 2.0 + halftimestepdec = float((self.dectime[1] - self.dectime[0]) / 2.0) # convert from f32 time = { "sec": 0, "UTC": UTC, @@ -244,21 +250,31 @@ def __init__( sunmaximum = 0.0 # initialize arrays - self.altitude = np.empty(data_len) - self.azimuth = np.empty(data_len) - self.zen = np.empty(data_len) - self.jday = np.empty(data_len) - self.leafon = np.empty(data_len) - self.psi = np.empty(data_len) - self.altmax = np.empty(data_len) - self.Twater = np.empty(data_len) - self.CI = np.empty(data_len) + self.altitude = np.empty(data_len, dtype=np.float32) + self.azimuth = np.empty(data_len, dtype=np.float32) + self.zen = np.empty(data_len, dtype=np.float32) + self.jday = np.empty(data_len, dtype=np.float32) + self.leafon = np.empty(data_len, dtype=np.float32) + self.psi = np.empty(data_len, dtype=np.float32) + self.altmax = np.empty(data_len, dtype=np.float32) + self.Twater = np.empty(data_len, dtype=np.float32) + self.CI = np.empty(data_len, dtype=np.float32) sunmax = dict() # These variables lag across days until updated Twater = None CI = 1 + + def _scalar(value: Any) -> float: + """Convert sun-position outputs to native float scalars.""" + if isinstance(value, (int, float, np.floating)): + return float(value) + arr = np.asarray(value) + if arr.ndim == 0: + return float(arr) + return float(arr.reshape(-1)[0]) + # Iterate over time steps and set vars for i in range(data_len): YMD = datetime.datetime(int(self.YYYY[i]), 1, 1) + datetime.timedelta(int(self.DOY[i]) - 1) @@ -267,8 +283,8 @@ def __init__( fifteen = 0.0 sunmaximum = -90.0 sunmax["zenith"] = 90.0 - while sunmaximum <= 90.0 - sunmax["zenith"]: - sunmaximum = 90.0 - sunmax["zenith"] + while sunmaximum <= 90.0 - _scalar(sunmax["zenith"]): + sunmaximum = 90.0 - _scalar(sunmax["zenith"]) fifteen = fifteen + 15.0 / 1440.0 HM = datetime.timedelta(days=(60 * 10) / 1440.0 + fifteen) YMDHM = YMD + HM @@ -278,7 +294,7 @@ def __init__( time["hour"] = YMDHM.hour time["min"] = YMDHM.minute sunmax = sp.sun_position(time, location) - self.altmax[i] = sunmaximum + self.altmax[i] = float(sunmaximum) # Calculate sun position half = datetime.timedelta(days=halftimestepdec) H = datetime.timedelta(hours=int(self.hours[i])) @@ -290,13 +306,15 @@ def __init__( time["hour"] = YMDHM.hour time["min"] = YMDHM.minute sun = sp.sun_position(time, location) - if (sun["zenith"] > 89.0) & ( - sun["zenith"] <= 90.0 + sun_zenith = _scalar(sun["zenith"]) + if (sun_zenith > 89.0) & ( + sun_zenith <= 90.0 ): # Hopefully fixes weird values in Perez et al. when altitude < 1.0, i.e. 
close to sunrise/sunset - sun["zenith"] = 89.0 - self.altitude[i] = 90.0 - sun["zenith"] - self.zen[i] = sun["zenith"] * (np.pi / 180.0) - self.azimuth[i] = sun["azimuth"] + sun_zenith = 89.0 + sun_azimuth = _scalar(sun["azimuth"]) + self.altitude[i] = 90.0 - sun_zenith + self.zen[i] = sun_zenith * (np.pi / 180.0) + self.azimuth[i] = sun_azimuth # day of year and check for leap year # if calendar.isleap(time["year"]): # dayspermonth = np.atleast_2d([31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]) @@ -387,31 +405,48 @@ class SvfData: svf_veg_blocks_bldg_sh_north: np.ndarray svfalfa: np.ndarray - def __init__(self, model_configs: SolweigConfig): - logger.info("Loading SVF data from %s", model_configs.svf_path) + def __init__(self, model_configs: SolweigConfig, tile_spec: Optional[TileSpec] = None): + if not tile_spec: + logger.info("Loading SVF data from %s", model_configs.svf_path) svf_path_str = str(common.check_path(model_configs.svf_path, make_dir=False)) in_path_str = str(common.check_path(model_configs.working_dir, make_dir=False)) - # Unzip + + # Always unzip SVF files to ensure they exist for both tiled and non-tiled execution with zipfile.ZipFile(svf_path_str, "r") as zip_ref: zip_ref.extractall(in_path_str) + + # Helper to load raster or window + def load(filename): + path = in_path_str + "/" + filename + if tile_spec: + data = common.read_raster_window(path, tile_spec.full_slice, band=1) + if data.dtype == np.float64: + data = data.astype(np.float32) + return data + else: + data, _, _, _ = common.load_raster(path, coerce_f64_to_f32=True) + return data + # Load SVF rasters - self.svf, _, _, _ = common.load_raster(in_path_str + "/" + "svf.tif") - self.svf_east, _, _, _ = common.load_raster(in_path_str + "/" + "svfE.tif") - self.svf_south, _, _, _ = common.load_raster(in_path_str + "/" + "svfS.tif") - self.svf_west, _, _, _ = common.load_raster(in_path_str + "/" + "svfW.tif") - self.svf_north, _, _, _ = common.load_raster(in_path_str + "/" + "svfN.tif") + self.svf = load("svf.tif") + self.svf_east = load("svfE.tif") + self.svf_south = load("svfS.tif") + self.svf_west = load("svfW.tif") + self.svf_north = load("svfN.tif") + if model_configs.use_veg_dem: - self.svf_veg, _, _, _ = common.load_raster(in_path_str + "/" + "svfveg.tif") - self.svf_veg_east, _, _, _ = common.load_raster(in_path_str + "/" + "svfEveg.tif") - self.svf_veg_south, _, _, _ = common.load_raster(in_path_str + "/" + "svfSveg.tif") - self.svf_veg_west, _, _, _ = common.load_raster(in_path_str + "/" + "svfWveg.tif") - self.svf_veg_north, _, _, _ = common.load_raster(in_path_str + "/" + "svfNveg.tif") - self.svf_veg_blocks_bldg_sh, _, _, _ = common.load_raster(in_path_str + "/" + "svfaveg.tif") - self.svf_veg_blocks_bldg_sh_east, _, _, _ = common.load_raster(in_path_str + "/" + "svfEaveg.tif") - self.svf_veg_blocks_bldg_sh_south, _, _, _ = common.load_raster(in_path_str + "/" + "svfSaveg.tif") - self.svf_veg_blocks_bldg_sh_west, _, _, _ = common.load_raster(in_path_str + "/" + "svfWaveg.tif") - self.svf_veg_blocks_bldg_sh_north, _, _, _ = common.load_raster(in_path_str + "/" + "svfNaveg.tif") - logger.info("Vegetation SVF data loaded.") + self.svf_veg = load("svfveg.tif") + self.svf_veg_east = load("svfEveg.tif") + self.svf_veg_south = load("svfSveg.tif") + self.svf_veg_west = load("svfWveg.tif") + self.svf_veg_north = load("svfNveg.tif") + self.svf_veg_blocks_bldg_sh = load("svfaveg.tif") + self.svf_veg_blocks_bldg_sh_east = load("svfEaveg.tif") + self.svf_veg_blocks_bldg_sh_south = load("svfSaveg.tif") + 
self.svf_veg_blocks_bldg_sh_west = load("svfWaveg.tif") + self.svf_veg_blocks_bldg_sh_north = load("svfNaveg.tif") + if not tile_spec: + logger.info("Vegetation SVF data loaded.") else: self.svf_veg = np.ones_like(self.svf) self.svf_veg_east = np.ones_like(self.svf) @@ -423,23 +458,31 @@ def __init__(self, model_configs: SolweigConfig): self.svf_veg_blocks_bldg_sh_south = np.ones_like(self.svf) self.svf_veg_blocks_bldg_sh_west = np.ones_like(self.svf) self.svf_veg_blocks_bldg_sh_north = np.ones_like(self.svf) + # Calculate SVF alpha tmp = self.svf + self.svf_veg - 1.0 tmp[tmp < 0.0] = 0.0 - self.svfalfa = np.arcsin(np.exp(np.log(1.0 - tmp) / 2.0)) - logger.info("SVF data loaded and processed.") + eps = np.finfo(np.float32).tiny + safe_term = np.clip(1.0 - tmp, eps, 1.0) + self.svfalfa = np.arcsin(np.exp(np.log(safe_term) / 2.0)) + if not tile_spec: + logger.info("SVF data loaded and processed.") def raster_preprocessing( dsm: np.ndarray, - dem: np.ndarray | None, - cdsm: np.ndarray | None, - tdsm: np.ndarray | None, + dem: Optional[np.ndarray], + cdsm: Optional[np.ndarray], + tdsm: Optional[np.ndarray], trunk_ratio: float, pix_size: float, amax_local_window_m: int = 100, amax_local_perc: float = 99.9, + quiet: bool = False, ): + nan32 = np.float32(np.nan) + zero32 = np.float32(0.0) + threshold = np.float32(0.1) # amax if dem is None: amaxvalue = float(np.nanmax(dsm) - np.nanmin(dsm)) @@ -452,9 +495,10 @@ def raster_preprocessing( local_min = ndi.minimum_filter(dsm, size=window, mode="nearest") local_range = dsm - local_min amaxvalue = float(np.nanpercentile(local_range, amax_local_perc)) - logger.info( - f"amax {amaxvalue}m derived from {amax_local_window_m}m window and {amax_local_perc}th percentile." - ) + if not quiet: + logger.info( + f"amax {amaxvalue}m derived from {amax_local_window_m}m window and {amax_local_perc}th percentile." 
+ ) except Exception: # Fallback to global range if filtering fails for any reason amaxvalue = float(np.nanmax(dsm) - np.nanmin(dsm)) @@ -463,6 +507,7 @@ def raster_preprocessing( # CDSM is relative to flat surface without DEM if cdsm is None: cdsm = np.zeros_like(dsm) + tdsm = np.zeros_like(dsm) else: if np.nanmax(cdsm) > 50: logger.warning( @@ -471,7 +516,8 @@ def raster_preprocessing( cdsm[np.isnan(cdsm)] = 0.0 # TDSM is relative to flat surface without DEM if tdsm is None: - logger.info("Tree trunk TDSM not provided; using trunk ratio for TDSM.") + if not quiet: + logger.info("Tree trunk TDSM not provided; using trunk ratio for TDSM.") tdsm = cdsm * trunk_ratio if np.nanmax(tdsm) > 50: logger.warning( @@ -486,24 +532,32 @@ def raster_preprocessing( vegmax = np.nanmax(cdsm) - np.nanmin(cdsm) vegmax = min(vegmax, 50) if vegmax > amaxvalue: - logger.warning(f"Overriding amax {amaxvalue}m with veg max height of {vegmax}m.") + if not quiet: + logger.warning(f"Overriding amax {amaxvalue}m with veg max height of {vegmax}m.") amaxvalue = vegmax # Set vegetated pixels to DEM + CDSM otherwise DSM + CDSM if dem is not None: - cdsm = np.where(~np.isnan(dem), dem + cdsm, np.nan) - cdsm = np.where(cdsm - dem < 0.1, 0, cdsm) - tdsm = np.where(~np.isnan(dem), dem + tdsm, np.nan) - tdsm = np.where(tdsm - dem < 0.1, 0, tdsm) + cdsm = np.where(~np.isnan(dem), dem + cdsm, nan32) + cdsm = np.where(cdsm - dem < threshold, zero32, cdsm) + tdsm = np.where(~np.isnan(dem), dem + tdsm, nan32) + tdsm = np.where(tdsm - dem < threshold, zero32, tdsm) else: - cdsm = np.where(~np.isnan(dsm), dsm + cdsm, np.nan) - cdsm = np.where(cdsm - dsm < 0.1, 0, cdsm) - tdsm = np.where(~np.isnan(dsm), dsm + tdsm, np.nan) - tdsm = np.where(tdsm - dsm < 0.1, 0, tdsm) + cdsm = np.where(~np.isnan(dsm), dsm + cdsm, nan32) + cdsm = np.where(cdsm - dsm < threshold, zero32, cdsm) + tdsm = np.where(~np.isnan(dsm), dsm + tdsm, nan32) + tdsm = np.where(tdsm - dsm < threshold, zero32, tdsm) - logger.info("Calculated max height for shadows: %.2fm", amaxvalue) + if not quiet: + logger.info("Calculated max height for shadows: %.2fm", amaxvalue) if amaxvalue > 100: - logger.warning("Max shadow height exceeds 100m, double-check the input rasters for anomalies.") + if not quiet: + logger.warning("Max shadow height exceeds 100m, double-check the input rasters for anomalies.") + + dsm = np.asarray(dsm, dtype=np.float32) + dem = None if dem is None else np.asarray(dem, dtype=np.float32) + cdsm = np.asarray(cdsm, dtype=np.float32) + tdsm = np.asarray(tdsm, dtype=np.float32) return dsm, dem, cdsm, tdsm, amaxvalue @@ -513,9 +567,9 @@ class RasterData: amaxvalue: float dsm: np.ndarray - crs_wkt: str - trf_arr: np.ndarray - nd_val: float + crs_wkt: Optional[str] + trf_arr: list[float] + nd_val: Optional[float] scale: float rows: int cols: int @@ -536,13 +590,52 @@ def __init__( svf_data: SvfData, amax_local_window_m: int = 100, amax_local_perc: float = 99.9, + tile_spec: Optional[TileSpec] = None, ): + if model_configs.dsm_path is None: + raise ValueError("DSM path must be provided before initializing raster data.") + if model_configs.wh_path is None or model_configs.wa_path is None: + raise ValueError("Wall height and aspect rasters must be provided.") + if model_configs.output_dir is None: + raise ValueError("Output directory must be configured before initializing raster data.") + output_dir = model_configs.output_dir + dsm_path = model_configs.dsm_path + wh_path = model_configs.wh_path + wa_path = model_configs.wa_path + + # Helper to load raster or 
window + def load(path, band=1): + if tile_spec: + data = common.read_raster_window(path, tile_spec.full_slice, band=band) + if data.dtype == np.float64: + data = data.astype(np.float32) + return data + else: + data, _, _, _ = common.load_raster(path, bbox=None, coerce_f64_to_f32=True) + return data + # Load DSM - self.dsm, self.trf_arr, self.crs_wkt, self.nd_val = common.load_raster(model_configs.dsm_path, bbox=None) - logger.info("DSM loaded from %s", model_configs.dsm_path) + if tile_spec: + self.dsm = load(dsm_path) + meta = common.get_raster_metadata(dsm_path) + # Transform is already in GDAL format [c, a, b, f, d, e] + self.trf_arr = meta["transform"] + self.crs_wkt = meta["crs"] + self.nd_val = meta["nodata"] + else: + self.dsm, self.trf_arr, self.crs_wkt, self.nd_val = common.load_raster( + dsm_path, bbox=None, coerce_f64_to_f32=True + ) + + if not tile_spec: + logger.info("DSM loaded from %s", dsm_path) self.scale = 1 / self.trf_arr[1] self.rows = self.dsm.shape[0] self.cols = self.dsm.shape[1] + one32 = np.float32(1.0) + zero32 = np.float32(0.0) + + self.dsm = np.ascontiguousarray(self.dsm, dtype=np.float32) # TODO: is this needed? # if self.dsm.min() < 0: # dsmraise = np.abs(self.dsm.min()) @@ -552,36 +645,56 @@ def __init__( # WALLS # heights - self.wallheight, wh_trf, wh_crs, _ = common.load_raster(model_configs.wh_path, bbox=None) - if not self.wallheight.shape == self.dsm.shape: - raise ValueError("Mismatching raster shapes for wall heights and DSM.") - if not np.allclose(self.trf_arr, wh_trf): - raise ValueError("Mismatching spatial transform for wall heights and DSM.") - if not self.crs_wkt == wh_crs: - raise ValueError("Mismatching CRS for wall heights and DSM.") - logger.info("Wall heights loaded") + if tile_spec: + self.wallheight = load(wh_path) + else: + self.wallheight, wh_trf, wh_crs, _ = common.load_raster(wh_path, bbox=None, coerce_f64_to_f32=True) + if not self.wallheight.shape == self.dsm.shape: + raise ValueError("Mismatching raster shapes for wall heights and DSM.") + if not np.allclose(self.trf_arr, wh_trf): + raise ValueError("Mismatching spatial transform for wall heights and DSM.") + if not self.crs_wkt == wh_crs: + raise ValueError("Mismatching CRS for wall heights and DSM.") + + if not tile_spec: + logger.info("Wall heights loaded") + self.wallheight = np.ascontiguousarray(self.wallheight, dtype=np.float32) # aspects - self.wallaspect, wa_trf, wa_crs, _ = common.load_raster(model_configs.wa_path, bbox=None) - if not self.wallaspect.shape == self.dsm.shape: - raise ValueError("Mismatching raster shapes for wall aspects and DSM.") - if not np.allclose(self.trf_arr, wa_trf): - raise ValueError("Mismatching spatial transform for wall aspects and DSM.") - if not self.crs_wkt == wa_crs: - raise ValueError("Mismatching CRS for wall aspects and DSM.") - logger.info("Wall aspects loaded") + if tile_spec: + self.wallaspect = load(wa_path) + else: + self.wallaspect, wa_trf, wa_crs, _ = common.load_raster(wa_path, bbox=None, coerce_f64_to_f32=True) + if not self.wallaspect.shape == self.dsm.shape: + raise ValueError("Mismatching raster shapes for wall aspects and DSM.") + if not np.allclose(self.trf_arr, wa_trf): + raise ValueError("Mismatching spatial transform for wall aspects and DSM.") + if not self.crs_wkt == wa_crs: + raise ValueError("Mismatching CRS for wall aspects and DSM.") + + if not tile_spec: + logger.info("Wall aspects loaded") + self.wallaspect = np.ascontiguousarray(self.wallaspect, dtype=np.float32) # DEM # TODO: Is DEM always provided? 
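The tile-aware `load` helper above delegates windowed reads to `common.read_raster_window(path, tile_spec.full_slice, band=1)`. Below is a minimal sketch of such a windowed read with rasterio; it assumes `full_slice` is a `(row_slice, col_slice)` pair in pixel coordinates (which matches how it is unpacked elsewhere in this diff) and is not the actual `umep.common` implementation.

```python
# Sketch only: a windowed read over a tile's (row_slice, col_slice).
# The real umep.common.read_raster_window may differ.
import numpy as np
import rasterio
from rasterio.windows import Window


def read_raster_window_sketch(path: str, full_slice: tuple[slice, slice], band: int = 1) -> np.ndarray:
    row_slice, col_slice = full_slice
    window = Window.from_slices(row_slice, col_slice)  # pixel-space window
    with rasterio.open(path) as src:
        data = src.read(band, window=window)
    # Mirror the coerce_f64_to_f32 behaviour used elsewhere in this module
    if data.dtype == np.float64:
        data = data.astype(np.float32)
    return data
```

Reading per-window keeps only one tile (plus its shadow overlap) in memory at a time, which is the point of the tiled path.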
if model_configs.dem_path: dem_path_str = str(common.check_path(model_configs.dem_path)) - self.dem, dem_trf, dem_crs, dem_nd_val = common.load_raster(dem_path_str, bbox=None) - if not self.dem.shape == self.dsm.shape: - raise ValueError("Mismatching raster shapes for DEM and CDSM.") - if dem_crs is not None and dem_crs != self.crs_wkt: - raise ValueError("Mismatching CRS for DEM and CDSM.") - if not np.allclose(self.trf_arr, dem_trf): - raise ValueError("Mismatching spatial transform for DEM and CDSM.") - logger.info("DEM loaded from %s", model_configs.dem_path) + if tile_spec: + self.dem = load(dem_path_str) + else: + self.dem, dem_trf, dem_crs, dem_nd_val = common.load_raster( + dem_path_str, bbox=None, coerce_f64_to_f32=True + ) + if not self.dem.shape == self.dsm.shape: + raise ValueError("Mismatching raster shapes for DEM and CDSM.") + if dem_crs is not None and dem_crs != self.crs_wkt: + raise ValueError("Mismatching CRS for DEM and CDSM.") + if not np.allclose(self.trf_arr, dem_trf): + raise ValueError("Mismatching spatial transform for DEM and CDSM.") + + if not tile_spec: + logger.info("DEM loaded from %s", model_configs.dem_path) + self.dem = np.ascontiguousarray(self.dem, dtype=np.float32) # dem[dem == dem_nd_val] = 0.0 # TODO: Check if this is needed re DSM ramifications # if dem.min() < 0: @@ -592,24 +705,40 @@ def __init__( # Vegetation if model_configs.use_veg_dem: - self.cdsm, vegdsm_trf, vegdsm_crs, _ = common.load_raster(model_configs.cdsm_path, bbox=None) - if not self.cdsm.shape == self.dsm.shape: - raise ValueError("Mismatching raster shapes for DSM and CDSM.") - if vegdsm_crs is not None and vegdsm_crs != self.crs_wkt: - raise ValueError("Mismatching CRS for DSM and CDSM.") - if not np.allclose(self.trf_arr, vegdsm_trf): - raise ValueError("Mismatching spatial transform for DSM and CDSM.") - logger.info("Vegetation DSM loaded from %s", model_configs.cdsm_path) - # Tree DSM - if model_configs.tdsm_path: - self.tdsm, vegdsm2_trf, vegdsm2_crs, _ = common.load_raster(model_configs.tdsm_path, bbox=None) - if not self.tdsm.shape == self.dsm.shape: + if tile_spec: + self.cdsm = load(model_configs.cdsm_path) + else: + self.cdsm, vegdsm_trf, vegdsm_crs, _ = common.load_raster( + model_configs.cdsm_path, bbox=None, coerce_f64_to_f32=True + ) + if not self.cdsm.shape == self.dsm.shape: raise ValueError("Mismatching raster shapes for DSM and CDSM.") - if vegdsm2_crs is not None and vegdsm2_crs != self.crs_wkt: + if vegdsm_crs is not None and vegdsm_crs != self.crs_wkt: raise ValueError("Mismatching CRS for DSM and CDSM.") - if not np.allclose(self.trf_arr, vegdsm2_trf): + if not np.allclose(self.trf_arr, vegdsm_trf): raise ValueError("Mismatching spatial transform for DSM and CDSM.") - logger.info("Tree DSM loaded from %s", model_configs.tdsm_path) + + if not tile_spec: + logger.info("Vegetation DSM loaded from %s", model_configs.cdsm_path) + self.cdsm = np.ascontiguousarray(self.cdsm, dtype=np.float32) + # Tree DSM + if model_configs.tdsm_path: + if tile_spec: + self.tdsm = load(model_configs.tdsm_path) + else: + self.tdsm, vegdsm2_trf, vegdsm2_crs, _ = common.load_raster( + model_configs.tdsm_path, bbox=None, coerce_f64_to_f32=True + ) + if not self.tdsm.shape == self.dsm.shape: + raise ValueError("Mismatching raster shapes for DSM and CDSM.") + if vegdsm2_crs is not None and vegdsm2_crs != self.crs_wkt: + raise ValueError("Mismatching CRS for DSM and CDSM.") + if not np.allclose(self.trf_arr, vegdsm2_trf): + raise ValueError("Mismatching spatial transform for DSM and CDSM.") 
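The shape/transform/CRS checks repeated here for wall heights, wall aspects, DEM and CDSM (and below for TDSM and land cover) could be collapsed into one helper. A possible refactor is sketched below with the hypothetical name `_validate_alignment`; it is not part of this diff.

```python
# Hypothetical helper (not in the diff): consolidates the repeated
# shape / geotransform / CRS checks against the reference DSM.
import numpy as np


def _validate_alignment(name, arr, trf, crs, ref_shape, ref_trf, ref_crs):
    if arr.shape != ref_shape:
        raise ValueError(f"Mismatching raster shapes for {name} and DSM.")
    if trf is not None and not np.allclose(ref_trf, trf):
        raise ValueError(f"Mismatching spatial transform for {name} and DSM.")
    if crs is not None and crs != ref_crs:
        raise ValueError(f"Mismatching CRS for {name} and DSM.")
```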
+ + if not tile_spec: + logger.info("Tree DSM loaded from %s", model_configs.tdsm_path) + self.tdsm = np.ascontiguousarray(self.tdsm, dtype=np.float32) else: self.tdsm = None else: @@ -625,69 +754,101 @@ def __init__( self.trf_arr[1], amax_local_window_m, amax_local_perc, + quiet=bool(tile_spec), ) + # Clamp amaxvalue for tiled processing + if tile_spec and self.amaxvalue > 150.0: + logger.info(f"Clamping amaxvalue {self.amaxvalue} to 150m for tiled processing.") + self.amaxvalue = 150.0 + + self.dsm = np.ascontiguousarray(self.dsm, dtype=np.float32) + if self.dem is not None: + self.dem = np.ascontiguousarray(self.dem, dtype=np.float32) + if self.cdsm is not None: + self.cdsm = np.ascontiguousarray(self.cdsm, dtype=np.float32) + if self.tdsm is not None: + self.tdsm = np.ascontiguousarray(self.tdsm, dtype=np.float32) + # bushes etc if model_configs.use_veg_dem: + transmissivity = np.float32(model_params.Tree_settings.Value.Transmissivity) self.bush = np.logical_not(self.tdsm * self.cdsm) * self.cdsm - self.svfbuveg = svf_data.svf - (1.0 - svf_data.svf_veg) * ( - 1.0 - model_params.Tree_settings.Value.Transmissivity - ) + self.bush = np.ascontiguousarray(self.bush, dtype=np.float32) + self.svfbuveg = svf_data.svf - (one32 - svf_data.svf_veg) * (one32 - transmissivity) + self.svfbuveg = np.ascontiguousarray(self.svfbuveg, dtype=np.float32) else: - logger.info("Vegetation DEM not used; vegetation arrays set to None.") - self.bush = np.zeros([self.rows, self.cols]) - self.svfbuveg = svf_data.svf + if not tile_spec: + logger.info("Vegetation DEM not used; vegetation arrays set to None.") + self.bush = np.zeros([self.rows, self.cols], dtype=np.float32) + self.svfbuveg = np.ascontiguousarray(svf_data.svf, dtype=np.float32) - common.save_raster( - model_configs.output_dir + "/input-dsm.tif", - self.dsm, - self.trf_arr, - self.crs_wkt, - self.nd_val, - ) - common.save_raster( - model_configs.output_dir + "/input-cdsm.tif", - self.cdsm, - self.trf_arr, - self.crs_wkt, - self.nd_val, - ) - common.save_raster( - model_configs.output_dir + "/input-tdsm.tif", - self.tdsm, - self.trf_arr, - self.crs_wkt, - self.nd_val, - ) - common.save_raster( - model_configs.output_dir + "/input-svfbuveg.tif", - self.svfbuveg, - self.trf_arr, - self.crs_wkt, - self.nd_val, - ) - common.save_raster( - model_configs.output_dir + "/input-bush.tif", - self.bush, - self.trf_arr, - self.crs_wkt, - self.nd_val, - ) + if not tile_spec: + common.save_raster( + output_dir + "/input-dsm.tif", + self.dsm, + self.trf_arr, + self.crs_wkt, + self.nd_val, + coerce_f64_to_f32=True, + ) + common.save_raster( + output_dir + "/input-cdsm.tif", + self.cdsm, + self.trf_arr, + self.crs_wkt, + self.nd_val, + coerce_f64_to_f32=True, + ) + common.save_raster( + output_dir + "/input-tdsm.tif", + self.tdsm, + self.trf_arr, + self.crs_wkt, + self.nd_val, + coerce_f64_to_f32=True, + ) + common.save_raster( + output_dir + "/input-svfbuveg.tif", + self.svfbuveg, + self.trf_arr, + self.crs_wkt, + self.nd_val, + coerce_f64_to_f32=True, + ) + common.save_raster( + output_dir + "/input-bush.tif", + self.bush, + self.trf_arr, + self.crs_wkt, + self.nd_val, + coerce_f64_to_f32=True, + ) # Land cover if model_configs.use_landcover: - lc_path_str = str(common.check_path(model_configs.lc_path)) - self.lcgrid, lc_trf, lc_crs, _ = common.load_raster(lc_path_str, bbox=None) - if not self.lcgrid.shape == self.dsm.shape: - raise ValueError("Mismatching raster shapes for land cover and DSM.") - if lc_crs is not None and lc_crs != self.crs_wkt: - raise 
ValueError("Mismatching CRS for land cover and DSM.") - if not np.allclose(self.trf_arr, lc_trf): - raise ValueError("Mismatching spatial transform for land cover and DSM.") - logger.info("Land cover loaded from %s", model_configs.lc_path) + lc_path = model_configs.lc_path + if lc_path is None: + raise ValueError("Land cover path must be provided when use_landcover is True.") + lc_path_str = str(common.check_path(lc_path)) + if tile_spec: + self.lcgrid = load(lc_path_str) + else: + self.lcgrid, lc_trf, lc_crs, _ = common.load_raster(lc_path_str, bbox=None, coerce_f64_to_f32=True) + if not self.lcgrid.shape == self.dsm.shape: + raise ValueError("Mismatching raster shapes for land cover and DSM.") + if lc_crs is not None and lc_crs != self.crs_wkt: + raise ValueError("Mismatching CRS for land cover and DSM.") + if not np.allclose(self.trf_arr, lc_trf): + raise ValueError("Mismatching spatial transform for land cover and DSM.") + + if not tile_spec: + logger.info("Land cover loaded from %s", lc_path) + self.lcgrid = np.ascontiguousarray(self.lcgrid, dtype=np.float32) else: self.lcgrid = None - logger.info("Land cover not used; lcgrid set to None.") + if not tile_spec: + logger.info("Land cover not used; lcgrid set to None.") # Buildings from land cover option # TODO: Check intended logic here @@ -700,28 +861,40 @@ def __init__( buildings[buildings == 4] = 1 buildings[buildings == 3] = 1 buildings[buildings == 2] = 0 - self.buildings = buildings - logger.info("Buildings raster created from land cover data.") + self.buildings = np.ascontiguousarray(buildings, dtype=np.float32) + if not tile_spec: + logger.info("Buildings raster created from land cover data.") elif model_configs.use_dem_for_buildings: - buildings = np.where(~np.isnan(self.dem) & ~np.isnan(self.dsm), self.dsm - self.dem, 0.0) + if self.dem is None: + raise ValueError("DEM raster must be available when use_dem_for_buildings is True.") + height_diff = self.dsm - self.dem + buildings = np.where( + ~np.isnan(self.dem) & ~np.isnan(self.dsm), + height_diff, + zero32, + ) # TODO: Check intended logic here - 1 vs 0 buildings[buildings < 2.0] = 1.0 buildings[buildings >= 2.0] = 0.0 - self.buildings = buildings - logger.info("Buildings raster created from DSM and DEM data.") + self.buildings = np.ascontiguousarray(buildings, dtype=np.float32) + if not tile_spec: + logger.info("Buildings raster created from DSM and DEM data.") else: self.buildings = None - logger.info("Buildings raster not created.") + if not tile_spec: + logger.info("Buildings raster not created.") # Save buildings raster if requested if self.buildings is not None and model_configs.save_buildings: common.save_raster( - model_configs.output_dir + "/buildings.tif", + output_dir + "/buildings.tif", self.buildings, self.trf_arr, self.crs_wkt, self.nd_val, + coerce_f64_to_f32=True, ) - logger.info("Buildings raster saved to %s/buildings.tif", model_configs.output_dir) + if not tile_spec: + logger.info("Buildings raster saved to %s/buildings.tif", output_dir) class ShadowMatrices: @@ -741,31 +914,49 @@ def __init__( model_configs: SolweigConfig, model_params, svf_data: SvfData, + tile_spec: Optional[TileSpec] = None, ): self.use_aniso = model_configs.use_aniso if self.use_aniso: - logger.info("Loading anisotropic shadow matrices from %s", model_configs.aniso_path) + if not tile_spec: + logger.info("Loading anisotropic shadow matrices from %s", model_configs.aniso_path) aniso_path_str = str(common.check_path(model_configs.aniso_path, make_dir=False)) + + # Load data. 
If tiled, we try to slice to save memory. + # Note: np.load on .npz reads the whole array into memory when accessed. + # Ideally these should be .npy or memory-mapped if they are huge. data = np.load(aniso_path_str) - self.shmat = data["shadowmat"].astype(np.float32) - self.vegshmat = data["vegshadowmat"].astype(np.float32) - self.vbshvegshmat = data["vbshmat"].astype(np.float32) + + def load_slice(key): + if tile_spec: + row_slice, col_slice = tile_spec.full_slice + arr = data[key][row_slice, col_slice, :] + else: + arr = data[key] + + # Handle uint8 format (new optimized format - 75% smaller) + # Convert uint8 (0-255) back to float32 (0.0-1.0) + if arr.dtype == np.uint8: + return arr.astype(np.float32) / 255.0 + else: + # Legacy float32 format + return arr.astype(np.float32) + + self.shmat = load_slice("shadowmat") + self.vegshmat = load_slice("vegshadowmat") + self.vbshvegshmat = load_slice("vbshmat") + if model_configs.use_veg_dem: # TODO: thoughts on memory optimization for smaller machines / large arrays? self.diffsh = ( self.shmat - (1 - self.vegshmat) * (1 - model_params.Tree_settings.Value.Transmissivity) ).astype(np.float32) - """ - self.diffsh = np.zeros((raster_data.rows, raster_data.cols, self.shmat.shape[2])) - for i in range(0, self.shmat.shape[2]): - self.diffsh[:, :, i] = self.shmat[:, :, i] - (1 - self.vegshmat[:, :, i]) * ( - 1 - model_params.Tree_settings.Value.Transmissivity - ) - """ - logger.info("Shadow matrices with vegetation loaded.") + if not tile_spec: + logger.info("Shadow matrices with vegetation loaded.") else: self.diffsh = self.shmat - logger.info("Shadow matrices loaded (no vegetation).") + if not tile_spec: + logger.info("Shadow matrices loaded (no vegetation).") # Estimate number of patches based on shadow matrices if self.shmat.shape[2] == 145: @@ -781,7 +972,7 @@ def __init__( self.asvf = np.arccos(np.sqrt(svf_data.svf)) # Empty array for steradians - self.steradians = np.zeros(self.shmat.shape[2]) + self.steradians = np.zeros(self.shmat.shape[2], dtype=np.float32) else: # no anisotropic sky # downstream functions only access these if use_aniso is True @@ -793,7 +984,8 @@ def __init__( self.asvf = None self.patch_option = 0 self.steradians = 0 - logger.info("Anisotropic sky not used; shadow matrices not loaded.") + if not tile_spec: + logger.info("Anisotropic sky not used; shadow matrices not loaded.") class TgMaps: @@ -818,18 +1010,18 @@ class TgMaps: Tgmap1N: np.ndarray TgOut1: np.ndarray - def __init__(self, use_landcover: bool, model_params, raster_data: RasterData): + def __init__(self, use_landcover: bool, model_params, raster_data: RasterData, quiet: bool = False): """ This is a vectorized version that avoids looping over pixels. 
""" # Initialization of maps - self.Knight = np.zeros((raster_data.rows, raster_data.cols)) - self.Tgmap1 = np.zeros((raster_data.rows, raster_data.cols)) - self.Tgmap1E = np.zeros((raster_data.rows, raster_data.cols)) - self.Tgmap1S = np.zeros((raster_data.rows, raster_data.cols)) - self.Tgmap1W = np.zeros((raster_data.rows, raster_data.cols)) - self.Tgmap1N = np.zeros((raster_data.rows, raster_data.cols)) - self.TgOut1 = np.zeros((raster_data.rows, raster_data.cols)) + self.Knight = np.zeros((raster_data.rows, raster_data.cols), dtype=np.float32) + self.Tgmap1 = np.zeros((raster_data.rows, raster_data.cols), dtype=np.float32) + self.Tgmap1E = np.zeros((raster_data.rows, raster_data.cols), dtype=np.float32) + self.Tgmap1S = np.zeros((raster_data.rows, raster_data.cols), dtype=np.float32) + self.Tgmap1W = np.zeros((raster_data.rows, raster_data.cols), dtype=np.float32) + self.Tgmap1N = np.zeros((raster_data.rows, raster_data.cols), dtype=np.float32) + self.TgOut1 = np.zeros((raster_data.rows, raster_data.cols), dtype=np.float32) # Set up the Tg maps based on whether land cover is used if use_landcover is False: @@ -841,7 +1033,8 @@ def __init__(self, use_landcover: bool, model_params, raster_data: RasterData): self.TgK_wall = model_params.Ts_deg.Value.Walls self.Tstart_wall = model_params.Tstart.Value.Walls self.TmaxLST_wall = model_params.TmaxLST.Value.Walls - logger.info("TgMaps initialized with default (no land cover) parameters.") + if not quiet: + logger.info("TgMaps initialized with default (no land cover) parameters.") else: if raster_data.lcgrid is None: raise ValueError("Land cover grid is not available.") @@ -865,9 +1058,16 @@ def __init__(self, use_landcover: bool, model_params, raster_data: RasterData): name_to_emissivity = {name: getattr(model_params.Emissivity.Value, name) for name in id_to_name.values()} name_to_tmaxlst = {name: getattr(model_params.TmaxLST.Value, name) for name in id_to_name.values()} name_to_tsdeg = {name: getattr(model_params.Ts_deg.Value, name) for name in id_to_name.values()} + # Check for invalid land cover IDs in the grid + unique_lc_ids = np.unique(lc_grid[~np.isnan(lc_grid)]) + invalid_ids = set(unique_lc_ids) - set(valid_ids) + if invalid_ids: + logger.warning(f"Land cover grid contains invalid IDs: {sorted(invalid_ids)}. 
These will be ignored.") # Perform replacements for each valid land cover ID for i in valid_ids: mask = lc_grid == i + if not np.any(mask): + continue name = id_to_name[i] self.Tstart[mask] = name_to_tstart[name] self.alb_grid[mask] = name_to_albedo[name] @@ -878,7 +1078,8 @@ def __init__(self, use_landcover: bool, model_params, raster_data: RasterData): self.TgK_wall = getattr(model_params.Ts_deg.Value, "Walls", None) self.Tstart_wall = getattr(model_params.Tstart.Value, "Walls", None) self.TmaxLST_wall = getattr(model_params.TmaxLST.Value, "Walls", None) - logger.info("TgMaps initialized using land cover grid.") + if not quiet: + logger.info("TgMaps initialized using land cover grid.") class WallsData: @@ -889,7 +1090,7 @@ class WallsData: timeStep: int walls_scheme: np.ndarray dirwalls_scheme: np.ndarray - met_for_xarray: Optional[Tuple[Any]] + met_for_xarray: Optional[Any] def __init__( self, @@ -898,13 +1099,20 @@ def __init__( raster_data: RasterData, weather_data: EnvironData, tg_maps: TgMaps, + tile_spec: Optional[TileSpec] = None, ): if model_configs.use_wall_scheme: - logger.info("Loading wall scheme data from %s", model_configs.wall_path) + if not tile_spec: + logger.info("Loading wall scheme data from %s", model_configs.wall_path) wall_path_str = str(common.check_path(model_configs.wall_path, make_dir=False)) wallData = np.load(wall_path_str) # - self.voxelMaps = wallData["voxelId"] + if tile_spec: + row_slice, col_slice = tile_spec.full_slice + self.voxelMaps = wallData["voxelId"][row_slice, col_slice] + else: + self.voxelMaps = wallData["voxelId"] + self.voxelTable = wallData["voxelTable"] # Get wall type # TODO: @@ -917,6 +1125,10 @@ def __init__( self.walls_scheme.copy(), raster_data.scale, raster_data.dsm, None, 100.0 / 180.0 ) # Calculate timeStep + if len(weather_data.YYYY) < 2: + raise ValueError( + "Weather data must contain at least 2 timesteps to calculate timeStep for wall scheme." + ) first_timestep = ( pd.to_datetime(weather_data.YYYY[0], format="%Y") + pd.to_timedelta(weather_data.DOY[0] - 1, unit="d") @@ -950,12 +1162,407 @@ def __init__( + pd.to_timedelta(weather_data.hours, unit="h") + pd.to_timedelta(weather_data.minu, unit="m") ) - logger.info("Wall scheme data loaded and processed.") + if not tile_spec: + logger.info("Wall scheme data loaded and processed.") else: self.voxelMaps = None self.voxelTable = None self.timeStep = 0 - self.walls_scheme = np.ones((raster_data.rows, raster_data.cols)) * 10.0 - self.dirwalls_scheme = np.ones((raster_data.rows, raster_data.cols)) * 10.0 + self.walls_scheme = np.ones((raster_data.rows, raster_data.cols), dtype=np.float32) * 10.0 + self.dirwalls_scheme = np.ones((raster_data.rows, raster_data.cols), dtype=np.float32) * 10.0 self.met_for_xarray = None - logger.info("Wall scheme not used; default wall data initialized.") + if not tile_spec: + logger.info("Wall scheme not used; default wall data initialized.") + + +class TiledRasterData(RasterData): + """ + Tiled version of RasterData that uses lazy loading. + + This class extends RasterData to support tiled loading of large rasters, + reducing memory usage by loading tiles on demand. 
+ """ + + def __init__( + self, + model_configs: SolweigConfig, + model_params, + svf_data, # Can be SvfData or TiledSvfData + amax_local_window_m: int = 100, + amax_local_perc: float = 99.9, + tile_manager=None, # Optional: pass in an existing TileManager + ): + """Initialize tiled raster data with lazy loading.""" + from .tile_manager import LazyRasterLoader, TileManager + + if model_configs.dsm_path is None: + raise ValueError("DSM path must be provided before initializing raster data.") + if model_configs.wh_path is None or model_configs.wa_path is None: + raise ValueError("Wall height and aspect rasters must be provided.") + if model_configs.output_dir is None: + raise ValueError("Output directory must be configured before initializing raster data.") + + # Load only metadata without reading full raster data + # Use a small sample to get dimensions and transform + sample_dsm, trf_arr, crs_wkt, nd_val = common.load_raster( + model_configs.dsm_path, bbox=None, coerce_f64_to_f32=True + ) + + # Store metadata + self.trf_arr = trf_arr + self.crs_wkt = crs_wkt + self.nd_val = nd_val + self.scale = 1 / trf_arr[1] + self.rows = sample_dsm.shape[0] + self.cols = sample_dsm.shape[1] + pixel_size = trf_arr[1] + + # Store configuration for lazy amax calculation + self.model_configs = model_configs + self.model_params = model_params + self.amax_local_window_m = amax_local_window_m + self.amax_local_perc = amax_local_perc + + # Calculate a conservative global amaxvalue for tile overlap + # Use the sample data we already loaded + if model_configs.dem_path is None: + # Without DEM, use DSM range as conservative estimate + global_amax = float(np.nanmax(sample_dsm) - np.nanmin(sample_dsm)) + else: + # With DEM, estimate from sample (conservative) + # Load just a sample of DEM for quick estimate + sample_dem, _, _, _ = common.load_raster(model_configs.dem_path, bbox=None, coerce_f64_to_f32=True) + height_diff = sample_dsm - sample_dem + global_amax = float(np.nanpercentile(height_diff[~np.isnan(height_diff)], 99.9)) + + # Add safety margin and cap at reasonable maximum + global_amax = min(global_amax * 1.2, 200.0) # 20% safety margin, max 200m + self.amaxvalue = global_amax + + # Clean up sample data to free memory immediately + del sample_dsm + if model_configs.dem_path is not None and "sample_dem" in locals(): + del sample_dem + + # Get or create tile manager + if tile_manager is not None: + # Use provided tile manager and update with shadow-aware amax + self.tile_manager = tile_manager + self.tile_manager.amaxvalue = global_amax + self.tile_manager._generate_tiles() # Regenerate with new overlap + logger.info( + f"Using provided TileManager, updated with amax={global_amax:.1f}m, " + f"{len(self.tile_manager.tiles)} tiles" + ) + else: + # Create new tile manager with conservative overlap + self.tile_manager = TileManager( + rows=self.rows, + cols=self.cols, + tile_size=model_configs.tile_size, + amaxvalue=global_amax, + pixel_size=pixel_size, + ) + + logger.info( + f"TiledRasterData initialized: {self.rows}x{self.cols}, " + f"tile_size={model_configs.tile_size}, {len(self.tile_manager.tiles)} tiles, " + f"conservative amax={global_amax:.1f}m for overlap calculation" + ) + + # Create lazy loaders for rasters (but don't load data yet) + self.dsm_loader = LazyRasterLoader(model_configs.dsm_path, self.tile_manager) + self.wh_loader = LazyRasterLoader(model_configs.wh_path, self.tile_manager) + self.wa_loader = LazyRasterLoader(model_configs.wa_path, self.tile_manager) + + # Optional rasters + 
self.dem_loader = None + if model_configs.dem_path: + self.dem_loader = LazyRasterLoader(model_configs.dem_path, self.tile_manager) + + self.cdsm_loader = None + self.tdsm_loader = None + if model_configs.use_veg_dem and model_configs.cdsm_path: + self.cdsm_loader = LazyRasterLoader(model_configs.cdsm_path, self.tile_manager) + if model_configs.tdsm_path: + self.tdsm_loader = LazyRasterLoader(model_configs.tdsm_path, self.tile_manager) + + self.lc_loader = None + if model_configs.use_landcover and model_configs.lc_path: + self.lc_loader = LazyRasterLoader(model_configs.lc_path, self.tile_manager) + + logger.info("TiledRasterData loaders initialized (data not loaded yet)") + logger.info("Note: Direct property access (e.g., .dsm) is disabled. Use load_tile() instead.") + + def compute_tile_amaxvalue(self, tile_idx: int) -> float: + """ + Compute amaxvalue dynamically for a specific tile. + + This loads only the tile data and computes the local amax, + which is more accurate than the global conservative estimate. + + Args: + tile_idx: Index of tile + + Returns: + Local amaxvalue for the tile in meters + """ + tile_data = self.load_tile(tile_idx) + + dsm_tile = tile_data["dsm"] + dem_tile = tile_data["dem"] + cdsm_tile = tile_data["cdsm"] + tdsm_tile = tile_data["tdsm"] + + # Compute local amax for this tile + _, _, _, _, tile_amax = raster_preprocessing( + dsm_tile, + dem_tile, + cdsm_tile, + tdsm_tile, + self.model_params.Tree_settings.Value.Trunk_ratio, + self.trf_arr[1], + self.amax_local_window_m, + self.amax_local_perc, + quiet=True, + ) + + return tile_amax + + def load_tile(self, tile_idx: int, preprocess: bool = False) -> dict: + """ + Load all raster data for a specific tile. + + Args: + tile_idx: Index of tile to load + preprocess: If True, apply raster preprocessing to tile data + + Returns: + Dictionary with tile data for all rasters + """ + tile_spec = self.tile_manager.get_tile(tile_idx) + + tile_data = { + "tile_spec": tile_spec, + "dsm": self.dsm_loader.load_tile(tile_idx), + "wallheight": self.wh_loader.load_tile(tile_idx), + "wallaspect": self.wa_loader.load_tile(tile_idx), + } + + if self.dem_loader: + tile_data["dem"] = self.dem_loader.load_tile(tile_idx) + else: + tile_data["dem"] = None + + if self.cdsm_loader: + tile_data["cdsm"] = self.cdsm_loader.load_tile(tile_idx) + else: + tile_data["cdsm"] = None + + if self.tdsm_loader: + tile_data["tdsm"] = self.tdsm_loader.load_tile(tile_idx) + else: + tile_data["tdsm"] = None + + if self.lc_loader: + tile_data["lcgrid"] = self.lc_loader.load_tile(tile_idx) + else: + tile_data["lcgrid"] = None + + # Apply preprocessing if requested + if preprocess: + dsm, dem, cdsm, tdsm, tile_amax = raster_preprocessing( + tile_data["dsm"], + tile_data["dem"], + tile_data["cdsm"], + tile_data["tdsm"], + self.model_params.Tree_settings.Value.Trunk_ratio, + self.trf_arr[1], + self.amax_local_window_m, + self.amax_local_perc, + quiet=True, + ) + + # Update tile data with preprocessed arrays + tile_data["dsm"] = dsm + tile_data["dem"] = dem + tile_data["cdsm"] = cdsm + tile_data["tdsm"] = tdsm + tile_data["tile_amax"] = tile_amax + + # Compute derived properties for tile + if self.model_configs.use_veg_dem and cdsm is not None and tdsm is not None: + tile_data["bush"] = np.ascontiguousarray(np.logical_not(tdsm * cdsm) * cdsm, dtype=np.float32) + else: + tile_data["bush"] = np.zeros(tile_data["dsm"].shape, dtype=np.float32) + + # Compute buildings for tile + if not self.model_configs.use_dem_for_buildings and tile_data["lcgrid"] is not None: + 
lcgrid = tile_data["lcgrid"] + buildings = np.copy(lcgrid) + buildings[buildings == 7] = 1 + buildings[buildings == 6] = 1 + buildings[buildings == 5] = 1 + buildings[buildings == 4] = 1 + buildings[buildings == 3] = 1 + buildings[buildings == 2] = 0 + tile_data["buildings"] = np.ascontiguousarray(buildings, dtype=np.float32) + elif self.model_configs.use_dem_for_buildings and dem is not None: + height_diff = dsm - dem + buildings = np.where( + ~np.isnan(dem) & ~np.isnan(dsm), + height_diff, + np.float32(0.0), + ) + buildings[buildings < 2.0] = 1.0 + buildings[buildings >= 2.0] = 0.0 + tile_data["buildings"] = np.ascontiguousarray(buildings, dtype=np.float32) + else: + tile_data["buildings"] = None + + return tile_data + + def clear_cache(self): + """Clear all cached tile data to free memory.""" + self.dsm_loader.clear_cache() + self.wh_loader.clear_cache() + self.wa_loader.clear_cache() + if self.dem_loader: + self.dem_loader.clear_cache() + if self.cdsm_loader: + self.cdsm_loader.clear_cache() + if self.tdsm_loader: + self.tdsm_loader.clear_cache() + if self.lc_loader: + self.lc_loader.clear_cache() + + +class TiledSvfData(SvfData): + """ + Tiled version of SvfData that uses lazy loading. + + This class extends SvfData to support tiled loading of SVF rasters, + reducing memory usage by loading tiles on demand. + """ + + @staticmethod + def create_tile_manager(model_configs: SolweigConfig): + """ + Create an independent TileManager based on raster dimensions. + + Args: + model_configs: SOLWEIG configuration + + Returns: + TileManager instance + """ + + # Get raster dimensions from DSM + dsm_arr, trf_arr, _, _ = common.load_raster(model_configs.dsm_path) + rows, cols = dsm_arr.shape + pixel_size = trf_arr[1] + + # Create tile manager (amaxvalue will be updated later by TiledRasterData) + tile_manager = TileManager( + rows=rows, + cols=cols, + tile_size=model_configs.tile_size, + pixel_size=pixel_size, + amaxvalue=0.0, # Placeholder, will be updated by TiledRasterData + ) + logger.info("Created independent TileManager: %d tiles", len(tile_manager.tiles)) + return tile_manager + + def __init__(self, model_configs: SolweigConfig, tile_manager): + """Initialize tiled SVF data with lazy loading.""" + from .tile_manager import LazyRasterLoader + + logger.info("Loading SVF data (tiled) from %s", model_configs.svf_path) + svf_path_str = str(common.check_path(model_configs.svf_path, make_dir=False)) + in_path_str = str(common.check_path(model_configs.working_dir, make_dir=False)) + + # Unzip SVF files + with zipfile.ZipFile(svf_path_str, "r") as zip_ref: + zip_ref.extractall(in_path_str) + + # Store the provided tile manager + self.tile_manager = tile_manager + logger.info("Using provided TileManager for SVF data: %d tiles", len(self.tile_manager.tiles)) + + # Create lazy loaders for SVF rasters + self.svf_loader = LazyRasterLoader(in_path_str + "/svf.tif", self.tile_manager) + self.svf_east_loader = LazyRasterLoader(in_path_str + "/svfE.tif", self.tile_manager) + self.svf_south_loader = LazyRasterLoader(in_path_str + "/svfS.tif", self.tile_manager) + self.svf_west_loader = LazyRasterLoader(in_path_str + "/svfW.tif", self.tile_manager) + self.svf_north_loader = LazyRasterLoader(in_path_str + "/svfN.tif", self.tile_manager) + + if model_configs.use_veg_dem: + self.svf_veg_loader = LazyRasterLoader(in_path_str + "/svfveg.tif", self.tile_manager) + self.svf_veg_east_loader = LazyRasterLoader(in_path_str + "/svfEveg.tif", self.tile_manager) + self.svf_veg_south_loader = LazyRasterLoader(in_path_str + 
"/svfSveg.tif", self.tile_manager) + self.svf_veg_west_loader = LazyRasterLoader(in_path_str + "/svfWveg.tif", self.tile_manager) + self.svf_veg_north_loader = LazyRasterLoader(in_path_str + "/svfNveg.tif", self.tile_manager) + self.svf_veg_blocks_bldg_sh_loader = LazyRasterLoader(in_path_str + "/svfaveg.tif", self.tile_manager) + self.svf_veg_blocks_bldg_sh_east_loader = LazyRasterLoader(in_path_str + "/svfEaveg.tif", self.tile_manager) + self.svf_veg_blocks_bldg_sh_south_loader = LazyRasterLoader( + in_path_str + "/svfSaveg.tif", self.tile_manager + ) + self.svf_veg_blocks_bldg_sh_west_loader = LazyRasterLoader(in_path_str + "/svfWaveg.tif", self.tile_manager) + self.svf_veg_blocks_bldg_sh_north_loader = LazyRasterLoader( + in_path_str + "/svfNaveg.tif", self.tile_manager + ) + self.use_veg = True + else: + self.use_veg = False + + logger.info("TiledSvfData loaders initialized (data not loaded yet)") + logger.info("Use load_tile(tile_idx) for memory-efficient tile-based access") + logger.info("Note: Direct property access (e.g., .svf) is disabled. Use load_tile() instead.") + + def load_tile(self, tile_idx: int) -> dict: + """ + Load all SVF data for a specific tile. + + Returns: + Dictionary with tile data for all SVF rasters + """ + tile_data = { + "svf": self.svf_loader.load_tile(tile_idx), + "svf_east": self.svf_east_loader.load_tile(tile_idx), + "svf_south": self.svf_south_loader.load_tile(tile_idx), + "svf_west": self.svf_west_loader.load_tile(tile_idx), + "svf_north": self.svf_north_loader.load_tile(tile_idx), + } + + if self.use_veg: + tile_data["svf_veg"] = self.svf_veg_loader.load_tile(tile_idx) + tile_data["svf_veg_east"] = self.svf_veg_east_loader.load_tile(tile_idx) + tile_data["svf_veg_south"] = self.svf_veg_south_loader.load_tile(tile_idx) + tile_data["svf_veg_west"] = self.svf_veg_west_loader.load_tile(tile_idx) + tile_data["svf_veg_north"] = self.svf_veg_north_loader.load_tile(tile_idx) + tile_data["svf_veg_blocks_bldg_sh"] = self.svf_veg_blocks_bldg_sh_loader.load_tile(tile_idx) + tile_data["svf_veg_blocks_bldg_sh_east"] = self.svf_veg_blocks_bldg_sh_east_loader.load_tile(tile_idx) + tile_data["svf_veg_blocks_bldg_sh_south"] = self.svf_veg_blocks_bldg_sh_south_loader.load_tile(tile_idx) + tile_data["svf_veg_blocks_bldg_sh_west"] = self.svf_veg_blocks_bldg_sh_west_loader.load_tile(tile_idx) + tile_data["svf_veg_blocks_bldg_sh_north"] = self.svf_veg_blocks_bldg_sh_north_loader.load_tile(tile_idx) + + return tile_data + + def clear_cache(self): + """Clear all cached tile data to free memory.""" + self.svf_loader.clear_cache() + self.svf_east_loader.clear_cache() + self.svf_south_loader.clear_cache() + self.svf_west_loader.clear_cache() + self.svf_north_loader.clear_cache() + if self.use_veg: + self.svf_veg_loader.clear_cache() + self.svf_veg_east_loader.clear_cache() + self.svf_veg_south_loader.clear_cache() + self.svf_veg_west_loader.clear_cache() + self.svf_veg_north_loader.clear_cache() + self.svf_veg_blocks_bldg_sh_loader.clear_cache() + self.svf_veg_blocks_bldg_sh_east_loader.clear_cache() + self.svf_veg_blocks_bldg_sh_south_loader.clear_cache() + self.svf_veg_blocks_bldg_sh_west_loader.clear_cache() + self.svf_veg_blocks_bldg_sh_north_loader.clear_cache() diff --git a/umep/common.py b/umep/common.py index f8a7b95..00aa79a 100644 --- a/umep/common.py +++ b/umep/common.py @@ -1,4 +1,7 @@ import logging +import math +import os +import sys from pathlib import Path import numpy as np @@ -6,34 +9,243 @@ logging.basicConfig(level=logging.INFO) logger = 
logging.getLogger(__name__) -try: + +def _detect_osgeo_environment() -> bool: + """ + Detect if we're running in an OSGeo4W or QGIS environment. + + These environments have their own GDAL installation and pip-installed + rasterio will cause DLL conflicts on Windows. + """ + # Check for QGIS + if "qgis" in sys.modules or "qgis.core" in sys.modules: + return True + + # Check for QGIS environment variables + if any(key in os.environ for key in ("QGIS_PREFIX_PATH", "QGIS_DEBUG")): + return True + + # Check for OSGeo4W environment + if "OSGEO4W_ROOT" in os.environ: + return True + + # Check if Python executable is inside OSGeo4W or QGIS directory (Windows) + exe_path = sys.executable.lower() + return any(marker in exe_path for marker in ("osgeo4w", "qgis")) + + +def _try_import_gdal() -> bool: + """Try to import GDAL and return True if successful.""" + try: + from osgeo import gdal, osr # noqa: F401 + + return True + except (ImportError, OSError) as e: + logger.debug(f"GDAL import failed: {e}") + return False + + +def _try_import_rasterio() -> bool: + """ + Try to import rasterio and return True if successful. + + This catches both ImportError and OSError (DLL load failures). + """ + try: + import pyproj # noqa: F401 + import rasterio # noqa: F401 + from rasterio.features import rasterize # noqa: F401 + from rasterio.mask import mask # noqa: F401 + from rasterio.transform import Affine, from_origin # noqa: F401 + from rasterio.windows import Window # noqa: F401 + from shapely import geometry # noqa: F401 + + return True + except (ImportError, OSError) as e: + logger.debug(f"Rasterio import failed: {e}") + return False + + +def _setup_geospatial_backend() -> bool: + """ + Set up the geospatial backend (rasterio or GDAL). + + Returns GDAL_ENV: True if using GDAL, False if using rasterio. + + Priority: + 1. UMEP_USE_GDAL=1 environment variable forces GDAL + 2. In OSGeo4W/QGIS environments: prefer GDAL (avoids DLL conflicts) + 3. Otherwise: try rasterio first, fall back to GDAL + """ + # Allow forcing GDAL via environment variable + if os.environ.get("UMEP_USE_GDAL", "").lower() in ("1", "true", "yes"): + if _try_import_gdal(): + logger.info("Using GDAL for raster operations (forced via UMEP_USE_GDAL).") + return True + else: + raise ImportError( + "UMEP_USE_GDAL is set but GDAL could not be imported. " + "Install GDAL or unset UMEP_USE_GDAL." + ) + + # In OSGeo4W/QGIS: prefer GDAL to avoid DLL conflicts + in_osgeo = _detect_osgeo_environment() + if in_osgeo: + logger.debug("Detected OSGeo4W/QGIS environment, preferring GDAL backend.") + if _try_import_gdal(): + logger.info("Using GDAL for raster operations (OSGeo4W/QGIS environment).") + return True + # GDAL should always be available in OSGeo4W/QGIS, but fall back just in case + logger.warning("GDAL import failed in OSGeo4W/QGIS environment, trying rasterio...") + if _try_import_rasterio(): + logger.info("Using rasterio for raster operations.") + return False + raise ImportError( + "Failed to import both GDAL and rasterio in OSGeo4W/QGIS environment.\n" + "This is unexpected - GDAL should be available. Check your installation." 
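# Illustrative sketch (not part of the patch): forcing the GDAL backend described
# above. UMEP_USE_GDAL must be set before umep.common is first imported, because
# the backend is chosen once at module import time (and an ImportError is raised
# if GDAL itself cannot be imported).
import os

os.environ["UMEP_USE_GDAL"] = "1"    # accepted values per the code above: "1", "true", "yes"
from umep import common              # noqa: E402  (import deliberately after setting the variable)

assert common.GDAL_ENV is True       # GDAL selected; the rasterio path is skipped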
+ ) + + # Standard environment: prefer rasterio, fall back to GDAL + if _try_import_rasterio(): + logger.info("Using rasterio for raster operations.") + return False + + logger.warning("Rasterio import failed, trying GDAL...") + if _try_import_gdal(): + logger.info("Using GDAL for raster operations.") + return True + + # Neither worked + raise ImportError( + "Neither rasterio nor GDAL could be imported.\n" + "Install with: pip install rasterio\n" + "Or for QGIS/OSGeo4W environments, ensure GDAL is properly configured." + ) + + +# Determine which backend to use +GDAL_ENV = _setup_geospatial_backend() + +# Now do the actual imports based on the backend +if GDAL_ENV: + from osgeo import gdal, osr +else: import pyproj import rasterio from rasterio.features import rasterize from rasterio.mask import mask from rasterio.transform import Affine, from_origin + from rasterio.windows import Window from shapely import geometry - GDAL_ENV = False - logger.info("Using rasterio for raster operations.") -except: - from osgeo import gdal +FLOAT_TOLERANCE = 1e-9 + + +def _assert_north_up(transform) -> None: + """Ensure the raster transform describes a north-up raster.""" + if hasattr(transform, "b") and hasattr(transform, "d"): + if not math.isclose(transform.b, 0.0, abs_tol=FLOAT_TOLERANCE) or not math.isclose( + transform.d, 0.0, abs_tol=FLOAT_TOLERANCE + ): + raise ValueError("Only north-up rasters (no rotation) are supported.") + else: + # GDAL-style tuple (c, a, b, f, d, e) + if len(transform) < 6: + raise ValueError("Transform must contain 6 elements.") + if not math.isclose(transform[2], 0.0, abs_tol=FLOAT_TOLERANCE) or not math.isclose( + transform[4], 0.0, abs_tol=FLOAT_TOLERANCE + ): + raise ValueError("Only north-up rasters (no rotation) are supported.") + + +def _shrink_axis_to_grid(min_val: float, max_val: float, origin: float, pixel_size: float) -> tuple[float, float]: + if pixel_size == 0: + raise ValueError("Pixel size must be non-zero to shrink bbox to pixel grid.") + step = abs(pixel_size) + start_idx = math.ceil(((min_val - origin) / step) - FLOAT_TOLERANCE) + end_idx = math.floor(((max_val - origin) / step) + FLOAT_TOLERANCE) + new_min = origin + start_idx * step + new_max = origin + end_idx * step + if not new_max > new_min: + raise ValueError("Bounding box collapsed after snapping to the pixel grid.") + return new_min, new_max + + +def shrink_bbox_to_pixel_grid( + bbox: tuple[float, float, float, float], + origin_x: float, + origin_y: float, + pixel_width: float, + pixel_height: float, +) -> tuple[float, float, float, float]: + """Shrink bbox so its edges land on the pixel grid defined by the raster origin.""" + + minx, miny, maxx, maxy = bbox + if minx >= maxx or miny >= maxy: + raise ValueError("Bounding box is invalid (min must be < max for both axes).") + snapped_minx, snapped_maxx = _shrink_axis_to_grid(minx, maxx, origin_x, pixel_width) + snapped_miny, snapped_maxy = _shrink_axis_to_grid(miny, maxy, origin_y, pixel_height) + return snapped_minx, snapped_miny, snapped_maxx, snapped_maxy + + +def _bounds_to_tuple(bounds) -> tuple[float, float, float, float]: + if hasattr(bounds, "left"): + return bounds.left, bounds.bottom, bounds.right, bounds.top + return tuple(bounds) + + +def _validate_bbox_within_bounds( + bbox: tuple[float, float, float, float], bounds, *, tol: float = FLOAT_TOLERANCE +) -> None: + minx, miny, maxx, maxy = bbox + left, bottom, right, top = _bounds_to_tuple(bounds) + if minx < left - tol or maxx > right + tol or miny < bottom - tol or maxy > top + tol: + raise 
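# Illustrative sketch (not part of the patch): what shrink_bbox_to_pixel_grid does
# on a 1 m grid whose top-left origin is at x=0, y=100 — each edge is moved inward
# onto the nearest pixel boundary, so the bbox can only shrink. Values are hypothetical.
from umep.common import shrink_bbox_to_pixel_grid  # helper defined in this module

snapped = shrink_bbox_to_pixel_grid(
    (0.3, 90.2, 10.7, 99.7),   # (minx, miny, maxx, maxy)
    origin_x=0.0,
    origin_y=100.0,
    pixel_width=1.0,
    pixel_height=-1.0,         # negative y pixel size (north-up) is fine; abs() is taken
)
# snapped == (1.0, 91.0, 10.0, 99.0)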
ValueError("Bounding box is not fully contained within the raster dataset bounds") + + +def _compute_bounds_from_transform(transform, width: int, height: int) -> tuple[float, float, float, float]: + """Return raster bounds for a GDAL-style transform tuple.""" + left = transform[0] + top = transform[3] + right = transform[0] + width * transform[1] + bottom = transform[3] + height * transform[5] + minx = min(left, right) + maxx = max(left, right) + miny = min(top, bottom) + maxy = max(top, bottom) + return minx, miny, maxx, maxy + - GDAL_ENV = True - logger.info("Using GDAL for raster operations.") +def _normalise_bbox(bbox_sequence) -> tuple[float, float, float, float]: + try: + minx, miny, maxx, maxy = bbox_sequence + except Exception as exc: # noqa: BLE001 + raise ValueError("Bounding box must contain exactly four numeric values") from exc + return float(minx), float(miny), float(maxx), float(maxy) def rasterise_gdf(gdf, geom_col, ht_col, bbox=None, pixel_size: int = 1): # Define raster parameters if bbox is not None: # Unpack bbox values - minx, miny, maxx, maxy = bbox + minx, miny, maxx, maxy = _normalise_bbox(bbox) else: # Use the total bounds of the GeoDataFrame - minx, miny, maxx, maxy = gdf.total_bounds - width = int((maxx - minx) / pixel_size) - height = int((maxy - miny) / pixel_size) + minx, miny, maxx, maxy = map(float, gdf.total_bounds) + if pixel_size <= 0: + raise ValueError("Pixel size must be a positive number.") + minx, miny, maxx, maxy = shrink_bbox_to_pixel_grid( + (minx, miny, maxx, maxy), + origin_x=minx, + origin_y=maxy, + pixel_width=pixel_size, + pixel_height=pixel_size, + ) + width = int(round((maxx - minx) / pixel_size)) + height = int(round((maxy - miny) / pixel_size)) + if width <= 0 or height <= 0: + raise ValueError("Bounding box collapsed after snapping to pixel grid.") transform = from_origin(minx, maxy, pixel_size, pixel_size) # Create a blank array for the raster raster = np.zeros((height, width), dtype=np.float32) @@ -61,8 +273,29 @@ def check_path(path_str: str | Path, make_dir: bool = False) -> Path: def save_raster( - out_path_str: str, data_arr: np.ndarray, trf_arr: list[float], crs_wkt: str, no_data_val: float = -9999 + out_path_str: str, + data_arr: np.ndarray, + trf_arr: list[float], + crs_wkt: str, + no_data_val: float = -9999, + coerce_f64_to_f32: bool = True, ): + """ + Save raster to GeoTIFF. 
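# Illustrative sketch (not part of the patch): saving a float64 array with the
# new coerce_f64_to_f32 flag. Transform, CRS and output path are hypothetical;
# the parent folder is assumed to exist.
import numpy as np
from pyproj import CRS
from umep import common

arr = np.random.default_rng(0).random((100, 100))        # float64 by default
trf = [500000.0, 10.0, 0.0, 4650000.0, 0.0, -10.0]       # GDAL-style: [x0, dx, 0, y0, 0, -dy]
crs_wkt = CRS.from_epsg(32634).to_wkt()                  # any projected CRS as WKT
common.save_raster("temp/example.tif", arr, trf, crs_wkt, coerce_f64_to_f32=True)
# Written as float32; pass coerce_f64_to_f32=False to skip the cast.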
+ + Args: + out_path_str: Output file path + data_arr: 2D numpy array to save + trf_arr: GDAL-style geotransform [top_left_x, pixel_width, rotation, top_left_y, rotation, pixel_height] + crs_wkt: CRS in WKT format + no_data_val: No-data value to use + coerce_f64_to_f32: If True, convert float64 arrays to float32 before saving + (default: True for memory efficiency) + """ + # Only convert float64 to float32, leave ints/bools unchanged + if coerce_f64_to_f32 and data_arr.dtype == np.float64: + data_arr = data_arr.astype(np.float32) + attempts = 2 while attempts > 0: attempts -= 1 @@ -89,89 +322,198 @@ def save_raster( ) as dst: dst.write(data_arr, 1) else: - # Map numpy dtype to GDAL type - dtype_map = { - np.dtype("uint8"): gdal.GDT_Byte, - np.dtype("int16"): gdal.GDT_Int16, - np.dtype("uint16"): gdal.GDT_UInt16, - np.dtype("int32"): gdal.GDT_Int32, - np.dtype("uint32"): gdal.GDT_UInt32, - np.dtype("float32"): gdal.GDT_Float32, - np.dtype("float64"): gdal.GDT_Float64, - } - gdal_dtype = dtype_map.get(data_arr.dtype, gdal.GDT_Float32) driver = gdal.GetDriverByName("GTiff") - ds = driver.Create(str(out_path), width, height, 1, gdal_dtype) - # trf is a list: [top left x, w-e pixel size, rotation, top left y, rotation, n-s pixel size] - ds.SetGeoTransform(tuple(trf_arr)) - # GetProjection returns WKT (string) + ds = driver.Create(str(out_path), width, height, 1, gdal.GDT_Float32) + ds.SetGeoTransform(trf_arr) if crs_wkt: ds.SetProjection(crs_wkt) band = ds.GetRasterBand(1) - band.WriteArray(data_arr, 0, 0) band.SetNoDataValue(no_data_val) - ds.FlushCache() + band.WriteArray(data_arr) ds = None + return except Exception as e: - print(f"Error saving raster, attempts left {attempts}: {e}") if attempts == 0: raise e + logger.warning(f"Failed to save raster to {out_path_str}: {e}. Retrying...") + + +def get_raster_metadata(path_str: str | Path) -> dict: + """ + Get raster metadata without loading the whole file. + Returns dict with keys: rows, cols, transform, crs, nodata, res. + Transform is always a list [c, a, b, f, d, e] (GDAL-style). + CRS is always a WKT string (or None). + """ + path = check_path(path_str) + if GDAL_ENV is False: + with rasterio.open(path) as src: + # Convert Affine to GDAL-style list + trf = src.transform + transform_list = [trf.c, trf.a, trf.b, trf.f, trf.d, trf.e] + # Convert CRS to WKT string + crs_wkt = src.crs.to_wkt() if src.crs is not None else None + return { + "rows": src.height, + "cols": src.width, + "transform": transform_list, + "crs": crs_wkt, + "nodata": src.nodata, + "res": src.res, # (xres, yres) + "bounds": src.bounds, + } + else: + ds = gdal.Open(str(path)) + if ds is None: + raise OSError(f"Could not open {path}") + gt = ds.GetGeoTransform() + return { + "rows": ds.RasterYSize, + "cols": ds.RasterXSize, + "transform": gt, + "crs": ds.GetProjection() or None, + "nodata": ds.GetRasterBand(1).GetNoDataValue(), + "res": (gt[1], abs(gt[5])), # Approximate resolution + } + + +def read_raster_window(path_str: str | Path, window: tuple[slice, slice], band: int = 1) -> np.ndarray: + """ + Read a window from a raster file. + window is (row_slice, col_slice). + """ + path = check_path(path_str) + row_slice, col_slice = window + + # Handle None slices (read full dimension) + # This is tricky without knowing full shape, so we assume caller provides valid slices + # or we'd need to open file to check shape first. + # For now, assume valid integer slices. 
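# Illustrative sketch (not part of the patch): reading a single block of a raster
# with the window helpers above instead of loading the whole file. The DSM path
# is the Athens demo raster used elsewhere in this repo.
from umep import common

meta = common.get_raster_metadata("demos/data/athens/DSM.tif")
rows, cols = meta["rows"], meta["cols"]

window = (slice(0, min(256, rows)), slice(0, min(256, cols)))   # (row_slice, col_slice)
block = common.read_raster_window("demos/data/athens/DSM.tif", window, band=1)
# block.shape == (window[0].stop, window[1].stop)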
+ + if GDAL_ENV is False: + with rasterio.open(path) as src: + # rasterio Window(col_off, row_off, width, height) + # Slices are start:stop + r_start = row_slice.start if row_slice.start is not None else 0 + r_stop = row_slice.stop if row_slice.stop is not None else src.height + c_start = col_slice.start if col_slice.start is not None else 0 + c_stop = col_slice.stop if col_slice.stop is not None else src.width + + win = Window( + col_off=c_start, + row_off=r_start, + width=c_stop - c_start, + height=r_stop - r_start, + ) + return src.read(band, window=win) + else: + ds = gdal.Open(str(path)) + if ds is None: + raise OSError(f"Could not open {path}") + + r_start = row_slice.start if row_slice.start is not None else 0 + r_stop = row_slice.stop if row_slice.stop is not None else ds.RasterYSize + c_start = col_slice.start if col_slice.start is not None else 0 + c_stop = col_slice.stop if col_slice.stop is not None else ds.RasterXSize + + xoff = c_start + yoff = r_start + xsize = c_stop - c_start + ysize = r_stop - r_start + + return ds.GetRasterBand(band).ReadAsArray(xoff, yoff, xsize, ysize) def load_raster( - path_str: str, bbox: list[int] | None = None, band: int = 0 + path_str: str, bbox: list[int] | None = None, band: int = 0, coerce_f64_to_f32: bool = True ) -> tuple[np.ndarray, list[float], str | None, float | None]: + """ + Load raster, optionally crop to bbox. + + Args: + path_str: Path to raster file + bbox: Optional bounding box [minx, miny, maxx, maxy] + band: Band index to read (0-based) + coerce_f64_to_f32: If True, coerce array to float32 (default: True for memory efficiency) + + Returns: + Tuple of (array, transform, crs_wkt, no_data_value) + """ # Load raster, optionally crop to bbox path = check_path(path_str, make_dir=False) if not path.exists(): raise FileNotFoundError(f"Raster file {path} does not exist.") if GDAL_ENV is False: with rasterio.open(path) as dataset: + _assert_north_up(dataset.transform) crs_wkt = dataset.crs.to_wkt() if dataset.crs is not None else None - dataset_bounds = dataset.bounds no_data_val = dataset.nodata + transform = dataset.transform if bbox is not None: - # Create bbox geometry for masking - bbox_geom = geometry.box(*bbox) - if not ( - dataset_bounds.left <= bbox[0] <= dataset_bounds.right - and dataset_bounds.left <= bbox[2] <= dataset_bounds.right - and dataset_bounds.bottom <= bbox[1] <= dataset_bounds.top - and dataset_bounds.bottom <= bbox[3] <= dataset_bounds.top - ): - raise ValueError("Bounding box is not fully contained within the raster dataset bounds") + bbox_tuple = _normalise_bbox(bbox) + snapped_bbox = shrink_bbox_to_pixel_grid( + bbox_tuple, + origin_x=transform.c, + origin_y=transform.f, + pixel_width=transform.a, + pixel_height=transform.e, + ) + _validate_bbox_within_bounds(snapped_bbox, dataset.bounds) + bbox_geom = geometry.box(*snapped_bbox) rast, trf = mask(dataset, [bbox_geom], crop=True) else: rast = dataset.read() - trf = dataset.transform + trf = transform # Convert rasterio Affine to GDAL-style list trf_arr = [trf.c, trf.a, trf.b, trf.f, trf.d, trf.e] # rast shape: (bands, rows, cols) if rast.ndim == 3: if band < 0 or band >= rast.shape[0]: raise IndexError(f"Requested band {band} out of range; raster has {rast.shape[0]} band(s)") - rast_arr = rast[band].astype(float) + rast_arr = rast[band] + # Only convert float64 to float32, leave ints/bools unchanged + if coerce_f64_to_f32 and rast_arr.dtype == np.float64: + rast_arr = rast_arr.astype(np.float32) else: - rast_arr = rast.astype(float) + rast_arr = rast + # Only 
convert float64 to float32, leave ints/bools unchanged + if coerce_f64_to_f32 and rast_arr.dtype == np.float64: + rast_arr = rast_arr.astype(np.float32) else: dataset = gdal.Open(str(path)) if dataset is None: raise FileNotFoundError(f"Could not open {path}") trf = dataset.GetGeoTransform() + _assert_north_up(trf) # GetProjection returns WKT string (or empty string) crs_wkt = dataset.GetProjection() or None rb = dataset.GetRasterBand(band + 1) if rb is None: dataset = None raise IndexError(f"Requested band {band} out of range in GDAL dataset") - rast_arr = rb.ReadAsArray().astype(float) + rast_arr = rb.ReadAsArray() + # Only convert float64 to float32, leave ints/bools unchanged + if coerce_f64_to_f32 and rast_arr.dtype == np.float64: + rast_arr = rast_arr.astype(np.float32) no_data_val = rb.GetNoDataValue() if bbox is not None: - min_x, min_y, max_x, max_y = bbox - xoff = int((min_x - trf[0]) / trf[1]) - yoff = int((trf[3] - max_y) / abs(trf[5])) - xsize = int((max_x - min_x) / trf[1]) - ysize = int((max_y - min_y) / abs(trf[5])) + bbox_tuple = _normalise_bbox(bbox) + snapped_bbox = shrink_bbox_to_pixel_grid( + bbox_tuple, + origin_x=trf[0], + origin_y=trf[3], + pixel_width=trf[1], + pixel_height=trf[5], + ) + bounds = _compute_bounds_from_transform(trf, dataset.RasterXSize, dataset.RasterYSize) + _validate_bbox_within_bounds(snapped_bbox, bounds) + min_x, min_y, max_x, max_y = snapped_bbox + pixel_width = trf[1] + pixel_height = abs(trf[5]) + xoff = int(round((min_x - trf[0]) / pixel_width)) + yoff = int(round((trf[3] - max_y) / pixel_height)) + xsize = int(round((max_x - min_x) / pixel_width)) + ysize = int(round((max_y - min_y) / pixel_height)) # guard offsets/sizes if xoff < 0 or yoff < 0 or xsize <= 0 or ysize <= 0: dataset = None @@ -222,3 +564,94 @@ def xy_to_lnglat(crs_wkt: str | None, x, y): except Exception: logger.exception("Failed to transform coordinates") raise + + +def create_empty_raster( + path_str: str | Path, + rows: int, + cols: int, + transform: list[float], + crs_wkt: str, + dtype=np.float32, + nodata: float = -9999, + bands: int = 1, +): + """ + Create an empty GeoTIFF file initialized with nodata. + """ + path = check_path(path_str, make_dir=True) + + if GDAL_ENV is False: + trf = Affine.from_gdal(*transform) + crs = None + if crs_wkt: + crs = pyproj.CRS(crs_wkt) + + with rasterio.open( + path, + "w", + driver="GTiff", + height=rows, + width=cols, + count=bands, + dtype=dtype, + crs=crs, + transform=trf, + nodata=nodata, + ) as dst: + pass # Just create + else: + driver = gdal.GetDriverByName("GTiff") + # Map numpy dtype to GDAL type + gdal_type = gdal.GDT_Float32 # Default + if dtype == np.float64: + gdal_type = gdal.GDT_Float64 + elif dtype == np.int32: + gdal_type = gdal.GDT_Int32 + elif dtype == np.int16: + gdal_type = gdal.GDT_Int16 + elif dtype == np.uint8: + gdal_type = gdal.GDT_Byte + + ds = driver.Create(str(path), cols, rows, bands, gdal_type) + ds.SetGeoTransform(transform) + if crs_wkt: + ds.SetProjection(crs_wkt) + for b in range(1, bands + 1): + band = ds.GetRasterBand(b) + band.SetNoDataValue(nodata) + band.Fill(nodata) + ds = None + + +def write_raster_window(path_str: str | Path, data: np.ndarray, window: tuple[slice, slice], band: int = 1): + """ + Write a data array to a specific window in an existing raster. + window is (row_slice, col_slice). 
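# Illustrative sketch (not part of the patch): the create-then-fill pattern these
# helpers enable — create an empty GeoTIFF once, then write each computed tile
# into its window. Sizes, transform and output path are hypothetical.
import numpy as np
from umep import common

rows, cols, tile = 400, 400, 200
trf = [0.0, 1.0, 0.0, 400.0, 0.0, -1.0]                  # GDAL-style, 1 m pixels, north-up
common.create_empty_raster("temp/tmrt_mosaic.tif", rows, cols, trf, crs_wkt="", nodata=-9999)

for r0 in range(0, rows, tile):
    for c0 in range(0, cols, tile):
        data = np.full((tile, tile), 25.0, dtype=np.float32)    # stand-in for a computed tile
        window = (slice(r0, r0 + tile), slice(c0, c0 + tile))
        common.write_raster_window("temp/tmrt_mosaic.tif", data, window)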
+ """ + path = check_path(path_str) + row_slice, col_slice = window + + if GDAL_ENV is False: + from rasterio.windows import Window + + with rasterio.open(path, "r+") as dst: + win = Window( + col_off=col_slice.start, + row_off=row_slice.start, + width=col_slice.stop - col_slice.start, + height=row_slice.stop - row_slice.start, + ) + dst.write(data, band, window=win) + else: + ds = gdal.Open(str(path), gdal.GA_Update) + if ds is None: + raise OSError(f"Could not open {path} for update") + + xoff = col_slice.start + yoff = row_slice.start + xsize = col_slice.stop - col_slice.start + ysize = row_slice.stop - row_slice.start + + ds.GetRasterBand(band).WriteArray(data, xoff, yoff) + ds = None diff --git a/umep/doctor.py b/umep/doctor.py new file mode 100644 index 0000000..508060a --- /dev/null +++ b/umep/doctor.py @@ -0,0 +1,180 @@ +"""Diagnostic tool to check umep installation and dependencies.""" + +import sys + + +def parse_version(v: str) -> tuple: + """Parse version string to comparable tuple.""" + try: + return tuple(int(x) for x in v.split(".")[:2]) + except (ValueError, AttributeError): + return (0, 0) + + +def check_gdal_compatibility(rasterio_version: str, gdal_version: str) -> tuple[bool, str]: + """ + Check if rasterio and GDAL versions are compatible. + + Rasterio wheels bundle a specific GDAL version. If osgeo.gdal is also installed + with a different GDAL version, DLL conflicts can occur on Windows. + + Rather than maintaining a static compatibility matrix, we check: + 1. If both rasterio and osgeo.gdal are present, their GDAL versions should match + 2. The bundled GDAL version is reported for informational purposes + """ + # For now, just report what we found - the mismatch detection happens elsewhere + return True, f"rasterio {rasterio_version} bundles GDAL {gdal_version}" + + +def check_environment(): + """Check the umep environment and report any issues.""" + print(f"Python: {sys.executable}") + print(f"Version: {sys.version}") + print() + + issues = [] + rasterio_version = None + rasterio_gdal_version = None + osgeo_gdal_version = None + + # Check for conflicting GDAL installations + print("Checking dependencies...") + print("-" * 40) + + # Check rasterio + try: + import rasterio + + rasterio_version = rasterio.__version__ + rasterio_gdal_version = rasterio.gdal_version() + print(f"✓ rasterio {rasterio_version}") + print(f" Bundled GDAL: {rasterio_gdal_version}") + except ImportError as e: + print(f"✗ rasterio: {e}") + issues.append("rasterio") + except Exception as e: + print(f"✗ rasterio (DLL/loading error): {e}") + issues.append("rasterio-dll") + + # Check if osgeo is also installed (potential conflict) + try: + from osgeo import gdal + + osgeo_gdal_version_raw = gdal.VersionInfo("VERSION_NUM") + # Convert from format like "3100200" to "3.10.2" + v = int(osgeo_gdal_version_raw) + osgeo_gdal_version = f"{v // 1000000}.{(v // 10000) % 100}.{(v // 100) % 100}" + print(f"! 
osgeo.gdal installed (version {osgeo_gdal_version})") + + # Check for version mismatch + if rasterio_gdal_version and osgeo_gdal_version: + rio_gdal_tuple = parse_version(rasterio_gdal_version) + osgeo_gdal_tuple = parse_version(osgeo_gdal_version) + + if rio_gdal_tuple != osgeo_gdal_tuple: + print(f" WARNING: GDAL version mismatch!") + print(f" rasterio bundled: {rasterio_gdal_version}") + print(f" osgeo.gdal: {osgeo_gdal_version}") + print(" This WILL cause DLL conflicts on Windows.") + if sys.platform == "win32": + issues.append("gdal-mismatch") + else: + print(f" GDAL versions match ({rasterio_gdal_version}) - OK") + + except ImportError: + print(" (osgeo.gdal not installed - this is fine)") + except Exception as e: + print(f"! osgeo.gdal installed but failed to load: {e}") + if sys.platform == "win32": + issues.append("osgeo-dll") + + # Check rasterio/GDAL compatibility + if rasterio_version and rasterio_gdal_version: + compatible, msg = check_gdal_compatibility(rasterio_version, rasterio_gdal_version) + if not compatible: + print(f" WARNING: {msg}") + issues.append("version-mismatch") + else: + print(f" {msg}") + + # Check other key dependencies + print() + print("Other dependencies:") + for pkg, import_name in [ + ("pyproj", "pyproj"), + ("shapely", "shapely"), + ("geopandas", "geopandas"), + ("numpy", "numpy"), + ("scipy", "scipy"), + ("xarray", "xarray"), + ("rioxarray", "rioxarray"), + ]: + try: + mod = __import__(import_name) + ver = getattr(mod, "__version__", "unknown") + print(f"✓ {pkg} {ver}") + + # Extra checks for specific packages + if pkg == "pyproj": + try: + import pyproj + proj_ver = pyproj.proj_version_str + print(f" PROJ version: {proj_ver}") + except Exception: + pass + elif pkg == "shapely": + try: + import shapely + geos_ver = shapely.geos_version_string + print(f" GEOS version: {geos_ver}") + except Exception: + pass + except ImportError as e: + print(f"✗ {pkg}: {e}") + issues.append(pkg) + + print("-" * 40) + + # Check if running in OSGeo4W + if sys.platform == "win32": + import os + + path = os.environ.get("PATH", "") + if "OSGeo4W" in path or "QGIS" in path: + print() + print("WARNING: OSGeo4W/QGIS detected in PATH") + print("This can cause DLL conflicts with pip-installed packages.") + print("Recommendation: Use a separate virtual environment:") + print(" uv venv && uv pip install umep") + issues.append("osgeo4w-path") + + print() + if issues: + print(f"Issues found: {', '.join(issues)}") + print() + print("Troubleshooting:") + if any(i in issues for i in ["rasterio-dll", "gdal-mismatch", "osgeo4w-path", "osgeo-dll"]): + print("• DLL/GDAL conflicts detected. Create a clean virtual environment:") + print(" uv venv --python 3.12") + print(" uv pip install umep") + print() + print("• Or use conda-forge for consistent binaries:") + print(" conda create -n umep -c conda-forge python=3.12 rasterio geopandas") + print(" conda activate umep && pip install umep") + if "version-mismatch" in issues: + print() + print("• Version incompatibility detected. 
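# Illustrative sketch (not part of the patch): the GDAL version decoding used in
# the check above. gdal.VersionInfo("VERSION_NUM") returns a packed integer
# string such as "3100200", which unpacks to "3.10.2".
v = int("3100200")
major, minor, patch = v // 1000000, (v // 10000) % 100, (v // 100) % 100
print(f"{major}.{minor}.{patch}")   # -> 3.10.2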
Try upgrading:") + print(" pip install --upgrade rasterio") + return 1 + else: + print("All checks passed!") + return 0 + + +def main(): + """Entry point for umep-doctor command.""" + sys.exit(check_environment()) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/umep/functions/SOLWEIGpython/Solweig_2025a_calc_forprocessing.py b/umep/functions/SOLWEIGpython/Solweig_2025a_calc_forprocessing.py index 1abcf2f..fc63ce2 100644 --- a/umep/functions/SOLWEIGpython/Solweig_2025a_calc_forprocessing.py +++ b/umep/functions/SOLWEIGpython/Solweig_2025a_calc_forprocessing.py @@ -1,48 +1,133 @@ -from __future__ import absolute_import +from copy import deepcopy import numpy as np -from .daylen import daylen + from ...util.SEBESOLWEIGCommonFiles.clearnessindex_2013b import clearnessindex_2013b +from ...util.SEBESOLWEIGCommonFiles.create_patches import create_patches from ...util.SEBESOLWEIGCommonFiles.diffusefraction import diffusefraction +from ...util.SEBESOLWEIGCommonFiles.Perez_v3 import Perez_v3 from ...util.SEBESOLWEIGCommonFiles.shadowingfunction_wallheight_13 import shadowingfunction_wallheight_13 from ...util.SEBESOLWEIGCommonFiles.shadowingfunction_wallheight_23 import shadowingfunction_wallheight_23 -from .gvf_2018a import gvf_2018a +from .anisotropic_sky import anisotropic_sky as ani_sky from .cylindric_wedge import cylindric_wedge -from .TsWaveDelay_2015a import TsWaveDelay_2015a -from .Kup_veg_2015a import Kup_veg_2015a +from .daylen import daylen +from .gvf_2018a import gvf_2018a + # from .Lside_veg_v2015a import Lside_veg_v2015a # from .Kside_veg_v2019a import Kside_veg_v2019a from .Kside_veg_v2022a import Kside_veg_v2022a -from ...util.SEBESOLWEIGCommonFiles.Perez_v3 import Perez_v3 -from ...util.SEBESOLWEIGCommonFiles.create_patches import create_patches +from .Kup_veg_2015a import Kup_veg_2015a # Anisotropic longwave -from .Lcyl_v2022a import Lcyl_v2022a from .Lside_veg_v2022a import Lside_veg_v2022a -from .anisotropic_sky import anisotropic_sky as ani_sky from .patch_radiation import patch_steradians -from copy import deepcopy -import time +from .TsWaveDelay_2015a import TsWaveDelay_2015a # Wall surface temperature scheme from .wall_surface_temperature import wall_surface_temperature -def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, svfveg, svfNveg, svfEveg, svfSveg, - svfWveg, svfaveg, svfEaveg, svfSaveg, svfWaveg, svfNaveg, vegdem, vegdem2, albedo_b, absK, absL, - ewall, Fside, Fup, Fcyl, altitude, azimuth, zen, jday, usevegdem, onlyglobal, buildings, location, psi, - landcover, lc_grid, dectime, altmax, dirwalls, walls, cyl, elvis, Ta, RH, radG, radD, radI, P, - amaxvalue, bush, Twater, TgK, Tstart, alb_grid, emis_grid, TgK_wall, Tstart_wall, TmaxLST, - TmaxLST_wall, first, second, svfalfa, svfbuveg, firstdaytime, timeadd, timestepdec, Tgmap1, - Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, CI, TgOut1, diffsh, shmat, vegshmat, vbshvegshmat, anisotropic_sky, asvf, patch_option, - voxelMaps, voxelTable, ws, wallScheme, timeStep, steradians, walls_scheme, dirwalls_scheme): - -#def Solweig_2021a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, svfveg, svfNveg, svfEveg, svfSveg, -# svfWveg, svfaveg, svfEaveg, svfSaveg, svfWaveg, svfNaveg, vegdem, vegdem2, albedo_b, absK, absL, -# ewall, Fside, Fup, Fcyl, altitude, azimuth, zen, jday, usevegdem, onlyglobal, buildings, location, psi, -# landcover, lc_grid, dectime, altmax, dirwalls, walls, cyl, elvis, Ta, RH, radG, radD, radI, P, -# amaxvalue, bush, Twater, TgK, Tstart, 
alb_grid, emis_grid, TgK_wall, Tstart_wall, TmaxLST, -# TmaxLST_wall, first, second, svfalfa, svfbuveg, firstdaytime, timeadd, timestepdec, Tgmap1, -# Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, CI, TgOut1, diffsh, ani): + +def Solweig_2025a_calc( + i, + dsm, + scale, + rows, + cols, + svf, + svfN, + svfW, + svfE, + svfS, + svfveg, + svfNveg, + svfEveg, + svfSveg, + svfWveg, + svfaveg, + svfEaveg, + svfSaveg, + svfWaveg, + svfNaveg, + vegdem, + vegdem2, + albedo_b, + absK, + absL, + ewall, + Fside, + Fup, + Fcyl, + altitude, + azimuth, + zen, + jday, + usevegdem, + onlyglobal, + buildings, + location, + psi, + landcover, + lc_grid, + dectime, + altmax, + dirwalls, + walls, + cyl, + elvis, + Ta, + RH, + radG, + radD, + radI, + P, + amaxvalue, + bush, + Twater, + TgK, + Tstart, + alb_grid, + emis_grid, + TgK_wall, + Tstart_wall, + TmaxLST, + TmaxLST_wall, + first, + second, + svfalfa, + svfbuveg, + firstdaytime, + timeadd, + timestepdec, + Tgmap1, + Tgmap1E, + Tgmap1S, + Tgmap1W, + Tgmap1N, + CI, + TgOut1, + diffsh, + shmat, + vegshmat, + vbshvegshmat, + anisotropic_sky, + asvf, + patch_option, + voxelMaps, + voxelTable, + ws, + wallScheme, + timeStep, + steradians, + walls_scheme, + dirwalls_scheme, +): + # def Solweig_2021a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, svfveg, svfNveg, svfEveg, svfSveg, + # svfWveg, svfaveg, svfEaveg, svfSaveg, svfWaveg, svfNaveg, vegdem, vegdem2, albedo_b, absK, absL, + # ewall, Fside, Fup, Fcyl, altitude, azimuth, zen, jday, usevegdem, onlyglobal, buildings, location, psi, + # landcover, lc_grid, dectime, altmax, dirwalls, walls, cyl, elvis, Ta, RH, radG, radD, radI, P, + # amaxvalue, bush, Twater, TgK, Tstart, alb_grid, emis_grid, TgK_wall, Tstart_wall, TmaxLST, + # TmaxLST_wall, first, second, svfalfa, svfbuveg, firstdaytime, timeadd, timestepdec, Tgmap1, + # Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, CI, TgOut1, diffsh, ani): # This is the core function of the SOLWEIG model # 2016-Aug-28 @@ -93,47 +178,47 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s # amaxvalue = max height of buildings # bush = grid representing bushes # Twater = temperature of water (daily) - # TgK, Tstart, TgK_wall, Tstart_wall, TmaxLST,TmaxLST_wall, + # TgK, Tstart, TgK_wall, Tstart_wall, TmaxLST,TmaxLST_wall, # alb_grid, emis_grid = albedo and emmissivity on ground # first, second = conneted to old Ts model (source area based on Smidt et al.) # svfalfa = SVF recalculated to angle - # svfbuveg = complete SVF - # firstdaytime, timeadd, timestepdec, Tgmap1, Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, + # svfbuveg = complete SVF + # firstdaytime, timeadd, timestepdec, Tgmap1, Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, # CI = Clearness index # TgOut1 = old Ts model # diffsh, ani = Used in anisotrpic models (Wallenberg et al. 2019, 2022) # # # Core program start # # # # Instrument offset in degrees - t = 0. + t = 0.0 # Stefan Bolzmans Constant SBC = 5.67051e-8 # Degrees to radians - deg2rad = np.pi/180 + deg2rad = np.pi / 180 # Find sunrise decimal hour - new from 2014a - _, _, _, SNUP = daylen(jday, location['latitude']) + _, _, _, SNUP = daylen(jday, location["latitude"]) # Vapor pressure - ea = 6.107 * 10 ** ((7.5 * Ta) / (237.3 + Ta)) * (RH / 100.) 
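# Illustrative sketch (not part of the patch): the vapour-pressure and clear-sky
# emissivity chain above (Prata 1996), evaluated for one hypothetical met record.
import numpy as np

Ta, RH, elvis = 25.0, 50.0, 0.0    # air temperature (degC), relative humidity (%), elvis offset
ea = 6.107 * 10 ** ((7.5 * Ta) / (237.3 + Ta)) * (RH / 100.0)
msteg = 46.5 * (ea / (Ta + 273.15))
esky = (1 - (1 + msteg) * np.exp(-((1.2 + 3.0 * msteg) ** 0.5))) + elvis
print(round(ea, 1), round(float(esky), 2))   # -> 15.8 0.82  (ea in hPa, esky dimensionless)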
+ ea = 6.107 * 10 ** ((7.5 * Ta) / (237.3 + Ta)) * (RH / 100.0) # Determination of clear - sky emissivity from Prata (1996) msteg = 46.5 * (ea / (Ta + 273.15)) esky = (1 - (1 + msteg) * np.exp(-((1.2 + 3.0 * msteg) ** 0.5))) + elvis # -0.04 old error from Jonsson et al.2006 - if altitude > 0: # # # # # # DAYTIME # # # # # # + if altitude > 0: # # # # # # DAYTIME # # # # # # # Clearness Index on Earth's surface after Crawford and Dunchon (1999) with a correction # factor for low sun elevations after Lindberg et al.(2008) - I0, CI, Kt, I0et, CIuncorr = clearnessindex_2013b(zen, jday, Ta, RH / 100., radG, location, P) - if (CI > 1) or (CI == np.inf): + I0, CI, Kt, I0et, CIuncorr = clearnessindex_2013b(zen, jday, Ta, RH / 100.0, radG, location, P) + if (CI > 1) or (np.inf == CI): CI = 1 # Estimation of radD and radI if not measured after Reindl et al.(1990) if onlyglobal == 1: - I0, CI, Kt, I0et, CIuncorr = clearnessindex_2013b(zen, jday, Ta, RH / 100., radG, location, P) - if (CI > 1) or (CI == np.inf): + I0, CI, Kt, I0et, CIuncorr = clearnessindex_2013b(zen, jday, Ta, RH / 100.0, radG, location, P) + if (CI > 1) or (np.inf == CI): CI = 1 radI, radD = diffusefraction(radG, altitude, Kt, Ta, RH) @@ -142,15 +227,15 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s # Anisotropic Diffuse Radiation after Perez et al. 1993 if anisotropic_sky == 1: patchchoice = 1 - zenDeg = zen*(180/np.pi) + zenDeg = zen * (180 / np.pi) # Relative luminance - lv, pc_, pb_ = Perez_v3(zenDeg, azimuth, radD, radI, jday, patchchoice, patch_option) + lv, pc_, pb_ = Perez_v3(zenDeg, azimuth, radD, radI, jday, patchchoice, patch_option) # Total relative luminance from sky, i.e. from each patch, into each cell - aniLum = np.zeros((rows, cols)) + aniLum = np.zeros((rows, cols), dtype=np.float32) for idx in range(lv.shape[0]): - aniLum += diffsh[:,:,idx] * lv[idx,2] + aniLum += diffsh[:, :, idx] * lv[idx, 2] - dRad = aniLum * radD # Total diffuse radiation from sky into each cell + dRad = aniLum * radD # Total diffuse radiation from sky into each cell else: dRad = radD * svfbuveg patchchoice = 1 @@ -158,29 +243,46 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s # Shadow images if usevegdem == 1: - vegsh, sh, _, wallsh, wallsun, wallshve, _, facesun, wallsh_ = shadowingfunction_wallheight_23(dsm, vegdem, vegdem2, - azimuth, altitude, scale, amaxvalue, bush, walls, dirwalls * np.pi / 180., walls_scheme, dirwalls_scheme * np.pi/180.) + vegsh, sh, _, wallsh, wallsun, wallshve, _, facesun, wallsh_ = shadowingfunction_wallheight_23( + dsm, + vegdem, + vegdem2, + azimuth, + altitude, + scale, + amaxvalue, + bush, + walls, + dirwalls * np.pi / 180.0, + walls_scheme, + dirwalls_scheme * np.pi / 180.0, + ) shadow = sh - (1 - vegsh) * (1 - psi) else: - sh, wallsh, wallsun, facesh, facesun = shadowingfunction_wallheight_13(dsm, azimuth, altitude, scale, - walls, dirwalls * np.pi / 180.) 
+ sh, wallsh, wallsun, facesh, facesun = shadowingfunction_wallheight_13( + dsm, azimuth, altitude, scale, walls, dirwalls * np.pi / 180.0 + ) shadow = sh # # # Surface temperature parameterisation during daytime # # # # # new using max sun alt.instead of dfm # Tgamp = (TgK * altmax - Tstart) + Tstart # Old - Tgamp = TgK * altmax + Tstart # Fixed 2021 + Tgamp = TgK * altmax + Tstart # Fixed 2021 # Tgampwall = (TgK_wall * altmax - (Tstart_wall)) + (Tstart_wall) # Old Tgampwall = TgK_wall * altmax + Tstart_wall - Tg = Tgamp * np.sin((((dectime - np.floor(dectime)) - SNUP / 24) / (TmaxLST / 24 - SNUP / 24)) * np.pi / 2) # 2015 a, based on max sun altitude - Tgwall = Tgampwall * np.sin((((dectime - np.floor(dectime)) - SNUP / 24) / (TmaxLST_wall / 24 - SNUP / 24)) * np.pi / 2) # 2015a, based on max sun altitude + Tg = Tgamp * np.sin( + (((dectime - np.floor(dectime)) - SNUP / 24) / (TmaxLST / 24 - SNUP / 24)) * np.pi / 2 + ) # 2015 a, based on max sun altitude + Tgwall = Tgampwall * np.sin( + (((dectime - np.floor(dectime)) - SNUP / 24) / (TmaxLST_wall / 24 - SNUP / 24)) * np.pi / 2 + ) # 2015a, based on max sun altitude if Tgwall < 0: # temporary for removing low Tg during morning 20130205 # Tg = 0 Tgwall = 0 # New estimation of Tg reduction for non - clear situation based on Reindl et al.1990 - radI0, _ = diffusefraction(I0, altitude, 1., Ta, RH) + radI0, _ = diffusefraction(I0, altitude, 1.0, Ta, RH) corr = 0.1473 * np.log(90 - (zen / np.pi * 180)) + 0.3454 # 20070329 correction of lat, Lindberg et al. 2008 CI_Tg = (radG / radI0) + (1 - corr) if (CI_Tg > 1) or (CI_Tg == np.inf): @@ -190,7 +292,7 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s CI_TgG = (radG / radG0) + (1 - corr) if (CI_TgG > 1) or (CI_TgG == np.inf): CI_TgG = 1 - + # Tg = Tg * CI_Tg # new estimation # Tgwall = Tgwall * CI_Tg Tg = Tg * CI_TgG # new estimation @@ -199,10 +301,47 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s Tg[Tg < 0] = 0 # temporary for removing low Tg during morning 20130205 # # # # Ground View Factors # # # # - gvfLup, gvfalb, gvfalbnosh, gvfLupE, gvfalbE, gvfalbnoshE, gvfLupS, gvfalbS, gvfalbnoshS, gvfLupW, gvfalbW,\ - gvfalbnoshW, gvfLupN, gvfalbN, gvfalbnoshN, gvfSum, gvfNorm = gvf_2018a(wallsun, walls, buildings, scale, shadow, first, - second, dirwalls, Tg, Tgwall, Ta, emis_grid, ewall, alb_grid, SBC, albedo_b, rows, cols, - Twater, lc_grid, landcover) + ( + gvfLup, + gvfalb, + gvfalbnosh, + gvfLupE, + gvfalbE, + gvfalbnoshE, + gvfLupS, + gvfalbS, + gvfalbnoshS, + gvfLupW, + gvfalbW, + gvfalbnoshW, + gvfLupN, + gvfalbN, + gvfalbnoshN, + gvfSum, + gvfNorm, + ) = gvf_2018a( + wallsun, + walls, + buildings, + scale, + shadow, + first, + second, + dirwalls, + Tg, + Tgwall, + Ta, + emis_grid, + ewall, + alb_grid, + SBC, + albedo_b, + rows, + cols, + Twater, + lc_grid, + landcover, + ) # # # # Lup, daytime # # # # # Surface temperature wave delay - new as from 2014a @@ -211,50 +350,102 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s LupS, timeaddnotused, Tgmap1S = TsWaveDelay_2015a(gvfLupS, firstdaytime, timeadd, timestepdec, Tgmap1S) LupW, timeaddnotused, Tgmap1W = TsWaveDelay_2015a(gvfLupW, firstdaytime, timeadd, timestepdec, Tgmap1W) LupN, timeaddnotused, Tgmap1N = TsWaveDelay_2015a(gvfLupN, firstdaytime, timeadd, timestepdec, Tgmap1N) - + # # For Tg output in POIs TgTemp = Tg * shadow + Ta - TgOut, timeadd, TgOut1 = TsWaveDelay_2015a(TgTemp, firstdaytime, timeadd, timestepdec, TgOut1) #timeadd 
only here v2021a + TgOut, timeadd, TgOut1 = TsWaveDelay_2015a( + TgTemp, firstdaytime, timeadd, timestepdec, TgOut1 + ) # timeadd only here v2021a # Building height angle from svf F_sh = cylindric_wedge(zen, svfalfa, rows, cols) # Fraction shadow on building walls based on sun alt and svf F_sh[np.isnan(F_sh)] = 0.5 # # # # # # # Calculation of shortwave daytime radiative fluxes # # # # # # # - Kdown = radI * shadow * np.sin(altitude * (np.pi / 180)) + dRad + albedo_b * (1 - svfbuveg) * \ - (radG * (1 - F_sh) + radD * F_sh) # *sin(altitude(i) * (pi / 180)) + Kdown = ( + radI * shadow * np.sin(altitude * (np.pi / 180)) + + dRad + + albedo_b * (1 - svfbuveg) * (radG * (1 - F_sh) + radD * F_sh) + ) # *sin(altitude(i) * (pi / 180)) + + Kup, KupE, KupS, KupW, KupN = Kup_veg_2015a( + radI, + radD, + radG, + altitude, + svfbuveg, + albedo_b, + F_sh, + gvfalb, + gvfalbE, + gvfalbS, + gvfalbW, + gvfalbN, + gvfalbnosh, + gvfalbnoshE, + gvfalbnoshS, + gvfalbnoshW, + gvfalbnoshN, + ) + + Keast, Ksouth, Kwest, Knorth, KsideI, KsideD, Kside = Kside_veg_v2022a( + radI, + radD, + radG, + shadow, + svfS, + svfW, + svfN, + svfE, + svfEveg, + svfSveg, + svfWveg, + svfNveg, + azimuth, + altitude, + psi, + t, + albedo_b, + F_sh, + KupE, + KupS, + KupW, + KupN, + cyl, + lv, + anisotropic_sky, + diffsh, + rows, + cols, + asvf, + shmat, + vegshmat, + vbshvegshmat, + ) - Kup, KupE, KupS, KupW, KupN = Kup_veg_2015a(radI, radD, radG, altitude, svfbuveg, albedo_b, F_sh, gvfalb, - gvfalbE, gvfalbS, gvfalbW, gvfalbN, gvfalbnosh, gvfalbnoshE, gvfalbnoshS, gvfalbnoshW, gvfalbnoshN) - - Keast, Ksouth, Kwest, Knorth, KsideI, KsideD, Kside = Kside_veg_v2022a(radI, radD, radG, shadow, svfS, svfW, svfN, svfE, - svfEveg, svfSveg, svfWveg, svfNveg, azimuth, altitude, psi, t, albedo_b, F_sh, KupE, KupS, KupW, - KupN, cyl, lv, anisotropic_sky, diffsh, rows, cols, asvf, shmat, vegshmat, vbshvegshmat) - firstdaytime = 0 else: # # # # # # # NIGHTTIME # # # # # # # # - Tgwall = 0 # CI_Tg = -999 # F_sh = [] # Nocturnal K fluxes set to 0 - Knight = np.zeros((rows, cols)) - Kdown = np.zeros((rows, cols)) - Kwest = np.zeros((rows, cols)) - Kup = np.zeros((rows, cols)) - Keast = np.zeros((rows, cols)) - Ksouth = np.zeros((rows, cols)) - Knorth = np.zeros((rows, cols)) - KsideI = np.zeros((rows, cols)) - KsideD = np.zeros((rows, cols)) - F_sh = np.zeros((rows, cols)) - Tg = np.zeros((rows, cols)) - shadow = np.zeros((rows, cols)) + Knight = np.zeros((rows, cols), dtype=np.float32) + Kdown = np.zeros((rows, cols), dtype=np.float32) + Kwest = np.zeros((rows, cols), dtype=np.float32) + Kup = np.zeros((rows, cols), dtype=np.float32) + Keast = np.zeros((rows, cols), dtype=np.float32) + Ksouth = np.zeros((rows, cols), dtype=np.float32) + Knorth = np.zeros((rows, cols), dtype=np.float32) + KsideI = np.zeros((rows, cols), dtype=np.float32) + KsideD = np.zeros((rows, cols), dtype=np.float32) + F_sh = np.zeros((rows, cols), dtype=np.float32) + Tg = np.zeros((rows, cols), dtype=np.float32) + shadow = np.zeros((rows, cols), dtype=np.float32) CI_Tg = deepcopy(CI) CI_TgG = deepcopy(CI) - dRad = np.zeros((rows,cols)) - Kside = np.zeros((rows,cols)) + dRad = np.zeros((rows, cols), dtype=np.float32) + Kside = np.zeros((rows, cols), dtype=np.float32) # # # # Lup # # # # Lup = SBC * emis_grid * ((Knight + Ta + Tg + 273.15) ** 4) @@ -274,35 +465,70 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s firstdaytime = 1 # # # # Ldown # # # # - Ldown = (svf + svfveg - 1) * esky * SBC * ((Ta + 273.15) ** 4) + (2 - svfveg - svfaveg) * 
ewall * SBC * \ - ((Ta + 273.15) ** 4) + (svfaveg - svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) + \ - (2 - svf - svfveg) * (1 - ewall) * esky * SBC * ((Ta + 273.15) ** 4) # Jonsson et al.(2006) + Ldown = ( + (svf + svfveg - 1) * esky * SBC * ((Ta + 273.15) ** 4) + + (2 - svfveg - svfaveg) * ewall * SBC * ((Ta + 273.15) ** 4) + + (svfaveg - svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) + + (2 - svf - svfveg) * (1 - ewall) * esky * SBC * ((Ta + 273.15) ** 4) + ) # Jonsson et al.(2006) # Ldown = Ldown - 25 # Shown by Jonsson et al.(2006) and Duarte et al.(2006) if CI < 0.95: # non - clear conditions c = 1 - CI - Ldown = Ldown * (1 - c) + c * ((svf + svfveg - 1) * SBC * ((Ta + 273.15) ** 4) + (2 - svfveg - svfaveg) * - ewall * SBC * ((Ta + 273.15) ** 4) + (svfaveg - svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) + - (2 - svf - svfveg) * (1 - ewall) * SBC * ((Ta + 273.15) ** 4)) # NOT REALLY TESTED!!! BUT MORE CORRECT? + Ldown = Ldown * (1 - c) + c * ( + (svf + svfveg - 1) * SBC * ((Ta + 273.15) ** 4) + + (2 - svfveg - svfaveg) * ewall * SBC * ((Ta + 273.15) ** 4) + + (svfaveg - svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) + + (2 - svf - svfveg) * (1 - ewall) * SBC * ((Ta + 273.15) ** 4) + ) # NOT REALLY TESTED!!! BUT MORE CORRECT? # # # # Lside # # # # - Least, Lsouth, Lwest, Lnorth = Lside_veg_v2022a(svfS, svfW, svfN, svfE, svfEveg, svfSveg, svfWveg, svfNveg, - svfEaveg, svfSaveg, svfWaveg, svfNaveg, azimuth, altitude, Ta, Tgwall, SBC, ewall, Ldown, - esky, t, F_sh, CI, LupE, LupS, LupW, LupN, anisotropic_sky) + Least, Lsouth, Lwest, Lnorth = Lside_veg_v2022a( + svfS, + svfW, + svfN, + svfE, + svfEveg, + svfSveg, + svfWveg, + svfNveg, + svfEaveg, + svfSaveg, + svfWaveg, + svfNaveg, + azimuth, + altitude, + Ta, + Tgwall, + SBC, + ewall, + Ldown, + esky, + t, + F_sh, + CI, + LupE, + LupS, + LupW, + LupN, + anisotropic_sky, + ) # New parameterization scheme for wall temperatures if wallScheme == 1: # albedo_g = 0.15 #TODO Change to correct if altitude < 0: wallsh_ = 0 - voxelTable = wall_surface_temperature(voxelTable, wallsh_, altitude, azimuth, timeStep, radI, radD, radG, Ldown, Lup, Ta, esky) + voxelTable = wall_surface_temperature( + voxelTable, wallsh_, altitude, azimuth, timeStep, radI, radD, radG, Ldown, Lup, Ta, esky + ) # Anisotropic sky - if (anisotropic_sky == 1): - if 'lv' not in locals(): + if anisotropic_sky == 1: + if "lv" not in locals(): # Creating skyvault of patches of constant radians (Tregeneza and Sharples, 1993) skyvaultalt, skyvaultazi, _, _, _, _, _ = create_patches(patch_option) - patch_emissivities = np.zeros(skyvaultalt.shape[0]) + patch_emissivities = np.zeros(skyvaultalt.shape[0], dtype=np.float32) x = np.transpose(np.atleast_2d(skyvaultalt)) y = np.transpose(np.atleast_2d(skyvaultazi)) @@ -320,19 +546,72 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s # Create lv from L_patches if nighttime, i.e. lv does not exist if altitude < 0: # CI = deepcopy(CI) - lv = deepcopy(L_patches); KupE = 0; KupS = 0; KupW = 0; KupN = 0 + lv = deepcopy(L_patches) + KupE = 0 + KupS = 0 + KupW = 0 + KupN = 0 # Adjust sky emissivity under semi-cloudy/hazy/cloudy/overcast conditions, i.e. CI lower than 0.95 if CI < 0.95: - esky_c = CI * esky + (1 - CI) * 1. 
+ esky_c = CI * esky + (1 - CI) * 1.0 esky = esky_c - Ldown, Lside, Lside_sky, Lside_veg, Lside_sh, Lside_sun, Lside_ref, Least_, Lwest_, Lnorth_, Lsouth_, \ - Keast, Ksouth, Kwest, Knorth, KsideI, KsideD, Kside, steradians, skyalt = ani_sky(shmat, vegshmat, vbshvegshmat, altitude, azimuth, asvf, cyl, esky, - L_patches, wallScheme, voxelTable, voxelMaps, steradians, Ta, Tgwall, ewall, Lup, radI, radD, radG, lv, - albedo_b, 0, diffsh, shadow, KupE, KupS, KupW, KupN, i) + ( + Ldown, + Lside, + Lside_sky, + Lside_veg, + Lside_sh, + Lside_sun, + Lside_ref, + Least_, + Lwest_, + Lnorth_, + Lsouth_, + Keast, + Ksouth, + Kwest, + Knorth, + KsideI, + KsideD, + Kside, + steradians, + skyalt, + ) = ani_sky( + shmat, + vegshmat, + vbshvegshmat, + altitude, + azimuth, + asvf, + cyl, + esky, + L_patches, + wallScheme, + voxelTable, + voxelMaps, + steradians, + Ta, + Tgwall, + ewall, + Lup, + radI, + radD, + radG, + lv, + albedo_b, + 0, + diffsh, + shadow, + KupE, + KupS, + KupW, + KupN, + i, + ) else: - Lside = np.zeros((rows, cols)) + Lside = np.zeros((rows, cols), dtype=np.float32) L_patches = None # Box and anisotropic longwave @@ -344,20 +623,23 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s # # # # Calculation of radiant flux density and Tmrt # # # # # Human body considered as a cylinder with isotropic all-sky diffuse - if cyl == 1 and anisotropic_sky == 0: - Sstr = absK * (KsideI * Fcyl + (Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * \ - ((Ldown + Lup) * Fup + (Lnorth + Least + Lsouth + Lwest) * Fside) - # Human body considered as a cylinder with Perez et al. (1993) (anisotropic sky diffuse) + if cyl == 1 and anisotropic_sky == 0: + Sstr = absK * (KsideI * Fcyl + (Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * ( + (Ldown + Lup) * Fup + (Lnorth + Least + Lsouth + Lwest) * Fside + ) + # Human body considered as a cylinder with Perez et al. 
(1993) (anisotropic sky diffuse) # and Martin and Berdahl (1984) (anisotropic sky longwave) elif cyl == 1 and anisotropic_sky == 1: - Sstr = absK * (Kside * Fcyl + (Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * \ - ((Ldown + Lup) * Fup + Lside * Fcyl + (Lnorth + Least + Lsouth + Lwest) * Fside) + Sstr = absK * (Kside * Fcyl + (Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * ( + (Ldown + Lup) * Fup + Lside * Fcyl + (Lnorth + Least + Lsouth + Lwest) * Fside + ) # Knorth = nan Ksouth = nan Kwest = nan Keast = nan - else: # Human body considered as a standing cube - Sstr = absK * ((Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * \ - ((Ldown + Lup) * Fup + (Lnorth + Least + Lsouth + Lwest) * Fside) + else: # Human body considered as a standing cube + Sstr = absK * ((Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * ( + (Ldown + Lup) * Fup + (Lnorth + Least + Lsouth + Lwest) * Fside + ) - Tmrt = np.sqrt(np.sqrt((Sstr / (absL * SBC)))) - 273.2 + Tmrt = np.sqrt(np.sqrt(Sstr / (absL * SBC))) - 273.2 # Add longwave to cardinal directions for output in POI if (cyl == 1) and (anisotropic_sky == 1): @@ -366,7 +648,46 @@ def Solweig_2025a_calc(i, dsm, scale, rows, cols, svf, svfN, svfW, svfE, svfS, s Lnorth += Lnorth_ Lsouth += Lsouth_ - return Tmrt, Kdown, Kup, Ldown, Lup, Tg, ea, esky, I0, CI, shadow, firstdaytime, timestepdec, \ - timeadd, Tgmap1, Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, Keast, Ksouth, Kwest, Knorth, Least, \ - Lsouth, Lwest, Lnorth, KsideI, TgOut1, TgOut, radI, radD, \ - Lside, L_patches, CI_Tg, CI_TgG, KsideD, dRad, Kside, steradians, voxelTable + return ( + Tmrt, + Kdown, + Kup, + Ldown, + Lup, + Tg, + ea, + esky, + I0, + CI, + shadow, + firstdaytime, + timestepdec, + timeadd, + Tgmap1, + Tgmap1E, + Tgmap1S, + Tgmap1W, + Tgmap1N, + Keast, + Ksouth, + Kwest, + Knorth, + Least, + Lsouth, + Lwest, + Lnorth, + KsideI, + TgOut1, + TgOut, + radI, + radD, + Lside, + L_patches, + CI_Tg, + CI_TgG, + KsideD, + dRad, + Kside, + steradians, + voxelTable, + ) diff --git a/umep/functions/SOLWEIGpython/Solweig_run.py b/umep/functions/SOLWEIGpython/Solweig_run.py index 4b591aa..a1fdf5a 100644 --- a/umep/functions/SOLWEIGpython/Solweig_run.py +++ b/umep/functions/SOLWEIGpython/Solweig_run.py @@ -62,7 +62,9 @@ def solweig_run(configPath, feedback): standAlone = int(configDict["standalone"]) # Load DSM - dsm_arr, dsm_trf_arr, dsm_crs_wkt, dsm_nd_val = common.load_raster(configDict["filepath_dsm"], bbox=None) + dsm_arr, dsm_trf_arr, dsm_crs_wkt, dsm_nd_val = common.load_raster( + configDict["filepath_dsm"], bbox=None, coerce_f64_to_f32=True + ) # trf is a list: [top left x, w-e pixel size, rotation, top left y, rotation, n-s pixel size] scale = 1 / dsm_trf_arr[1] # pixel resolution in metres left_x = dsm_trf_arr[0] @@ -88,9 +90,9 @@ def solweig_run(configPath, feedback): trunkratio = param["Tree_settings"]["Value"]["Trunk_ratio"] usevegdem = int(configDict["usevegdem"]) if usevegdem == 1: - vegdsm, _, _, _ = common.load_raster(configDict["filepath_cdsm"], bbox=None) + vegdsm, _, _, _ = common.load_raster(configDict["filepath_cdsm"], bbox=None, coerce_f64_to_f32=True) if configDict["filepath_tdsm"] != "": - vegdsm2, _, _, _ = common.load_raster(configDict["filepath_tdsm"], bbox=None) + vegdsm2, _, _, _ = common.load_raster(configDict["filepath_tdsm"], bbox=None, coerce_f64_to_f32=True) else: vegdsm2 = vegdsm * trunkratio else: @@ -100,14 +102,14 @@ def solweig_run(configPath, feedback): # Land cover 
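# Illustrative sketch (not part of the patch): a sanity check on the Tmrt
# inversion above — if the absorbed flux density Sstr equals absL * SBC * T**4
# for T = 300 K, the mean radiant temperature comes back as 300 - 273.2 degC.
# absL = 0.97 is a typical longwave absorption coefficient (assumed value here).
import numpy as np

SBC = 5.67051e-8
absL = 0.97
Sstr = absL * SBC * 300.0**4
Tmrt = np.sqrt(np.sqrt(Sstr / (absL * SBC))) - 273.2
print(round(float(Tmrt), 1))   # -> 26.8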
landcover = int(configDict["landcover"]) if landcover == 1: - lcgrid, _, _, _ = common.load_raster(configDict["filepath_lc"], bbox=None) + lcgrid, _, _, _ = common.load_raster(configDict["filepath_lc"], bbox=None, coerce_f64_to_f32=True) else: lcgrid = 0 # DEM for buildings #TODO: fix nodata in standalone demforbuild = int(configDict["demforbuild"]) if demforbuild == 1: - dem, _, _, dem_nd_val = common.load_raster(configDict["filepath_dem"], bbox=None) + dem, _, _, dem_nd_val = common.load_raster(configDict["filepath_dem"], bbox=None, coerce_f64_to_f32=True) # response to issue and #230 dem[dem == dem_nd_val] = 0.0 if dem.min() < 0: @@ -121,24 +123,44 @@ def solweig_run(configPath, feedback): zip.extractall(configDict["working_dir"]) zip.close() - svf, _, _, _ = common.load_raster(configDict["working_dir"] + "/svf.tif", bbox=None) - svfN, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfN.tif", bbox=None) - svfS, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfS.tif", bbox=None) - svfE, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfE.tif", bbox=None) - svfW, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfW.tif", bbox=None) + svf, _, _, _ = common.load_raster(configDict["working_dir"] + "/svf.tif", bbox=None, coerce_f64_to_f32=True) + svfN, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfN.tif", bbox=None, coerce_f64_to_f32=True) + svfS, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfS.tif", bbox=None, coerce_f64_to_f32=True) + svfE, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfE.tif", bbox=None, coerce_f64_to_f32=True) + svfW, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfW.tif", bbox=None, coerce_f64_to_f32=True) if usevegdem == 1: - svfveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfveg.tif", bbox=None) - svfNveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfNveg.tif", bbox=None) - svfSveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfSveg.tif", bbox=None) - svfEveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfEveg.tif", bbox=None) - svfWveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfWveg.tif", bbox=None) - - svfaveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfaveg.tif", bbox=None) - svfNaveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfNaveg.tif", bbox=None) - svfSaveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfSaveg.tif", bbox=None) - svfEaveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfEaveg.tif", bbox=None) - svfWaveg, _, _, _ = common.load_raster(configDict["working_dir"] + "/svfWaveg.tif", bbox=None) + svfveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfNveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfNveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfSveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfSveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfEveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfEveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfWveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfWveg.tif", bbox=None, coerce_f64_to_f32=True + ) + + svfaveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfaveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfNaveg, _, _, _ = common.load_raster( + 
configDict["working_dir"] + "/svfNaveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfSaveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfSaveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfEaveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfEaveg.tif", bbox=None, coerce_f64_to_f32=True + ) + svfWaveg, _, _, _ = common.load_raster( + configDict["working_dir"] + "/svfWaveg.tif", bbox=None, coerce_f64_to_f32=True + ) else: svfveg = np.ones((rows, cols)) svfNveg = np.ones((rows, cols)) @@ -156,8 +178,8 @@ def solweig_run(configPath, feedback): # %matlab crazyness around 0 svfalfa = np.arcsin(np.exp(np.log(1.0 - tmp) / 2.0)) - wallheight, _, _, _ = common.load_raster(configDict["filepath_wh"], bbox=None) - wallaspect, _, _, _ = common.load_raster(configDict["filepath_wa"], bbox=None) + wallheight, _, _, _ = common.load_raster(configDict["filepath_wh"], bbox=None, coerce_f64_to_f32=True) + wallaspect, _, _, _ = common.load_raster(configDict["filepath_wa"], bbox=None, coerce_f64_to_f32=True) # Metdata headernum = 1 @@ -318,6 +340,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) # Import shadow matrices (Anisotropic sky) @@ -825,6 +848,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) if configDict["outputkup"] == "1": common.save_raster( @@ -833,6 +857,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) if configDict["outputkdown"] == "1": common.save_raster( @@ -841,6 +866,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) if configDict["outputlup"] == "1": common.save_raster( @@ -849,6 +875,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) if configDict["outputldown"] == "1": common.save_raster( @@ -857,6 +884,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) if configDict["outputsh"] == "1": common.save_raster( @@ -865,6 +893,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) if configDict["outputkdiff"] == "1": common.save_raster( @@ -873,6 +902,7 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) # Sky view image of patches @@ -961,4 +991,5 @@ def solweig_run(configPath, feedback): dsm_trf_arr, dsm_crs_wkt, dsm_nd_val, + coerce_f64_to_f32=True, ) diff --git a/umep/functions/SOLWEIGpython/UTCI_calculations.py b/umep/functions/SOLWEIGpython/UTCI_calculations.py index dbc5959..9a853a5 100644 --- a/umep/functions/SOLWEIGpython/UTCI_calculations.py +++ b/umep/functions/SOLWEIGpython/UTCI_calculations.py @@ -1,222 +1,225 @@ import numpy as np -def utci_polynomial(D_Tmrt, Ta, va, Pa): +def utci_polynomial(D_Tmrt, Ta, va, Pa): # calculate 6th order polynomial as approximation - UTCI_approx = Ta + \ - (6.07562052E-01) + \ - (-2.27712343E-02) * Ta + \ - (8.06470249E-04) * Ta * Ta + \ - (-1.54271372E-04) * Ta * Ta * Ta + \ - (-3.24651735E-06) * Ta * Ta * Ta * Ta + \ - (7.32602852E-08) * Ta * Ta * Ta * Ta * Ta + \ - (1.35959073E-09) * Ta * Ta * Ta * Ta * Ta * Ta + \ - (-2.25836520E+00) * va + \ - (8.80326035E-02) * Ta * va + \ - (2.16844454E-03) * Ta * Ta * va + \ - (-1.53347087E-05) * Ta * Ta * Ta * va + \ - (-5.72983704E-07) * Ta * Ta * Ta * Ta * va + \ - (-2.55090145E-09) * Ta * Ta * Ta * 
Ta * Ta * va + \ - (-7.51269505E-01) * va * va + \ - (-4.08350271E-03) * Ta * va * va + \ - (-5.21670675E-05) * Ta * Ta * va * va + \ - (1.94544667E-06) * Ta * Ta * Ta * va * va + \ - (1.14099531E-08) * Ta * Ta * Ta * Ta * va * va + \ - (1.58137256E-01) * va * va * va + \ - (-6.57263143E-05) * Ta * va * va * va + \ - (2.22697524E-07) * Ta * Ta * va * va * va + \ - (-4.16117031E-08) * Ta * Ta * Ta * va * va * va + \ - (-1.27762753E-02) * va * va * va * va + \ - (9.66891875E-06) * Ta * va * va * va * va + \ - (2.52785852E-09) * Ta * Ta * va * va * va * va + \ - (4.56306672E-04) * va * va * va * va * va + \ - (-1.74202546E-07) * Ta * va * va * va * va * va + \ - (-5.91491269E-06) * va * va * va * va * va * va + \ - (3.98374029E-01) * D_Tmrt + \ - (1.83945314E-04) * Ta * D_Tmrt + \ - (-1.73754510E-04) * Ta * Ta * D_Tmrt + \ - (-7.60781159E-07) * Ta * Ta * Ta * D_Tmrt + \ - (3.77830287E-08) * Ta * Ta * Ta * Ta * D_Tmrt + \ - (5.43079673E-10) * Ta * Ta * Ta * Ta * Ta * D_Tmrt + \ - (-2.00518269E-02) * va * D_Tmrt + \ - (8.92859837E-04) * Ta * va * D_Tmrt + \ - (3.45433048E-06) * Ta * Ta * va * D_Tmrt + \ - (-3.77925774E-07) * Ta * Ta * Ta * va * D_Tmrt + \ - (-1.69699377E-09) * Ta * Ta * Ta * Ta * va * D_Tmrt + \ - (1.69992415E-04) * va * va * D_Tmrt + \ - (-4.99204314E-05) * Ta * va * va * D_Tmrt + \ - (2.47417178E-07) * Ta * Ta * va * va * D_Tmrt + \ - (1.07596466E-08) * Ta * Ta * Ta * va * va * D_Tmrt + \ - (8.49242932E-05) * va * va * va * D_Tmrt + \ - (1.35191328E-06) * Ta * va * va * va * D_Tmrt + \ - (-6.21531254E-09) * Ta * Ta * va * va * va * D_Tmrt + \ - (-4.99410301E-06) * va * va * va * va * D_Tmrt + \ - (-1.89489258E-08) * Ta * va * va * va * va * D_Tmrt + \ - (8.15300114E-08) * va * va * va * va * va * D_Tmrt + \ - (7.55043090E-04) * D_Tmrt * D_Tmrt + \ - (-5.65095215E-05) * Ta * D_Tmrt * D_Tmrt + \ - (-4.52166564E-07) * Ta * Ta * D_Tmrt * D_Tmrt + \ - (2.46688878E-08) * Ta * Ta * Ta * D_Tmrt * D_Tmrt + \ - (2.42674348E-10) * Ta * Ta * Ta * Ta * D_Tmrt * D_Tmrt + \ - (1.54547250E-04) * va * D_Tmrt * D_Tmrt + \ - (5.24110970E-06) * Ta * va * D_Tmrt * D_Tmrt + \ - (-8.75874982E-08) * Ta * Ta * va * D_Tmrt * D_Tmrt + \ - (-1.50743064E-09) * Ta * Ta * Ta * va * D_Tmrt * D_Tmrt + \ - (-1.56236307E-05) * va * va * D_Tmrt * D_Tmrt + \ - (-1.33895614E-07) * Ta * va * va * D_Tmrt * D_Tmrt + \ - (2.49709824E-09) * Ta * Ta * va * va * D_Tmrt * D_Tmrt + \ - (6.51711721E-07) * va * va * va * D_Tmrt * D_Tmrt + \ - (1.94960053E-09) * Ta * va * va * va * D_Tmrt * D_Tmrt + \ - (-1.00361113E-08) * va * va * va * va * D_Tmrt * D_Tmrt + \ - (-1.21206673E-05) * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-2.18203660E-07) * Ta * D_Tmrt * D_Tmrt * D_Tmrt + \ - (7.51269482E-09) * Ta * Ta * D_Tmrt * D_Tmrt * D_Tmrt + \ - (9.79063848E-11) * Ta * Ta * Ta * D_Tmrt * D_Tmrt * D_Tmrt + \ - (1.25006734E-06) * va * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-1.81584736E-09) * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-3.52197671E-10) * Ta * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-3.36514630E-08) * va * va * D_Tmrt * D_Tmrt * D_Tmrt + \ - (1.35908359E-10) * Ta * va * va * D_Tmrt * D_Tmrt * D_Tmrt + \ - (4.17032620E-10) * va * va * va * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-1.30369025E-09) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (4.13908461E-10) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (9.22652254E-12) * Ta * Ta * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-5.08220384E-09) * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-2.24730961E-11) * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (1.17139133E-10) * va * va * D_Tmrt * D_Tmrt * 
D_Tmrt * D_Tmrt + \ - (6.62154879E-10) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (4.03863260E-13) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (1.95087203E-12) * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (-4.73602469E-12) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + \ - (5.12733497E+00) * Pa + \ - (-3.12788561E-01) * Ta * Pa + \ - (-1.96701861E-02) * Ta * Ta * Pa + \ - (9.99690870E-04) * Ta * Ta * Ta * Pa + \ - (9.51738512E-06) * Ta * Ta * Ta * Ta * Pa + \ - (-4.66426341E-07) * Ta * Ta * Ta * Ta * Ta * Pa + \ - (5.48050612E-01) * va * Pa + \ - (-3.30552823E-03) * Ta * va * Pa + \ - (-1.64119440E-03) * Ta * Ta * va * Pa + \ - (-5.16670694E-06) * Ta * Ta * Ta * va * Pa + \ - (9.52692432E-07) * Ta * Ta * Ta * Ta * va * Pa + \ - (-4.29223622E-02) * va * va * Pa + \ - (5.00845667E-03) * Ta * va * va * Pa + \ - (1.00601257E-06) * Ta * Ta * va * va * Pa + \ - (-1.81748644E-06) * Ta * Ta * Ta * va * va * Pa + \ - (-1.25813502E-03) * va * va * va * Pa + \ - (-1.79330391E-04) * Ta * va * va * va * Pa + \ - (2.34994441E-06) * Ta * Ta * va * va * va * Pa + \ - (1.29735808E-04) * va * va * va * va * Pa + \ - (1.29064870E-06) * Ta * va * va * va * va * Pa + \ - (-2.28558686E-06) * va * va * va * va * va * Pa + \ - (-3.69476348E-02) * D_Tmrt * Pa + \ - (1.62325322E-03) * Ta * D_Tmrt * Pa + \ - (-3.14279680E-05) * Ta * Ta * D_Tmrt * Pa + \ - (2.59835559E-06) * Ta * Ta * Ta * D_Tmrt * Pa + \ - (-4.77136523E-08) * Ta * Ta * Ta * Ta * D_Tmrt * Pa + \ - (8.64203390E-03) * va * D_Tmrt * Pa + \ - (-6.87405181E-04) * Ta * va * D_Tmrt * Pa + \ - (-9.13863872E-06) * Ta * Ta * va * D_Tmrt * Pa + \ - (5.15916806E-07) * Ta * Ta * Ta * va * D_Tmrt * Pa + \ - (-3.59217476E-05) * va * va * D_Tmrt * Pa + \ - (3.28696511E-05) * Ta * va * va * D_Tmrt * Pa + \ - (-7.10542454E-07) * Ta * Ta * va * va * D_Tmrt * Pa + \ - (-1.24382300E-05) * va * va * va * D_Tmrt * Pa + \ - (-7.38584400E-09) * Ta * va * va * va * D_Tmrt * Pa + \ - (2.20609296E-07) * va * va * va * va * D_Tmrt * Pa + \ - (-7.32469180E-04) * D_Tmrt * D_Tmrt * Pa + \ - (-1.87381964E-05) * Ta * D_Tmrt * D_Tmrt * Pa + \ - (4.80925239E-06) * Ta * Ta * D_Tmrt * D_Tmrt * Pa + \ - (-8.75492040E-08) * Ta * Ta * Ta * D_Tmrt * D_Tmrt * Pa + \ - (2.77862930E-05) * va * D_Tmrt * D_Tmrt * Pa + \ - (-5.06004592E-06) * Ta * va * D_Tmrt * D_Tmrt * Pa + \ - (1.14325367E-07) * Ta * Ta * va * D_Tmrt * D_Tmrt * Pa + \ - (2.53016723E-06) * va * va * D_Tmrt * D_Tmrt * Pa + \ - (-1.72857035E-08) * Ta * va * va * D_Tmrt * D_Tmrt * Pa + \ - (-3.95079398E-08) * va * va * va * D_Tmrt * D_Tmrt * Pa + \ - (-3.59413173E-07) * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (7.04388046E-07) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (-1.89309167E-08) * Ta * Ta * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (-4.79768731E-07) * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (7.96079978E-09) * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (1.62897058E-09) * va * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (3.94367674E-08) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (-1.18566247E-09) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (3.34678041E-10) * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (-1.15606447E-10) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + \ - (-2.80626406E+00) * Pa * Pa + \ - (5.48712484E-01) * Ta * Pa * Pa + \ - (-3.99428410E-03) * Ta * Ta * Pa * Pa + \ - (-9.54009191E-04) * Ta * Ta * Ta * Pa * Pa + \ - (1.93090978E-05) * Ta * Ta * Ta * Ta * Pa * Pa + \ - (-3.08806365E-01) * va * Pa * Pa + \ - (1.16952364E-02) * Ta * va * Pa * Pa + \ - 
(4.95271903E-04) * Ta * Ta * va * Pa * Pa + \ - (-1.90710882E-05) * Ta * Ta * Ta * va * Pa * Pa + \ - (2.10787756E-03) * va * va * Pa * Pa + \ - (-6.98445738E-04) * Ta * va * va * Pa * Pa + \ - (2.30109073E-05) * Ta * Ta * va * va * Pa * Pa + \ - (4.17856590E-04) * va * va * va * Pa * Pa + \ - (-1.27043871E-05) * Ta * va * va * va * Pa * Pa + \ - (-3.04620472E-06) * va * va * va * va * Pa * Pa + \ - (5.14507424E-02) * D_Tmrt * Pa * Pa + \ - (-4.32510997E-03) * Ta * D_Tmrt * Pa * Pa + \ - (8.99281156E-05) * Ta * Ta * D_Tmrt * Pa * Pa + \ - (-7.14663943E-07) * Ta * Ta * Ta * D_Tmrt * Pa * Pa + \ - (-2.66016305E-04) * va * D_Tmrt * Pa * Pa + \ - (2.63789586E-04) * Ta * va * D_Tmrt * Pa * Pa + \ - (-7.01199003E-06) * Ta * Ta * va * D_Tmrt * Pa * Pa + \ - (-1.06823306E-04) * va * va * D_Tmrt * Pa * Pa + \ - (3.61341136E-06) * Ta * va * va * D_Tmrt * Pa * Pa + \ - (2.29748967E-07) * va * va * va * D_Tmrt * Pa * Pa + \ - (3.04788893E-04) * D_Tmrt * D_Tmrt * Pa * Pa + \ - (-6.42070836E-05) * Ta * D_Tmrt * D_Tmrt * Pa * Pa + \ - (1.16257971E-06) * Ta * Ta * D_Tmrt * D_Tmrt * Pa * Pa + \ - (7.68023384E-06) * va * D_Tmrt * D_Tmrt * Pa * Pa + \ - (-5.47446896E-07) * Ta * va * D_Tmrt * D_Tmrt * Pa * Pa + \ - (-3.59937910E-08) * va * va * D_Tmrt * D_Tmrt * Pa * Pa + \ - (-4.36497725E-06) * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + \ - (1.68737969E-07) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + \ - (2.67489271E-08) * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + \ - (3.23926897E-09) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + \ - (-3.53874123E-02) * Pa * Pa * Pa + \ - (-2.21201190E-01) * Ta * Pa * Pa * Pa + \ - (1.55126038E-02) * Ta * Ta * Pa * Pa * Pa + \ - (-2.63917279E-04) * Ta * Ta * Ta * Pa * Pa * Pa + \ - (4.53433455E-02) * va * Pa * Pa * Pa + \ - (-4.32943862E-03) * Ta * va * Pa * Pa * Pa + \ - (1.45389826E-04) * Ta * Ta * va * Pa * Pa * Pa + \ - (2.17508610E-04) * va * va * Pa * Pa * Pa + \ - (-6.66724702E-05) * Ta * va * va * Pa * Pa * Pa + \ - (3.33217140E-05) * va * va * va * Pa * Pa * Pa + \ - (-2.26921615E-03) * D_Tmrt * Pa * Pa * Pa + \ - (3.80261982E-04) * Ta * D_Tmrt * Pa * Pa * Pa + \ - (-5.45314314E-09) * Ta * Ta * D_Tmrt * Pa * Pa * Pa + \ - (-7.96355448E-04) * va * D_Tmrt * Pa * Pa * Pa + \ - (2.53458034E-05) * Ta * va * D_Tmrt * Pa * Pa * Pa + \ - (-6.31223658E-06) * va * va * D_Tmrt * Pa * Pa * Pa + \ - (3.02122035E-04) * D_Tmrt * D_Tmrt * Pa * Pa * Pa + \ - (-4.77403547E-06) * Ta * D_Tmrt * D_Tmrt * Pa * Pa * Pa + \ - (1.73825715E-06) * va * D_Tmrt * D_Tmrt * Pa * Pa * Pa + \ - (-4.09087898E-07) * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa * Pa + \ - (6.14155345E-01) * Pa * Pa * Pa * Pa + \ - (-6.16755931E-02) * Ta * Pa * Pa * Pa * Pa + \ - (1.33374846E-03) * Ta * Ta * Pa * Pa * Pa * Pa + \ - (3.55375387E-03) * va * Pa * Pa * Pa * Pa + \ - (-5.13027851E-04) * Ta * va * Pa * Pa * Pa * Pa + \ - (1.02449757E-04) * va * va * Pa * Pa * Pa * Pa + \ - (-1.48526421E-03) * D_Tmrt * Pa * Pa * Pa * Pa + \ - (-4.11469183E-05) * Ta * D_Tmrt * Pa * Pa * Pa * Pa + \ - (-6.80434415E-06) * va * D_Tmrt * Pa * Pa * Pa * Pa + \ - (-9.77675906E-06) * D_Tmrt * D_Tmrt * Pa * Pa * Pa * Pa + \ - (8.82773108E-02) * Pa * Pa * Pa * Pa * Pa + \ - (-3.01859306E-03) * Ta * Pa * Pa * Pa * Pa * Pa + \ - (1.04452989E-03) * va * Pa * Pa * Pa * Pa * Pa + \ - (2.47090539E-04) * D_Tmrt * Pa * Pa * Pa * Pa * Pa + \ - (1.48348065E-03) * Pa * Pa * Pa * Pa * Pa * Pa + UTCI_approx = ( + Ta + + (6.07562052e-01) + + (-2.27712343e-02) * Ta + + (8.06470249e-04) * Ta * Ta + + (-1.54271372e-04) * Ta * Ta * Ta + + (-3.24651735e-06) * Ta * Ta 
* Ta * Ta + + (7.32602852e-08) * Ta * Ta * Ta * Ta * Ta + + (1.35959073e-09) * Ta * Ta * Ta * Ta * Ta * Ta + + (-2.25836520e00) * va + + (8.80326035e-02) * Ta * va + + (2.16844454e-03) * Ta * Ta * va + + (-1.53347087e-05) * Ta * Ta * Ta * va + + (-5.72983704e-07) * Ta * Ta * Ta * Ta * va + + (-2.55090145e-09) * Ta * Ta * Ta * Ta * Ta * va + + (-7.51269505e-01) * va * va + + (-4.08350271e-03) * Ta * va * va + + (-5.21670675e-05) * Ta * Ta * va * va + + (1.94544667e-06) * Ta * Ta * Ta * va * va + + (1.14099531e-08) * Ta * Ta * Ta * Ta * va * va + + (1.58137256e-01) * va * va * va + + (-6.57263143e-05) * Ta * va * va * va + + (2.22697524e-07) * Ta * Ta * va * va * va + + (-4.16117031e-08) * Ta * Ta * Ta * va * va * va + + (-1.27762753e-02) * va * va * va * va + + (9.66891875e-06) * Ta * va * va * va * va + + (2.52785852e-09) * Ta * Ta * va * va * va * va + + (4.56306672e-04) * va * va * va * va * va + + (-1.74202546e-07) * Ta * va * va * va * va * va + + (-5.91491269e-06) * va * va * va * va * va * va + + (3.98374029e-01) * D_Tmrt + + (1.83945314e-04) * Ta * D_Tmrt + + (-1.73754510e-04) * Ta * Ta * D_Tmrt + + (-7.60781159e-07) * Ta * Ta * Ta * D_Tmrt + + (3.77830287e-08) * Ta * Ta * Ta * Ta * D_Tmrt + + (5.43079673e-10) * Ta * Ta * Ta * Ta * Ta * D_Tmrt + + (-2.00518269e-02) * va * D_Tmrt + + (8.92859837e-04) * Ta * va * D_Tmrt + + (3.45433048e-06) * Ta * Ta * va * D_Tmrt + + (-3.77925774e-07) * Ta * Ta * Ta * va * D_Tmrt + + (-1.69699377e-09) * Ta * Ta * Ta * Ta * va * D_Tmrt + + (1.69992415e-04) * va * va * D_Tmrt + + (-4.99204314e-05) * Ta * va * va * D_Tmrt + + (2.47417178e-07) * Ta * Ta * va * va * D_Tmrt + + (1.07596466e-08) * Ta * Ta * Ta * va * va * D_Tmrt + + (8.49242932e-05) * va * va * va * D_Tmrt + + (1.35191328e-06) * Ta * va * va * va * D_Tmrt + + (-6.21531254e-09) * Ta * Ta * va * va * va * D_Tmrt + + (-4.99410301e-06) * va * va * va * va * D_Tmrt + + (-1.89489258e-08) * Ta * va * va * va * va * D_Tmrt + + (8.15300114e-08) * va * va * va * va * va * D_Tmrt + + (7.55043090e-04) * D_Tmrt * D_Tmrt + + (-5.65095215e-05) * Ta * D_Tmrt * D_Tmrt + + (-4.52166564e-07) * Ta * Ta * D_Tmrt * D_Tmrt + + (2.46688878e-08) * Ta * Ta * Ta * D_Tmrt * D_Tmrt + + (2.42674348e-10) * Ta * Ta * Ta * Ta * D_Tmrt * D_Tmrt + + (1.54547250e-04) * va * D_Tmrt * D_Tmrt + + (5.24110970e-06) * Ta * va * D_Tmrt * D_Tmrt + + (-8.75874982e-08) * Ta * Ta * va * D_Tmrt * D_Tmrt + + (-1.50743064e-09) * Ta * Ta * Ta * va * D_Tmrt * D_Tmrt + + (-1.56236307e-05) * va * va * D_Tmrt * D_Tmrt + + (-1.33895614e-07) * Ta * va * va * D_Tmrt * D_Tmrt + + (2.49709824e-09) * Ta * Ta * va * va * D_Tmrt * D_Tmrt + + (6.51711721e-07) * va * va * va * D_Tmrt * D_Tmrt + + (1.94960053e-09) * Ta * va * va * va * D_Tmrt * D_Tmrt + + (-1.00361113e-08) * va * va * va * va * D_Tmrt * D_Tmrt + + (-1.21206673e-05) * D_Tmrt * D_Tmrt * D_Tmrt + + (-2.18203660e-07) * Ta * D_Tmrt * D_Tmrt * D_Tmrt + + (7.51269482e-09) * Ta * Ta * D_Tmrt * D_Tmrt * D_Tmrt + + (9.79063848e-11) * Ta * Ta * Ta * D_Tmrt * D_Tmrt * D_Tmrt + + (1.25006734e-06) * va * D_Tmrt * D_Tmrt * D_Tmrt + + (-1.81584736e-09) * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt + + (-3.52197671e-10) * Ta * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt + + (-3.36514630e-08) * va * va * D_Tmrt * D_Tmrt * D_Tmrt + + (1.35908359e-10) * Ta * va * va * D_Tmrt * D_Tmrt * D_Tmrt + + (4.17032620e-10) * va * va * va * D_Tmrt * D_Tmrt * D_Tmrt + + (-1.30369025e-09) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (4.13908461e-10) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (9.22652254e-12) * Ta * Ta * D_Tmrt * D_Tmrt * 
D_Tmrt * D_Tmrt + + (-5.08220384e-09) * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (-2.24730961e-11) * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (1.17139133e-10) * va * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (6.62154879e-10) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (4.03863260e-13) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (1.95087203e-12) * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (-4.73602469e-12) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt + + (5.12733497e00) * Pa + + (-3.12788561e-01) * Ta * Pa + + (-1.96701861e-02) * Ta * Ta * Pa + + (9.99690870e-04) * Ta * Ta * Ta * Pa + + (9.51738512e-06) * Ta * Ta * Ta * Ta * Pa + + (-4.66426341e-07) * Ta * Ta * Ta * Ta * Ta * Pa + + (5.48050612e-01) * va * Pa + + (-3.30552823e-03) * Ta * va * Pa + + (-1.64119440e-03) * Ta * Ta * va * Pa + + (-5.16670694e-06) * Ta * Ta * Ta * va * Pa + + (9.52692432e-07) * Ta * Ta * Ta * Ta * va * Pa + + (-4.29223622e-02) * va * va * Pa + + (5.00845667e-03) * Ta * va * va * Pa + + (1.00601257e-06) * Ta * Ta * va * va * Pa + + (-1.81748644e-06) * Ta * Ta * Ta * va * va * Pa + + (-1.25813502e-03) * va * va * va * Pa + + (-1.79330391e-04) * Ta * va * va * va * Pa + + (2.34994441e-06) * Ta * Ta * va * va * va * Pa + + (1.29735808e-04) * va * va * va * va * Pa + + (1.29064870e-06) * Ta * va * va * va * va * Pa + + (-2.28558686e-06) * va * va * va * va * va * Pa + + (-3.69476348e-02) * D_Tmrt * Pa + + (1.62325322e-03) * Ta * D_Tmrt * Pa + + (-3.14279680e-05) * Ta * Ta * D_Tmrt * Pa + + (2.59835559e-06) * Ta * Ta * Ta * D_Tmrt * Pa + + (-4.77136523e-08) * Ta * Ta * Ta * Ta * D_Tmrt * Pa + + (8.64203390e-03) * va * D_Tmrt * Pa + + (-6.87405181e-04) * Ta * va * D_Tmrt * Pa + + (-9.13863872e-06) * Ta * Ta * va * D_Tmrt * Pa + + (5.15916806e-07) * Ta * Ta * Ta * va * D_Tmrt * Pa + + (-3.59217476e-05) * va * va * D_Tmrt * Pa + + (3.28696511e-05) * Ta * va * va * D_Tmrt * Pa + + (-7.10542454e-07) * Ta * Ta * va * va * D_Tmrt * Pa + + (-1.24382300e-05) * va * va * va * D_Tmrt * Pa + + (-7.38584400e-09) * Ta * va * va * va * D_Tmrt * Pa + + (2.20609296e-07) * va * va * va * va * D_Tmrt * Pa + + (-7.32469180e-04) * D_Tmrt * D_Tmrt * Pa + + (-1.87381964e-05) * Ta * D_Tmrt * D_Tmrt * Pa + + (4.80925239e-06) * Ta * Ta * D_Tmrt * D_Tmrt * Pa + + (-8.75492040e-08) * Ta * Ta * Ta * D_Tmrt * D_Tmrt * Pa + + (2.77862930e-05) * va * D_Tmrt * D_Tmrt * Pa + + (-5.06004592e-06) * Ta * va * D_Tmrt * D_Tmrt * Pa + + (1.14325367e-07) * Ta * Ta * va * D_Tmrt * D_Tmrt * Pa + + (2.53016723e-06) * va * va * D_Tmrt * D_Tmrt * Pa + + (-1.72857035e-08) * Ta * va * va * D_Tmrt * D_Tmrt * Pa + + (-3.95079398e-08) * va * va * va * D_Tmrt * D_Tmrt * Pa + + (-3.59413173e-07) * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (7.04388046e-07) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (-1.89309167e-08) * Ta * Ta * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (-4.79768731e-07) * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (7.96079978e-09) * Ta * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (1.62897058e-09) * va * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (3.94367674e-08) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (-1.18566247e-09) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (3.34678041e-10) * va * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (-1.15606447e-10) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa + + (-2.80626406e00) * Pa * Pa + + (5.48712484e-01) * Ta * Pa * Pa + + (-3.99428410e-03) * Ta * Ta * Pa * Pa + + (-9.54009191e-04) * Ta * Ta * Ta * Pa * Pa + + (1.93090978e-05) * Ta * Ta * Ta * Ta * Pa * Pa + + (-3.08806365e-01) * va * Pa * 
Pa + + (1.16952364e-02) * Ta * va * Pa * Pa + + (4.95271903e-04) * Ta * Ta * va * Pa * Pa + + (-1.90710882e-05) * Ta * Ta * Ta * va * Pa * Pa + + (2.10787756e-03) * va * va * Pa * Pa + + (-6.98445738e-04) * Ta * va * va * Pa * Pa + + (2.30109073e-05) * Ta * Ta * va * va * Pa * Pa + + (4.17856590e-04) * va * va * va * Pa * Pa + + (-1.27043871e-05) * Ta * va * va * va * Pa * Pa + + (-3.04620472e-06) * va * va * va * va * Pa * Pa + + (5.14507424e-02) * D_Tmrt * Pa * Pa + + (-4.32510997e-03) * Ta * D_Tmrt * Pa * Pa + + (8.99281156e-05) * Ta * Ta * D_Tmrt * Pa * Pa + + (-7.14663943e-07) * Ta * Ta * Ta * D_Tmrt * Pa * Pa + + (-2.66016305e-04) * va * D_Tmrt * Pa * Pa + + (2.63789586e-04) * Ta * va * D_Tmrt * Pa * Pa + + (-7.01199003e-06) * Ta * Ta * va * D_Tmrt * Pa * Pa + + (-1.06823306e-04) * va * va * D_Tmrt * Pa * Pa + + (3.61341136e-06) * Ta * va * va * D_Tmrt * Pa * Pa + + (2.29748967e-07) * va * va * va * D_Tmrt * Pa * Pa + + (3.04788893e-04) * D_Tmrt * D_Tmrt * Pa * Pa + + (-6.42070836e-05) * Ta * D_Tmrt * D_Tmrt * Pa * Pa + + (1.16257971e-06) * Ta * Ta * D_Tmrt * D_Tmrt * Pa * Pa + + (7.68023384e-06) * va * D_Tmrt * D_Tmrt * Pa * Pa + + (-5.47446896e-07) * Ta * va * D_Tmrt * D_Tmrt * Pa * Pa + + (-3.59937910e-08) * va * va * D_Tmrt * D_Tmrt * Pa * Pa + + (-4.36497725e-06) * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + + (1.68737969e-07) * Ta * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + + (2.67489271e-08) * va * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + + (3.23926897e-09) * D_Tmrt * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa + + (-3.53874123e-02) * Pa * Pa * Pa + + (-2.21201190e-01) * Ta * Pa * Pa * Pa + + (1.55126038e-02) * Ta * Ta * Pa * Pa * Pa + + (-2.63917279e-04) * Ta * Ta * Ta * Pa * Pa * Pa + + (4.53433455e-02) * va * Pa * Pa * Pa + + (-4.32943862e-03) * Ta * va * Pa * Pa * Pa + + (1.45389826e-04) * Ta * Ta * va * Pa * Pa * Pa + + (2.17508610e-04) * va * va * Pa * Pa * Pa + + (-6.66724702e-05) * Ta * va * va * Pa * Pa * Pa + + (3.33217140e-05) * va * va * va * Pa * Pa * Pa + + (-2.26921615e-03) * D_Tmrt * Pa * Pa * Pa + + (3.80261982e-04) * Ta * D_Tmrt * Pa * Pa * Pa + + (-5.45314314e-09) * Ta * Ta * D_Tmrt * Pa * Pa * Pa + + (-7.96355448e-04) * va * D_Tmrt * Pa * Pa * Pa + + (2.53458034e-05) * Ta * va * D_Tmrt * Pa * Pa * Pa + + (-6.31223658e-06) * va * va * D_Tmrt * Pa * Pa * Pa + + (3.02122035e-04) * D_Tmrt * D_Tmrt * Pa * Pa * Pa + + (-4.77403547e-06) * Ta * D_Tmrt * D_Tmrt * Pa * Pa * Pa + + (1.73825715e-06) * va * D_Tmrt * D_Tmrt * Pa * Pa * Pa + + (-4.09087898e-07) * D_Tmrt * D_Tmrt * D_Tmrt * Pa * Pa * Pa + + (6.14155345e-01) * Pa * Pa * Pa * Pa + + (-6.16755931e-02) * Ta * Pa * Pa * Pa * Pa + + (1.33374846e-03) * Ta * Ta * Pa * Pa * Pa * Pa + + (3.55375387e-03) * va * Pa * Pa * Pa * Pa + + (-5.13027851e-04) * Ta * va * Pa * Pa * Pa * Pa + + (1.02449757e-04) * va * va * Pa * Pa * Pa * Pa + + (-1.48526421e-03) * D_Tmrt * Pa * Pa * Pa * Pa + + (-4.11469183e-05) * Ta * D_Tmrt * Pa * Pa * Pa * Pa + + (-6.80434415e-06) * va * D_Tmrt * Pa * Pa * Pa * Pa + + (-9.77675906e-06) * D_Tmrt * D_Tmrt * Pa * Pa * Pa * Pa + + (8.82773108e-02) * Pa * Pa * Pa * Pa * Pa + + (-3.01859306e-03) * Ta * Pa * Pa * Pa * Pa * Pa + + (1.04452989e-03) * va * Pa * Pa * Pa * Pa * Pa + + (2.47090539e-04) * D_Tmrt * Pa * Pa * Pa * Pa * Pa + + (1.48348065e-03) * Pa * Pa * Pa * Pa * Pa * Pa + ) return UTCI_approx + def utci_calculator(Ta, RH, Tmrt, va10m): # Program for calculating UTCI Temperature (UTCI) # released for public use after termination of COST Action 730 @@ -229,17 +232,27 @@ def utci_calculator(Ta, RH, Tmrt, va10m): 
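The block above is the full sixth-order UTCI regression; `utci_calculator` (continued below) wraps it with a saturation vapour-pressure fit so that air temperature, relative humidity, mean radiant temperature and the 10 m wind speed collapse into a single equivalent temperature. A scalar usage sketch, with arbitrary example inputs and assuming the package layout of this repo for the import path:

```python
# Illustrative scalar call of the helpers defined in this module; inputs are
# air temperature Ta (deg C), relative humidity RH (%), mean radiant
# temperature Tmrt (deg C) and 10 m wind speed va10m (m/s).
from umep.functions.SOLWEIGpython import UTCI_calculations as utci

utci_deg_c = utci.utci_calculator(Ta=25.0, RH=50.0, Tmrt=55.0, va10m=1.5)
print(round(float(utci_deg_c), 1))  # equivalent temperature in deg C
```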
UTCI_approx = -999 else: # saturation vapour pressure (es) - g = np.array([-2.8365744E3, - 6.028076559E3, 1.954263612E1, - 2.737830188E-2, - 1.6261698E-5, 7.0229056E-10, - 1.8680009E-13, 2.7150305]) + g = np.array( + [ + -2.8365744e3, + -6.028076559e3, + 1.954263612e1, + -2.737830188e-2, + 1.6261698e-5, + 7.0229056e-10, + -1.8680009e-13, + 2.7150305, + ] + ) tk = Ta + 273.15 # ! air temp in K es = g[7] * np.log(tk) for i in range(0, 7): - es = es + g[i] * tk ** (i + 1 - 3.) + es = es + g[i] * tk ** (i + 1 - 3.0) es = np.exp(es) * 0.01 - ehPa = es * RH / 100. + ehPa = es * RH / 100.0 D_Tmrt = Tmrt - Ta Pa = ehPa / 10.0 # use vapour pressure in kPa @@ -250,6 +263,7 @@ def utci_calculator(Ta, RH, Tmrt, va10m): return UTCI_approx + def utci_calculator_grid(Ta, RH, Tmrt, va10m, feedback): # Program for calculating UTCI Temperature (UTCI) # released for public use after termination of COST Action 730 @@ -263,28 +277,39 @@ def utci_calculator_grid(Ta, RH, Tmrt, va10m, feedback): # If Tair or relative humidity is NaN, set all UTCI to -999 if Ta <= -999 or RH <= -999: - UTCI_approx = np.ones((rows, cols)) * -999 + UTCI_approx = np.ones((rows, cols), dtype=np.float32) * -999 # Else, create an empty raster for UTCI and start estimating UTCI else: - UTCI_approx = np.zeros((rows, cols)) + UTCI_approx = np.zeros((rows, cols), dtype=np.float32) # saturation vapour pressure (es) - g = np.array([-2.8365744E3, - 6.028076559E3, 1.954263612E1, - 2.737830188E-2, - 1.6261698E-5, 7.0229056E-10, - 1.8680009E-13, 2.7150305]) + g = np.array( + [ + -2.8365744e3, + -6.028076559e3, + 1.954263612e1, + -2.737830188e-2, + 1.6261698e-5, + 7.0229056e-10, + -1.8680009e-13, + 2.7150305, + ], + dtype=np.float32, + ) tk = Ta + 273.15 # ! air temp in K es = g[7] * np.log(tk) for i in range(0, 7): - es = es + g[i] * tk ** (i + 1 - 3.) + es = es + g[i] * tk ** (i + 1 - 3.0) es = np.exp(es) * 0.01 - ehPa = es * RH / 100. + ehPa = es * RH / 100.0 Pa = ehPa / 10.0 # use vapour pressure in kPa # For progress counter index = 0 - total = 100. / (rows * cols) + total = 100.0 / (rows * cols) # Start UTCI approximation for iy in np.arange(rows): @@ -292,10 +317,9 @@ def utci_calculator_grid(Ta, RH, Tmrt, va10m, feedback): feedback.setProgressText("Calculation cancelled") break for ix in np.arange(cols): - D_Tmrt = Tmrt[iy, ix] - Ta # va = va10m[iy, ix] - + # Set UTCI to NaN (-999) if Tmrt or wind speed is NaN if Tmrt[iy, ix] <= -999 or va10m[iy, ix] <= -999: UTCI_approx[iy, ix] = -9999 @@ -307,4 +331,4 @@ def utci_calculator_grid(Ta, RH, Tmrt, va10m, feedback): index = index + 1 feedback.setProgress(int(index * total)) - return UTCI_approx \ No newline at end of file + return UTCI_approx diff --git a/umep/functions/SOLWEIGpython/solweig_runner.py b/umep/functions/SOLWEIGpython/solweig_runner.py index ab16cb5..a556e8f 100644 --- a/umep/functions/SOLWEIGpython/solweig_runner.py +++ b/umep/functions/SOLWEIGpython/solweig_runner.py @@ -1,14 +1,29 @@ import json import logging +import zipfile +from pathlib import Path from types import SimpleNamespace from typing import Any, Dict, List, Optional import numpy as np -logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) +from ... import common +from ...class_configs import ( + EnvironData, + RasterData, + ShadowMatrices, + SolweigConfig, + SvfData, + TgMaps, + WallsData, +) +from ...tile_manager import TileManager +from . import PET_calculations +from . import Solweig_2025a_calc_forprocessing as so +from . 
import UTCI_calculations as utci +from .CirclePlotBar import PolarBarPlot +from .wallsAsNetCDF import walls_as_netcdf -# handle for QGIS which does not have matplotlib by default? try: from matplotlib import pyplot as plt @@ -16,13 +31,9 @@ except ImportError: PLT = False -from ... import common -from ...class_configs import EnvironData, RasterData, ShadowMatrices, SolweigConfig, SvfData, TgMaps, WallsData -from . import PET_calculations -from . import Solweig_2025a_calc_forprocessing as so -from . import UTCI_calculations as utci -from .CirclePlotBar import PolarBarPlot -from .wallsAsNetCDF import walls_as_netcdf + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) def dict_to_namespace(d): @@ -62,10 +73,14 @@ def __init__( params_json_path: str, amax_local_window_m: int = 100, amax_local_perc: float = 99.9, + use_tiled_loading: bool = False, + tile_size: int = 1000, ): """Initialize the SOLWEIG runner with configuration and parameters.""" logger.info("Starting SOLWEIG setup") self.config = config + self.use_tiled_loading = use_tiled_loading + self.tile_size = tile_size self.config.validate() # Progress tracking settings self.progress = None @@ -88,23 +103,96 @@ def __init__( self.params = dict_to_namespace(params_dict) except Exception as e: raise RuntimeError(f"Failed to load parameters from {params_json_path}: {e}") + # Initialize SVF and Raster data - self.svf_data = SvfData(self.config) - self.raster_data = RasterData( - self.config, - self.params, - self.svf_data, - amax_local_window_m, - amax_local_perc, - ) - # Location data - left_x = self.raster_data.trf_arr[0] - top_y = self.raster_data.trf_arr[3] - lng, lat = common.xy_to_lnglat(self.raster_data.crs_wkt, left_x, top_y) - alt = float(np.nanmedian(self.raster_data.dsm)) - if alt < 0: - alt = 3 - self.location = {"longitude": lng, "latitude": lat, "altitude": alt} + # Check if tiled loading is enabled + if self.use_tiled_loading: + logger.info("Using tiled loading for raster data") + + # Get metadata from DSM to initialize TileManager and Location + dsm_meta = common.get_raster_metadata(self.config.dsm_path) + rows = dsm_meta["rows"] + cols = dsm_meta["cols"] + # Transform is always in GDAL format [c, a, b, f, d, e] + pixel_size = dsm_meta["transform"][1] # transform[1] is pixel width + + # Check if svf.tif exists, if not unzip + # This is critical because tiled loading expects files to be present + svf_file = Path(self.config.working_dir) / "svf.tif" + if not svf_file.exists(): + logger.info("Unzipping SVF files for tiled access...") + with zipfile.ZipFile(self.config.svf_path, "r") as zip_ref: + zip_ref.extractall(self.config.working_dir) + + # Initialize TileManager + self.tile_manager = TileManager( + rows=rows, + cols=cols, + tile_size=self.tile_size, + pixel_size=pixel_size, + buffer_dist=150.0, # Fixed buffer + ) + + # Location data from metadata + # Transform is always in GDAL format [c, a, b, f, d, e] + left_x = dsm_meta["transform"][0] # c (xoff) + top_y = dsm_meta["transform"][3] # f (yoff) + lng, lat = common.xy_to_lnglat(dsm_meta["crs"], left_x, top_y) + + # Altitude approximation (we don't have full DSM loaded) + # Read a small window from center + center_r, center_c = rows // 2, cols // 2 + center_val = common.read_raster_window( + self.config.dsm_path, (slice(center_r, center_r + 1), slice(center_c, center_c + 1)) + ) + alt = float(center_val[0, 0]) + if alt < 0: + alt = 3 + + self.location = {"longitude": lng, "latitude": lat, "altitude": alt} + + # Store metadata for later use + self.rows = 
rows + self.cols = cols + # Transform is already in GDAL format [c, a, b, f, d, e] + self.transform = dsm_meta["transform"] + self.crs = dsm_meta["crs"] + + # We do NOT instantiate RasterData/SvfData here. + self.raster_data = None + self.svf_data = None + self.shadow_mats = None + self.tg_maps = None + self.walls_data = None + + # Store params for tiled instantiation + self.amax_local_window_m = amax_local_window_m + self.amax_local_perc = amax_local_perc + + else: + logger.info("Using eager loading for raster data") + self.svf_data = SvfData(self.config) + self.raster_data = RasterData( + self.config, + self.params, + self.svf_data, + amax_local_window_m, + amax_local_perc, + ) + # Location data + left_x = self.raster_data.trf_arr[0] + top_y = self.raster_data.trf_arr[3] + lng, lat = common.xy_to_lnglat(self.raster_data.crs_wkt, left_x, top_y) + alt = float(np.nanmedian(self.raster_data.dsm)) + if alt < 0: + alt = 3 + self.location = {"longitude": lng, "latitude": lat, "altitude": alt} + + self.rows = self.raster_data.rows + self.cols = self.raster_data.cols + self.transform = self.raster_data.trf_arr + self.crs = self.raster_data.crs_wkt + # weather data if self.config.use_epw_file: self.environ_data = self.load_epw_weather() @@ -112,33 +200,14 @@ def __init__( else: self.environ_data = self.load_met_weather(header_rows=1, delim=" ") logger.info("Weather data loaded from MET file") - # POIs check + if self.config.poi_path: self.load_poi_data() logger.info("POI data loaded from %s", self.config.poi_path) - # Import shadow matrices (Anisotropic sky) - self.shadow_mats = ShadowMatrices(self.config, self.params, self.svf_data) - logger.info("Shadow matrices initialized") - # % Ts parameterisation maps - self.tg_maps = TgMaps( - self.config.use_landcover, - self.params, - self.raster_data, - ) - logger.info("TgMaps initialized") - # Import data for wall temperature parameterization - # Use wall of interest + if self.config.woi_path: self.load_woi_data() logger.info("WOI data loaded from %s", self.config.woi_path) - self.walls_data = WallsData( - self.config, - self.params, - self.raster_data, - self.environ_data, - self.tg_maps, - ) - logger.info("WallsData initialized") def test_hook(self) -> None: """Test hook for testing loaded init state.""" @@ -159,7 +228,7 @@ def load_epw_weather(self) -> EnvironData: def load_met_weather(self, header_rows: int = 1, delim: str = " ") -> EnvironData: """Load weather data from a MET file.""" met_path_str = str(common.check_path(self.config.met_path)) - met_data = np.loadtxt(met_path_str, skiprows=header_rows, delimiter=delim) + met_data = np.loadtxt(met_path_str, skiprows=header_rows, delimiter=delim, dtype=np.float32) return EnvironData( self.config, self.params, @@ -201,7 +270,7 @@ def hemispheric_image(self): """ n_patches = self.shadow_mats.shmat.shape[2] n_pois = self.poi_pixel_xys.shape[0] - patch_characteristics = np.zeros((n_patches, n_pois)) + patch_characteristics = np.zeros((n_patches, n_pois), dtype=np.float32) # Get POI indices as integer arrays poi_y = self.poi_pixel_xys[:, 2].astype(int) @@ -353,6 +422,37 @@ def calc_solweig( ) def run(self) -> None: + """Run the SOLWEIG model.""" + if self.use_tiled_loading: + self.run_tiled() + else: + self.run_standard() + + def run_standard(self) -> None: + # Initialize execution-specific data structures (same pattern as run_tiled) + logger.info("Initializing data for standard execution...") + + # Import shadow matrices (Anisotropic sky) + self.shadow_mats = ShadowMatrices(self.config, self.params, 
self.svf_data) + logger.info("Shadow matrices initialized") + + # Ts parameterisation maps + self.tg_maps = TgMaps( + self.config.use_landcover, + self.params, + self.raster_data, + ) + logger.info("TgMaps initialized") + + self.walls_data = WallsData( + self.config, + self.params, + self.raster_data, + self.environ_data, + self.tg_maps, + ) + logger.info("WallsData initialized") + # Posture settings if self.params.Tmrt_params.Value.posture == "Standing": posture = self.params.Posture.Standing.Value @@ -378,12 +478,12 @@ def run(self) -> None: if np.unique(self.environ_data.DOY).shape[0] > 1: unique_days = np.unique(self.environ_data.DOY) first_unique_day = self.environ_data.DOY[unique_days[0] == self.environ_data.DOY] - I0_array = np.zeros_like(first_unique_day) + I0_array = np.zeros_like(first_unique_day, dtype=np.float32) else: first_unique_day = self.environ_data.DOY.copy() - I0_array = np.zeros_like(self.environ_data.DOY) + I0_array = np.zeros_like(self.environ_data.DOY, dtype=np.float32) # For Tmrt plot - tmrt_agg = np.zeros((self.raster_data.rows, self.raster_data.cols)) + tmrt_agg = np.zeros((self.raster_data.rows, self.raster_data.cols), dtype=np.float32) # Number of iterations num = len(self.environ_data.Ta) # Prepare progress tracking @@ -633,6 +733,7 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, ) if self.config.output_kup: common.save_raster( @@ -641,6 +742,7 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, ) if self.config.output_kdown: common.save_raster( @@ -649,6 +751,7 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, ) if self.config.output_lup: common.save_raster( @@ -657,6 +760,7 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, ) if self.config.output_ldown: common.save_raster( @@ -665,6 +769,7 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, ) if self.config.output_sh: common.save_raster( @@ -673,6 +778,7 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, ) if self.config.output_kdiff: common.save_raster( @@ -681,6 +787,7 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, ) # Sky view image of patches @@ -771,7 +878,8 @@ def run(self) -> None: self.location["altitude"], self.shadow_mats.patch_option, ] - ] + ], + dtype=np.float32, ) np.savetxt( self.config.output_dir + "/treeplantersettings.txt", @@ -790,4 +898,617 @@ def run(self) -> None: self.raster_data.trf_arr, self.raster_data.crs_wkt, self.raster_data.nd_val, + coerce_f64_to_f32=True, + ) + + def run_tiled(self) -> None: + """Run SOLWEIG with tiled loading (Tile -> Timestep loop).""" + logger.info("Starting tiled execution") + + # Posture settings (same as standard) + if self.params.Tmrt_params.Value.posture == "Standing": + posture = self.params.Posture.Standing.Value + else: + posture = self.params.Posture.Sitting.Value + + first = np.round(posture.height) + if first == 0.0: + first = 1.0 + second = np.round(posture.height * 20.0) + + # Time variables + num = len(self.environ_data.Ta) + if num == 0: + logger.error("No timesteps to process") + return + + # 
Validate environ_data arrays have consistent length + if not all( + len(arr) == num + for arr in [self.environ_data.YYYY, self.environ_data.DOY, self.environ_data.hours, self.environ_data.minu] + ): + logger.error("Inconsistent lengths in environ_data arrays") + return + + if self.environ_data.Ta.__len__() == 1: + timestepdec = 0 + else: + timestepdec = self.environ_data.dectime[1] - self.environ_data.dectime[0] + + # Prepare output files + logger.info("Initializing output rasters...") + time_codes = [] + for i in range(num): + # Generate time code + if self.environ_data.altitude[i] > 0: + w = "D" + else: + w = "N" + XH = "0" if self.environ_data.hours[i] < 10 else "" + XM = "0" if self.environ_data.minu[i] < 10 else "" + + time_code = ( + str(int(self.environ_data.YYYY[i])) + + "_" + + str(int(self.environ_data.DOY[i])) + + "_" + + XH + + str(int(self.environ_data.hours[i])) + + XM + + str(int(self.environ_data.minu[i])) + + w + ) + time_codes.append(time_code) + + # Create empty rasters for enabled outputs + outputs = [ + ("output_tmrt", "Tmrt"), + ("output_kup", "Kup"), + ("output_kdown", "Kdown"), + ("output_lup", "Lup"), + ("output_ldown", "Ldown"), + ("output_sh", "Shadow"), + ("output_kdiff", "Kdiff"), + ] + + for cfg_attr, prefix in outputs: + if getattr(self.config, cfg_attr): + out_path = str(self.config.output_dir) + "/" + prefix + "_" + time_code + ".tif" + common.create_empty_raster( + out_path, + self.rows, + self.cols, + self.transform, + str(self.crs) if self.crs else "", + nodata=-9999.0, + ) + + # Create average Tmrt raster + common.create_empty_raster( + str(self.config.output_dir) + "/Tmrt_average.tif", + self.rows, + self.cols, + self.transform, + str(self.crs) if self.crs else "", + nodata=-9999.0, + ) + + # Prepare progress + self.prep_progress(self.tile_manager.total_tiles * num) + + # 1. Initialize State for all tiles + logger.info("Initializing state for all tiles...") + tile_states = [] + tmrt_agg_tiles = [] # Store aggregation per tile + + tiles_list = list(self.tile_manager.get_tiles()) + if len(tiles_list) == 0: + logger.error("No tiles generated by TileManager") + return + + logger.info(f"Initializing {len(tiles_list)} tiles...") + + for tile in tiles_list: + # Load minimal data for TgMaps (lcgrid) + if self.config.use_landcover: + lcgrid = common.read_raster_window(self.config.lc_path, tile.full_slice) + else: + lcgrid = None + + # Mock RasterData for TgMaps initialization + mock_rd = SimpleNamespace( + rows=tile.full_shape[0], + cols=tile.full_shape[1], + lcgrid=lcgrid, + ) + + tg_maps = TgMaps(self.config.use_landcover, self.params, mock_rd, quiet=True) + tile_states.append(tg_maps) + tmrt_agg_tiles.append(np.zeros(tile.core_shape, dtype=np.float32)) + + # Reset time variables + elvis = 0.0 + firstdaytime = 1.0 + timeadd = 0.0 + + # Cache for steradians (constant across tiles and timesteps) + cached_steradians = None + + # 2. Iterate Timesteps + for i in range(num): + logger.info(f"Processing timestep {i + 1}/{num}") + + # Capture current time state for this timestep to ensure all tiles use the same time + current_firstdaytime = firstdaytime + current_timeadd = timeadd + current_timestepdec = timestepdec + + # Variables to hold the next state (will be set by tiles) + next_firstdaytime = None + next_timeadd = None + next_timestepdec = None # 3. 
Iterate Tiles + for tile_idx, tile in enumerate(self.tile_manager.get_tiles()): + self.proceed = self.iter_progress() + if not self.proceed: + break + + # Load Tile Data (Expensive IO) + self.svf_data = SvfData(self.config, tile_spec=tile) + self.raster_data = RasterData( + self.config, + self.params, + self.svf_data, + self.amax_local_window_m, + self.amax_local_perc, + tile_spec=tile, + ) + + # Restore State + self.tg_maps = tile_states[tile_idx] + + # Initialize other components for this tile + # ShadowMatrices and WallsData are stateless (reloaded/recalc per step) + self.shadow_mats = ShadowMatrices(self.config, self.params, self.svf_data, tile_spec=tile) + + # Restore cached steradians if available + if cached_steradians is not None: + self.shadow_mats.steradians = cached_steradians + + self.walls_data = WallsData( + self.config, + self.params, + self.raster_data, + self.environ_data, + self.tg_maps, + tile_spec=tile, + ) + + # Run Calculation + ( + Tmrt, + Kdown, + Kup, + Ldown, + Lup, + Tg, + ea, + esky, + I0, + CI, + shadow, + res_firstdaytime, + res_timestepdec, + res_timeadd, + Tgmap1_new, + Tgmap1E_new, + Tgmap1S_new, + Tgmap1W_new, + Tgmap1N_new, + Keast, + Ksouth, + Kwest, + Knorth, + Least, + Lsouth, + Lwest, + Lnorth, + KsideI, + TgOut1_new, + TgOut, + radIout, + radDout, + Lside, + Lsky_patch_characteristics, + CI_Tg, + CI_TgG, + KsideD, + dRad, + Kside, + steradians_new, + voxelTable, + ) = self.calc_solweig( + i, + elvis, + first, + second, + current_firstdaytime, + current_timeadd, + current_timestepdec, + posture, + ) + + # Explicitly update tile state with new thermal arrays + tile_states[tile_idx].Tgmap1 = Tgmap1_new + tile_states[tile_idx].Tgmap1E = Tgmap1E_new + tile_states[tile_idx].Tgmap1S = Tgmap1S_new + tile_states[tile_idx].Tgmap1W = Tgmap1W_new + tile_states[tile_idx].Tgmap1N = Tgmap1N_new + tile_states[tile_idx].TgOut1 = TgOut1_new + + # Update steradians cache and shadow_mats + if steradians_new is not None: + self.shadow_mats.steradians = steradians_new + + # Capture the next state (idempotent across tiles) + next_firstdaytime = res_firstdaytime + next_timestepdec = res_timestepdec + next_timeadd = res_timeadd + + # Cache steradians if calculated (only happens on first iteration) + if cached_steradians is None and steradians_new is not None and np.any(steradians_new): + cached_steradians = steradians_new + + # Process POIs that intersect this tile's write window + if self.poi_pixel_xys is not None: + write_win = tile.write_window + + # Track POIs processed for validation (only on first timestep, first tile) + if i == 0 and tile_idx == 0: + self._poi_processed_flags = np.zeros(self.poi_pixel_xys.shape[0], dtype=bool) + + for n in range(self.poi_pixel_xys.shape[0]): + idx, row_global, col_global = self.poi_pixel_xys[n] + row_global = int(row_global) + col_global = int(col_global) + + # Check if POI is in this tile's write window + if ( + write_win.row_off <= row_global < write_win.row_off + write_win.height + and write_win.col_off <= col_global < write_win.col_off + write_win.width + ): + # Convert global to tile-local coordinates + row_tile = row_global - tile.read_window.row_off + col_tile = col_global - tile.read_window.col_off + + # Build result row + result_row = { + "poi_idx": idx, + "poi_name": self.poi_names[idx], + "col_idx": col_global, + "row_idx": row_global, + "yyyy": self.environ_data.YYYY[i], + "id": self.environ_data.jday[i], + "it": self.environ_data.hours[i], + "imin": self.environ_data.minu[i], + "dectime": self.environ_data.dectime[i], + 
"altitude": self.environ_data.altitude[i], + "azimuth": self.environ_data.azimuth[i], + "kdir": radIout, + "kdiff": radDout, + "kglobal": self.environ_data.radG[i], + "kdown": Kdown[row_tile, col_tile], + "kup": Kup[row_tile, col_tile], + "keast": Keast[row_tile, col_tile], + "ksouth": Ksouth[row_tile, col_tile], + "kwest": Kwest[row_tile, col_tile], + "knorth": Knorth[row_tile, col_tile], + "ldown": Ldown[row_tile, col_tile], + "lup": Lup[row_tile, col_tile], + "least": Least[row_tile, col_tile], + "lsouth": Lsouth[row_tile, col_tile], + "lwest": Lwest[row_tile, col_tile], + "lnorth": Lnorth[row_tile, col_tile], + "Ta": self.environ_data.Ta[i], + "Tg": TgOut[row_tile, col_tile], + "RH": self.environ_data.RH[i], + "Esky": esky, + "Tmrt": Tmrt[row_tile, col_tile], + "I0": I0, + "CI": CI, + "Shadow": shadow[row_tile, col_tile], + "SVF_b": self.svf_data.svf[row_tile, col_tile], + "SVF_bv": self.raster_data.svfbuveg[row_tile, col_tile], + "KsideI": KsideI[row_tile, col_tile], + } + + # Calculate PET and UTCI + WsPET = (1.1 / self.params.Wind_Height.Value.magl) ** 0.2 * self.environ_data.Ws[i] + WsUTCI = (10.0 / self.params.Wind_Height.Value.magl) ** 0.2 * self.environ_data.Ws[i] + + resultPET = PET_calculations._PET( + self.environ_data.Ta[i], + self.environ_data.RH[i], + Tmrt[row_tile, col_tile], + WsPET, + self.params.PET_settings.Value.Weight, + self.params.PET_settings.Value.Age, + self.params.PET_settings.Value.Height, + self.params.PET_settings.Value.Activity, + self.params.PET_settings.Value.clo, + self.params.PET_settings.Value.Sex, + ) + result_row["PET"] = resultPET + + resultUTCI = utci.utci_calculator( + self.environ_data.Ta[i], self.environ_data.RH[i], Tmrt[row_tile, col_tile], WsUTCI + ) + result_row["UTCI"] = resultUTCI + result_row["CI_Tg"] = CI_Tg + result_row["CI_TgG"] = CI_TgG + result_row["KsideD"] = KsideD[row_tile, col_tile] + result_row["Lside"] = Lside[row_tile, col_tile] + result_row["diffDown"] = dRad[row_tile, col_tile] + result_row["Kside"] = Kside[row_tile, col_tile] + + self.poi_results.append(result_row) + + # Mark POI as processed (first timestep only) + if i == 0 and hasattr(self, "_poi_processed_flags"): + self._poi_processed_flags[n] = True + + # Process WOIs that intersect this tile's write window + if self.config.use_wall_scheme and self.woi_pixel_xys is not None: + write_win = tile.write_window + + # Track WOIs processed for validation (only on first timestep, first tile) + if i == 0 and tile_idx == 0: + self._woi_processed_flags = np.zeros(self.woi_pixel_xys.shape[0], dtype=bool) + + for n in range(self.woi_pixel_xys.shape[0]): + idx, row_global, col_global = self.woi_pixel_xys[n] + row_global = int(row_global) + col_global = int(col_global) + + # Check if WOI is in this tile's write window + if ( + write_win.row_off <= row_global < write_win.row_off + write_win.height + and write_win.col_off <= col_global < write_win.col_off + write_win.width + ): + # Convert global to tile-local coordinates + row_tile = row_global - tile.read_window.row_off + col_tile = col_global - tile.read_window.col_off + + # Extract wall data from voxelTable (uses tile-local coords) + temp_wall = voxelTable.loc[ + ((voxelTable["ypos"] == row_tile) & (voxelTable["xpos"] == col_tile)), "wallTemperature" + ].to_numpy() + K_in = voxelTable.loc[ + ((voxelTable["ypos"] == row_tile) & (voxelTable["xpos"] == col_tile)), "K_in" + ].to_numpy() + L_in = voxelTable.loc[ + ((voxelTable["ypos"] == row_tile) & (voxelTable["xpos"] == col_tile)), "L_in" + ].to_numpy() + wallShade = voxelTable.loc[ 
+ ((voxelTable["ypos"] == row_tile) & (voxelTable["xpos"] == col_tile)), "wallShade" + ].to_numpy() + + result_row = { + "woi_idx": idx, + "woi_name": self.woi_names[idx], + "yyyy": self.environ_data.YYYY[i], + "id": self.environ_data.jday[i], + "it": self.environ_data.hours[i], + "imin": self.environ_data.minu[i], + "dectime": self.environ_data.dectime[i], + "Ta": self.environ_data.Ta[i], + "SVF": self.svf_data.svf[row_tile, col_tile], + "Ts": temp_wall, + "Kin": K_in, + "Lin": L_in, + "shade": wallShade, + "pixel_x": col_global, + "pixel_y": row_global, + } + self.woi_results.append(result_row) + + # Mark WOI as processed (first timestep only) + if i == 0 and hasattr(self, "_woi_processed_flags"): + self._woi_processed_flags[n] = True + + # Crop results to core (remove buffer) + core_slice = tile.core_slice() + Tmrt_core = Tmrt[core_slice] + + # Write outputs + time_code = time_codes[i] + + if self.config.output_tmrt: + common.write_raster_window( + str(self.config.output_dir) + "/Tmrt_" + time_code + ".tif", + Tmrt_core, + tile.write_window.to_slices(), + ) + if self.config.output_kup: + common.write_raster_window( + str(self.config.output_dir) + "/Kup_" + time_code + ".tif", + Kup[core_slice], + tile.write_window.to_slices(), + ) + if self.config.output_kdown: + common.write_raster_window( + str(self.config.output_dir) + "/Kdown_" + time_code + ".tif", + Kdown[core_slice], + tile.write_window.to_slices(), + ) + if self.config.output_lup: + common.write_raster_window( + str(self.config.output_dir) + "/Lup_" + time_code + ".tif", + Lup[core_slice], + tile.write_window.to_slices(), + ) + if self.config.output_ldown: + common.write_raster_window( + str(self.config.output_dir) + "/Ldown_" + time_code + ".tif", + Ldown[core_slice], + tile.write_window.to_slices(), + ) + if self.config.output_sh: + common.write_raster_window( + str(self.config.output_dir) + "/Shadow_" + time_code + ".tif", + shadow[core_slice], + tile.write_window.to_slices(), + ) + if self.config.output_kdiff: + common.write_raster_window( + str(self.config.output_dir) + "/Kdiff_" + time_code + ".tif", + dRad[core_slice], + tile.write_window.to_slices(), + ) + + # Aggregate Tmrt (handle NaN and inf values safely) + # Check for non-finite values and warn if found + if (~np.isfinite(Tmrt_core)).any(): + n_invalid = (~np.isfinite(Tmrt_core)).sum() + logger.warning( + f"Timestep {i + 1}, Tile {tile_idx + 1}: {n_invalid} non-finite Tmrt values detected" + ) + + tmrt_core_safe = np.nan_to_num(Tmrt_core, nan=0.0, posinf=0.0, neginf=0.0) + tmrt_agg_tiles[tile_idx] += tmrt_core_safe + + # Clean up tile data to free memory + self.svf_data = None + self.raster_data = None + self.shadow_mats = None + self.walls_data = None + + if not self.proceed: + break + + # Update state for next timestep + if next_firstdaytime is not None: + firstdaytime = next_firstdaytime + timestepdec = next_timestepdec + timeadd = next_timeadd + else: + # This should not happen unless all tiles failed/were skipped + logger.warning(f"No time state updates from timestep {i + 1}") + if i < num - 1: # Not the last timestep + logger.error("Missing time state updates before final timestep - results may be incorrect") + if i < num - 1: # Not the last timestep + logger.error("Missing time state updates before final timestep - results may be incorrect") + + # Abort if loop was broken + if not self.proceed: + return + + # Validate POI/WOI processing completeness + if hasattr(self, "_poi_processed_flags") and self.poi_pixel_xys is not None: + unprocessed_pois = 
np.where(~self._poi_processed_flags)[0] + if len(unprocessed_pois) > 0: + poi_list = unprocessed_pois.tolist()[:10] + logger.warning( + f"{len(unprocessed_pois)} POIs were outside all tile boundaries and not processed: indices {poi_list}" + ) + + if hasattr(self, "_woi_processed_flags") and self.woi_pixel_xys is not None: + unprocessed_wois = np.where(~self._woi_processed_flags)[0] + if len(unprocessed_wois) > 0: + woi_list = unprocessed_wois.tolist()[:10] + logger.warning( + f"{len(unprocessed_wois)} WOIs were outside all tile boundaries and not processed: indices {woi_list}" + ) + + # Save POI results + if self.poi_results: + self.save_poi_results() + + # Save WOI results + if self.woi_results: + self.save_woi_results() + + # Save Tree Planter results (if needed) + if self.config.output_tree_planter: + logger.info("Generating tree planter settings file...") + # We need shadow_mats for patch_option, but it was cleaned up + # Recreate it temporarily from the last tile + tiles_for_output = list(self.tile_manager.get_tiles()) + if len(tiles_for_output) == 0: + logger.error("Cannot generate tree planter settings: no tiles available") + else: + last_tile = tiles_for_output[-1] + svf_data_temp = SvfData(self.config, tile_spec=last_tile) + shadow_mats_temp = ShadowMatrices(self.config, self.params, svf_data_temp, tile_spec=last_tile) + + pos = 1 if self.params.Tmrt_params.Value.posture == "Standing" else 0 + + settingsHeader = [ + "UTC", + "posture", + "onlyglobal", + "landcover", + "anisotropic", + "cylinder", + "albedo_walls", + "albedo_ground", + "emissivity_walls", + "emissivity_ground", + "absK", + "absL", + "elevation", + "patch_option", + ] + settingsFmt = ( + "%i", + "%i", + "%i", + "%i", + "%i", + "%i", + "%1.2f", + "%1.2f", + "%1.2f", + "%1.2f", + "%1.2f", + "%1.2f", + "%1.2f", + "%i", ) + settingsData = np.array( + [ + [ + int(self.config.utc), + pos, + self.config.only_global, + self.config.use_landcover, + self.config.use_aniso, + self.config.person_cylinder, + self.params.Albedo.Effective.Value.Walls, + self.params.Albedo.Effective.Value.Cobble_stone_2014a, + self.params.Emissivity.Value.Walls, + self.params.Emissivity.Value.Cobble_stone_2014a, + self.params.Tmrt_params.Value.absK, + self.params.Tmrt_params.Value.absL, + self.location["altitude"], + shadow_mats_temp.patch_option, + ] + ], + dtype=np.float32, + ) + np.savetxt( + self.config.output_dir + "/treeplantersettings.txt", + settingsData, + fmt=settingsFmt, + header=", ".join(settingsHeader), + delimiter=" ", + ) + + # Write average Tmrt for all tiles + if num > 0: + for tile_idx, tile in enumerate(self.tile_manager.get_tiles()): + tmrt_avg_tile = tmrt_agg_tiles[tile_idx] / num + common.write_raster_window( + str(self.config.output_dir) + "/Tmrt_average.tif", + tmrt_avg_tile, + tile.write_window.to_slices(), + ) diff --git a/umep/functions/SOLWEIGpython/solweig_runner_core.py b/umep/functions/SOLWEIGpython/solweig_runner_core.py index 7fdfaae..72c3247 100644 --- a/umep/functions/SOLWEIGpython/solweig_runner_core.py +++ b/umep/functions/SOLWEIGpython/solweig_runner_core.py @@ -21,10 +21,19 @@ def __init__( params_json_path: str, amax_local_window_m: int = 100, amax_local_perc: float = 99.9, + use_tiled_loading: bool = False, + tile_size: int = 1024, ): config = SolweigConfig() config.from_file(config_path_str) - super().__init__(config, params_json_path, amax_local_window_m, amax_local_perc) + super().__init__( + config, + params_json_path, + amax_local_window_m, + amax_local_perc, + use_tiled_loading, + tile_size, + ) def 
prep_progress(self, num: int) -> None: """Prepare progress for environment.""" @@ -41,8 +50,8 @@ def load_poi_data(self) -> Tuple[Any, Any]: """Load points of interest (POIs) from a file.""" poi_path_str = str(common.check_path(self.config.poi_path)) pois_gdf = gpd.read_file(poi_path_str) - trf = Affine.from_gdal(*self.raster_data.trf_arr) - self.poi_pixel_xys = np.zeros((len(pois_gdf), 3)) - 999 + trf = Affine.from_gdal(*self.transform) + self.poi_pixel_xys = np.zeros((len(pois_gdf), 3), dtype=np.float32) - 999 self.poi_names = [] for n, (idx, row) in enumerate(pois_gdf.iterrows()): self.poi_names.append(idx) @@ -52,15 +61,15 @@ def load_poi_data(self) -> Tuple[Any, Any]: def save_poi_results(self) -> None: """Save points of interest (POIs) results to a file.""" # Convert pixel coordinates to geographic coordinates - xs = [r["col_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[0] for r in self.poi_results] - ys = [r["row_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[3] for r in self.poi_results] + xs = [r["col_idx"] * self.transform[1] + self.transform[0] for r in self.poi_results] + ys = [r["row_idx"] * self.transform[1] + self.transform[3] for r in self.poi_results] pois_gdf = gpd.GeoDataFrame( self.poi_results, geometry=gpd.points_from_xy( xs, ys, ), - crs=self.raster_data.crs_wkt, + crs=self.crs, ) # Create a datetime column for multi-index pois_gdf["snapshot"] = pd.to_datetime( @@ -79,8 +88,8 @@ def save_poi_results(self) -> None: def load_woi_data(self) -> Tuple[Any, Any]: """Load walls of interest (WOIs) from a file.""" woi_gdf = gpd.read_file(self.config.woi_file) - trf = Affine.from_gdal(*self.raster_data.trf_arr) - self.woi_pixel_xys = np.zeros((len(woi_gdf), 3)) - 999 + trf = Affine.from_gdal(*self.transform) + self.woi_pixel_xys = np.zeros((len(woi_gdf), 3), dtype=np.float32) - 999 self.woi_names = [] for n, (idx, row) in enumerate(woi_gdf.iterrows()): self.woi_names.append(idx) @@ -90,15 +99,15 @@ def load_woi_data(self) -> Tuple[Any, Any]: def save_woi_results(self) -> None: """Save walls of interest (WOIs) results to a file.""" # Convert pixel coordinates to geographic coordinates - xs = [r["col_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[0] for r in self.woi_results] - ys = [r["row_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[3] for r in self.woi_results] + xs = [r["col_idx"] * self.transform[1] + self.transform[0] for r in self.woi_results] + ys = [r["row_idx"] * self.transform[1] + self.transform[3] for r in self.woi_results] woi_gdf = gpd.GeoDataFrame( self.woi_results, geometry=gpd.points_from_xy( xs, ys, ), - crs=self.raster_data.crs_wkt, + crs=self.crs, ) # Create a datetime column for multi-index woi_gdf["snapshot"] = pd.to_datetime( @@ -155,7 +164,7 @@ def load_epw_weather(self) -> EnvironData: "Wind": filtered_df["wind_speed"], "RH": filtered_df["relative_humidity"], "Tair": filtered_df["temp_air"], - "pres": filtered_df["atmospheric_pressure"].astype(float), # Pascal, ensure float + "pres": filtered_df["atmospheric_pressure"].astype(np.float32), # Pascal, ensure float32 "rain": -999, "Kdown": filtered_df["ghi"], "snow": filtered_df["snow_depth"], @@ -182,17 +191,17 @@ def load_epw_weather(self) -> EnvironData: return EnvironData( self.config, self.params, - YYYY=umep_df["iy"].to_numpy(), - DOY=umep_df["id"].to_numpy(), - hours=umep_df["it"].to_numpy(), - minu=umep_df["imin"].to_numpy(), - Ta=umep_df["Tair"].to_numpy(), - RH=umep_df["RH"].to_numpy(), - radG=umep_df["Kdown"].to_numpy(), - 
radD=umep_df["ldown"].to_numpy(), - radI=umep_df["Kdiff"].to_numpy(), - P=umep_df["pres"].to_numpy() / 100.0, # convert from Pa to hPa, - Ws=umep_df["Wind"].to_numpy(), + YYYY=umep_df["iy"].to_numpy(dtype=np.float32), + DOY=umep_df["id"].to_numpy(dtype=np.float32), + hours=umep_df["it"].to_numpy(dtype=np.float32), + minu=umep_df["imin"].to_numpy(dtype=np.float32), + Ta=umep_df["Tair"].to_numpy(dtype=np.float32), + RH=umep_df["RH"].to_numpy(dtype=np.float32), + radG=umep_df["Kdown"].to_numpy(dtype=np.float32), + radD=umep_df["ldown"].to_numpy(dtype=np.float32), + radI=umep_df["Kdiff"].to_numpy(dtype=np.float32), + P=umep_df["pres"].to_numpy(dtype=np.float32) / 100.0, # convert from Pa to hPa, + Ws=umep_df["Wind"].to_numpy(dtype=np.float32), location=self.location, UTC=self.config.utc, ) diff --git a/umep/functions/SOLWEIGpython/solweig_runner_qgis.py b/umep/functions/SOLWEIGpython/solweig_runner_qgis.py index ed628f3..e6c1d6b 100644 --- a/umep/functions/SOLWEIGpython/solweig_runner_qgis.py +++ b/umep/functions/SOLWEIGpython/solweig_runner_qgis.py @@ -15,11 +15,20 @@ def __init__( feedback: Any, amax_local_window_m: int = 100, amax_local_perc: float = 99.9, + use_tiled_loading: bool = False, + tile_size: int = 1024, ): """ """ config = SolweigConfig() config.from_file(config_path_str) - super().__init__(config, params_json_path, amax_local_window_m, amax_local_perc) + super().__init__( + config, + params_json_path, + amax_local_window_m, + amax_local_perc, + use_tiled_loading, + tile_size, + ) self.progress = feedback def prep_progress(self, num: int) -> None: @@ -37,9 +46,9 @@ def iter_progress(self) -> bool: def load_poi_data(self) -> Tuple[Any, Any]: """Load points of interest (POIs) from a file.""" - scale = 1 / self.raster_data.trf_arr[1] + scale = 1 / self.transform[1] poi_names, poi_pixel_xys = pointOfInterest( - self.config.poi_file, self.config.poi_field, scale, self.raster_data.trf_arr + self.config.poi_file, self.config.poi_field, scale, self.transform ) self.poi_names = poi_names self.poi_pixel_xys = poi_pixel_xys @@ -55,16 +64,16 @@ def save_poi_results(self) -> None: with open(output_path, "w") as f: f.write("\t".join(header) + "\n") for result in self.poi_pixel_xys: - lng = result["col_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[0] - lat = result["row_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[3] + lng = result["col_idx"] * self.transform[1] + self.transform[0] + lat = result["row_idx"] * self.transform[1] + self.transform[3] row_values = list(result.values()) + [lng, lat] f.write("\t".join(map(str, row_values)) + "\n") def load_woi_data(self) -> Tuple[Any, Any]: """Load walls of interest (WOIs) from a file.""" - scale = 1 / self.raster_data.trf_arr[1] + scale = 1 / self.transform[1] woi_names, woi_pixel_xys = pointOfInterest( - self.config.woi_file, self.config.woi_field, scale, self.raster_data.trf_arr + self.config.woi_file, self.config.woi_field, scale, self.transform ) self.woi_names = woi_names self.woi_pixel_xys = woi_pixel_xys @@ -80,7 +89,7 @@ def save_woi_results(self) -> None: with open(output_path, "w") as f: f.write("\t".join(header) + "\n") for result in self.woi_pixel_xys: - lng = result["col_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[0] - lat = result["row_idx"] * self.raster_data.trf_arr[1] + self.raster_data.trf_arr[3] + lng = result["col_idx"] * self.transform[1] + self.transform[0] + lat = result["row_idx"] *
self.transform[1] + self.transform[3] row_values = list(result.values()) + [lng, lat] f.write("\t".join(map(str, row_values)) + "\n") diff --git a/umep/functions/dailyshading.py b/umep/functions/dailyshading.py index 0863d89..7da9530 100644 --- a/umep/functions/dailyshading.py +++ b/umep/functions/dailyshading.py @@ -60,7 +60,7 @@ def dailyshading( vegdem2[vegdem2 == dsm] = 0 # Bush separation - bush = np.logical_not((vegdem2 * vegdem)) * vegdem + bush = np.logical_not(vegdem2 * vegdem) * vegdem shtot = np.zeros((dsm_height, dsm_width)) @@ -71,7 +71,7 @@ def dailyshading( alt = np.zeros(itera) azi = np.zeros(itera) - hour = int(0) + hour = 0 index = 0 time = dict() time["UTC"] = UTC @@ -95,13 +95,7 @@ def dailyshading( doy = day_of_year(year, month, day) - ut_time = ( - doy - - 1.0 - + ((hour - dst) / 24.0) - + (minu / (60.0 * 24.0)) - + (0.0 / (60.0 * 60.0 * 24.0)) - ) + ut_time = doy - 1.0 + ((hour - dst) / 24.0) + (minu / (60.0 * 24.0)) + (0.0 / (60.0 * 60.0 * 24.0)) if ut_time < 0: year = year - 1 @@ -131,39 +125,28 @@ def dailyshading( if time["hour"] == 24: time["hour"] = 0 - time_vector = dt.datetime( - year, month, day, time["hour"], time["min"], time["sec"] - ) + time_vector = dt.datetime(year, month, day, time["hour"], time["min"], time["sec"]) timestr = time_vector.strftime("%Y%m%d_%H%M") if alt[i] > 0: if wallshadow == 1: # Include wall shadows (Issue #121) if usevegdem == 1: - vegsh, sh, _, wallsh, _, wallshve, _, _ = ( - shadowingfunction_wallheight_23( - dsm, - vegdem, - vegdem2, - azi[i], - alt[i], - scale, - amaxvalue, - bush, - walls, - dirwalls * np.pi / 180.0, - ) + vegsh, sh, _, wallsh, _, wallshve, _, _ = shadowingfunction_wallheight_23( + dsm, + vegdem, + vegdem2, + azi[i], + alt[i], + scale, + amaxvalue, + bush, + walls, + dirwalls * np.pi / 180.0, ) # create output folders sh = sh - (1 - vegsh) * (1 - psi) if onetime == 0: - filenamewallshve = ( - folder - + "/facade_shdw_veg/facade_shdw_veg_" - + timestr - + "_LST.tif" - ) - common.save_raster( - filenamewallshve, wallshve, dsm_transf, dsm_crs - ) + filenamewallshve = folder + "/facade_shdw_veg/facade_shdw_veg_" + timestr + "_LST.tif" + common.save_raster(filenamewallshve, wallshve, dsm_transf, dsm_crs, coerce_f64_to_f32=True) else: sh, wallsh, _, _, _ = shadowingfunction_wallheight_13( dsm, azi[i], alt[i], scale, walls, dirwalls * np.pi / 180.0 @@ -171,23 +154,14 @@ def dailyshading( # shtot = shtot + sh if onetime == 0: - filename = ( - folder + "/shadow_ground/shadow_ground_" + timestr + "_LST.tif" - ) - common.save_raster(filename, sh, dsm_transf, dsm_crs) - filenamewallsh = ( - folder - + "/facade_shdw_bldgs/facade_shdw_bldgs_" - + timestr - + "_LST.tif" - ) - common.save_raster(filenamewallsh, wallsh, dsm_transf, dsm_crs) + filename = folder + "/shadow_ground/shadow_ground_" + timestr + "_LST.tif" + common.save_raster(filename, sh, dsm_transf, dsm_crs, coerce_f64_to_f32=True) + filenamewallsh = folder + "/facade_shdw_bldgs/facade_shdw_bldgs_" + timestr + "_LST.tif" + common.save_raster(filenamewallsh, wallsh, dsm_transf, dsm_crs, coerce_f64_to_f32=True) else: if usevegdem == 0: - sh = shadow.shadowingfunctionglobalradiation( - dsm, azi[i], alt[i], scale, 0 - ) + sh = shadow.shadowingfunctionglobalradiation(dsm, azi[i], alt[i], scale, 0) # shtot = shtot + sh else: shadowresult = shadow.shadowingfunction_20( @@ -208,7 +182,7 @@ def dailyshading( if onetime == 0: filename = folder + "/Shadow_" + timestr + "_LST.tif" - common.save_raster(filename, sh, dsm_transf, dsm_crs) +
common.save_raster(filename, sh, dsm_transf, dsm_crs, coerce_f64_to_f32=True) shtot = shtot + sh index += 1 @@ -217,15 +191,11 @@ def dailyshading( if wallshadow == 1: if onetime == 1: - filenamewallsh = ( - folder + "/facade_shdw_bldgs/facade_shdw_bldgs_" + timestr + "_LST.tif" - ) - common.save_raster(filenamewallsh, wallsh, dsm_transf, dsm_crs) + filenamewallsh = folder + "/facade_shdw_bldgs/facade_shdw_bldgs_" + timestr + "_LST.tif" + common.save_raster(filenamewallsh, wallsh, dsm_transf, dsm_crs, coerce_f64_to_f32=True) if usevegdem == 1: - filenamewallshve = ( - folder + "/facade_shdw_veg/facade_shdw_veg_" + timestr + "_LST.tif" - ) - common.save_raster(filenamewallshve, wallshve, dsm_transf, dsm_crs) + filenamewallshve = folder + "/facade_shdw_veg/facade_shdw_veg_" + timestr + "_LST.tif" + common.save_raster(filenamewallshve, wallshve, dsm_transf, dsm_crs, coerce_f64_to_f32=True) shadowresult = {"shfinal": shfinal, "time_vector": time_vector} diff --git a/umep/shadow_generator_algorithm.py b/umep/shadow_generator_algorithm.py index 244c78c..55e8214 100644 --- a/umep/shadow_generator_algorithm.py +++ b/umep/shadow_generator_algorithm.py @@ -65,7 +65,7 @@ def generate_shadows( trans_veg: float = 3, trunk_zone_ht_perc: float = 0.25, ): - dsm, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox) + dsm, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox, coerce_f64_to_f32=True) dsm_height, dsm_width = dsm.shape # y rows by x cols dsm_scale = 1 / dsm_transf[1] # y is flipped - so return max for lower row @@ -85,7 +85,9 @@ def generate_shadows( if veg_dsm_path is not None: usevegdem = 1 - veg_dsm, veg_dsm_transf, veg_dsm_crs, _veg_dsm_nd = common.load_raster(veg_dsm_path, bbox) + veg_dsm, veg_dsm_transf, veg_dsm_crs, _veg_dsm_nd = common.load_raster( + veg_dsm_path, bbox, coerce_f64_to_f32=True + ) veg_dsm_height, veg_dsm_width = veg_dsm.shape if not (veg_dsm_width == dsm_width) & (veg_dsm_height == dsm_height): raise ValueError("Error in Vegetation Canopy DSM: All rasters must be of same extent and resolution") @@ -102,11 +104,11 @@ def generate_shadows( if wall_aspect_path and wall_ht_path: print("Facade shadow scheme activated") wallsh = 1 - wh_rast, wh_transf, wh_crs, _wh_nd = common.load_raster(wall_ht_path, bbox) + wh_rast, wh_transf, wh_crs, _wh_nd = common.load_raster(wall_ht_path, bbox, coerce_f64_to_f32=True) wh_height, wh_width = wh_rast.shape if not (wh_width == dsm_width) & (wh_height == dsm_height): raise ValueError("Error in Wall height raster: All rasters must be of same extent and resolution") - wa_rast, wa_transf, wa_crs, _wa_nd = common.load_raster(wall_aspect_path, bbox) + wa_rast, wa_transf, wa_crs, _wa_nd = common.load_raster(wall_aspect_path, bbox, coerce_f64_to_f32=True) wa_height, wa_width = wa_rast.shape if not (wa_width == dsm_width) & (wa_height == dsm_height): raise ValueError("Error in Wall aspect raster: All rasters must be of same extent and resolution") @@ -168,4 +170,10 @@ def generate_shadows( ) shfinal = shadowresult["shfinal"] - common.save_raster(out_path_str + "/shadow_composite.tif", shfinal, dsm_transf, dsm_crs) + common.save_raster( + out_path_str + "/shadow_composite.tif", + shfinal, + dsm_transf, + dsm_crs, + coerce_f64_to_f32=True, + ) diff --git a/umep/skyviewfactor_algorithm.py b/umep/skyviewfactor_algorithm.py index c5e12f1..7c89e3c 100644 --- a/umep/skyviewfactor_algorithm.py +++ b/umep/skyviewfactor_algorithm.py @@ -45,6 +45,8 @@ # %% import logging import os +import shutil +import tempfile import zipfile from 
pathlib import Path @@ -52,6 +54,7 @@ from umep import class_configs, common from umep.functions import svf_functions as svf +from umep.tile_manager import TileManager logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -68,19 +71,297 @@ def generate_svf( trunk_ratio_perc: float = 25, amax_local_window_m: int = 100, amax_local_perc: float = 99.9, + use_tiled_loading: bool = False, + tile_size: int = 1000, + save_shadowmats: bool = True, ): + """ + Generate Sky View Factor outputs. + + Args: + save_shadowmats: Save shadow matrices (required for SOLWEIG anisotropic sky). + Saved as uint8 (75% smaller than float32). Set to False only + if you don't need SOLWEIG's anisotropic modeling. + """ out_path = Path(out_dir) out_path.mkdir(parents=True, exist_ok=True) out_path_str = str(out_path) + # Open the DSM file to get metadata + # If tiled, we only load metadata first + if use_tiled_loading: + dsm_meta = common.get_raster_metadata(dsm_path) + dsm_trf = dsm_meta["transform"] + dsm_crs = dsm_meta["crs"] + dsm_nd = dsm_meta["nodata"] + rows = dsm_meta["rows"] + cols = dsm_meta["cols"] + + # Transform is already in GDAL format [c, a, b, f, d, e] + dsm_pix_size = dsm_trf[1] + dsm_scale = 1 / dsm_pix_size + + # Calculate conservative global amax for buffer estimation + # Load sample data to estimate terrain complexity + sample_dsm, _, _, _ = common.load_raster(dsm_path, bbox=None, coerce_f64_to_f32=True) + + if dem_path is None: + # Without DEM, use DSM range as conservative estimate + global_amax = float(np.nanmax(sample_dsm) - np.nanmin(sample_dsm)) + else: + # With DEM, estimate from height differences + sample_dem, _, _, _ = common.load_raster(dem_path, bbox=None, coerce_f64_to_f32=True) + height_diff = sample_dsm - sample_dem + global_amax = float(np.nanpercentile(height_diff[~np.isnan(height_diff)], 99.9)) + del sample_dem + + # Add safety margin and cap at reasonable maximum + global_amax = min(global_amax * 1.2, 200.0) # 20% safety margin, max 200m + del sample_dsm + + logger.info(f"Estimated global amax: {global_amax:.1f}m for buffer calculation") + + # Initialize TileManager with calculated buffer + tile_manager = TileManager(rows, cols, tile_size, dsm_pix_size, buffer_dist=global_amax) + + if len(tile_manager.tiles) == 0: + raise ValueError(f"TileManager generated 0 tiles for {rows}x{cols} raster with tile_size={tile_size}") + + logger.info(f"Initialized TileManager with {len(tile_manager.tiles)} tiles.") + + # Initialize output rasters + # We need to create empty rasters for all outputs + output_files = ["input-dsm.tif", "svf.tif", "svfE.tif", "svfS.tif", "svfW.tif", "svfN.tif"] + if dem_path: + output_files.append("input-dem.tif") + if cdsm_path: + output_files.extend( + [ + "input-cdsm.tif", + "input-tdsm.tif", + "svfveg.tif", + "svfEveg.tif", + "svfSveg.tif", + "svfWveg.tif", + "svfNveg.tif", + "svfaveg.tif", + "svfEaveg.tif", + "svfSaveg.tif", + "svfWaveg.tif", + "svfNaveg.tif", + "svf_total.tif", + ] + ) + + for fname in output_files: + common.create_empty_raster(out_path_str + "/" + fname, rows, cols, dsm_trf, dsm_crs, nodata=-9999.0) + + # Initialize memory-mapped arrays for shadow matrices (only if needed) + if save_shadowmats: + # 153 patches is standard for this algorithm + patches = 153 + shmat_shape = (rows, cols, patches) + + # Create temp file paths + temp_dir = tempfile.mkdtemp(dir=out_path_str) + shmat_path = os.path.join(temp_dir, "shmat.dat") + vegshmat_path = os.path.join(temp_dir, "vegshmat.dat") + vbshvegshmat_path = os.path.join(temp_dir, 
"vbshvegshmat.dat") + + # Use uint8 instead of float32 for 75% space savings (shadow mats are binary 0/1) + # Calculate memory requirements + memmap_size_mb = (shmat_shape[0] * shmat_shape[1] * shmat_shape[2] * 1) / (1024 * 1024) + logger.info(f"Creating memory-mapped arrays: {memmap_size_mb * 3:.1f} MB total (3 arrays, uint8)") + + # Create memmapped arrays with error handling + try: + shmat_mem = np.memmap(shmat_path, dtype="uint8", mode="w+", shape=shmat_shape) + vegshmat_mem = np.memmap(vegshmat_path, dtype="uint8", mode="w+", shape=shmat_shape) + vbshvegshmat_mem = np.memmap(vbshvegshmat_path, dtype="uint8", mode="w+", shape=shmat_shape) + except OSError as e: + shutil.rmtree(temp_dir, ignore_errors=True) + raise OSError( + f"Failed to create memory-mapped arrays ({memmap_size_mb * 3:.1f} MB). " + f"Check disk space and permissions in {out_path_str}. Error: {e}" + ) from e + + trans_veg = trans_veg_perc / 100.0 + trunk_ratio = trunk_ratio_perc / 100.0 + + # Iterate over tiles + for i, tile in enumerate(tile_manager.get_tiles()): + logger.info(f"Processing tile {i + 1}/{len(tile_manager.tiles)}") + + # Load inputs for tile (with overlap) + dsm_tile = common.read_raster_window(dsm_path, tile.full_slice, band=1) + + dem_tile = None + if dem_path: + dem_tile = common.read_raster_window(dem_path, tile.full_slice, band=1) + + cdsm_tile = None + if cdsm_path: + cdsm_tile = common.read_raster_window(cdsm_path, tile.full_slice, band=1) + + # Preprocess + dsm_tile, dem_tile, cdsm_tile, tdsm_tile, amax = class_configs.raster_preprocessing( + dsm_tile, + dem_tile, + cdsm_tile, + None, + trunk_ratio, + dsm_pix_size, + amax_local_window_m=amax_local_window_m, + amax_local_perc=amax_local_perc, + quiet=True, + ) + + # Compute SVF + use_cdsm_bool = cdsm_path is not None + ret = svf.svfForProcessing153(dsm_tile, cdsm_tile, tdsm_tile, dsm_scale, use_cdsm_bool, amax) + + # Write outputs (core only) + core_slice = tile.core_slice() + write_win = tile.write_window.to_slices() + + # Helper to write core - bind loop vars with default args + def write_core(fname, data, cs=core_slice, ww=write_win): + core_data = data[cs] + common.write_raster_window(out_path_str + "/" + fname, core_data, ww) + + write_core("input-dsm.tif", dsm_tile) + if dem_tile is not None: + write_core("input-dem.tif", dem_tile) + if cdsm_tile is not None: + write_core("input-cdsm.tif", cdsm_tile) + write_core("input-tdsm.tif", tdsm_tile) + + write_core("svf.tif", ret["svf"]) + write_core("svfE.tif", ret["svfE"]) + write_core("svfS.tif", ret["svfS"]) + write_core("svfW.tif", ret["svfW"]) + write_core("svfN.tif", ret["svfN"]) + + if use_cdsm_bool: + write_core("svfveg.tif", ret["svfveg"]) + write_core("svfEveg.tif", ret["svfEveg"]) + write_core("svfSveg.tif", ret["svfSveg"]) + write_core("svfWveg.tif", ret["svfWveg"]) + write_core("svfNveg.tif", ret["svfNveg"]) + write_core("svfaveg.tif", ret["svfaveg"]) + write_core("svfEaveg.tif", ret["svfEaveg"]) + write_core("svfSaveg.tif", ret["svfSaveg"]) + write_core("svfWaveg.tif", ret["svfWaveg"]) + write_core("svfNaveg.tif", ret["svfNaveg"]) + + # Calculate total SVF + svftotal_tile = ret["svf"] - (1 - ret["svfveg"]) * (1 - trans_veg) + write_core("svf_total.tif", svftotal_tile) + + # Write shadow matrices to memmap (if saving) + if save_shadowmats: + # Extract core for 3D arrays - use core_slice with added dimension + core_slice_3d = core_slice + (slice(None),) + + # Destination slice in memmap - use write_window directly + write_slice_3d = tile.write_window.to_slices() + (slice(None),) + + # 
Convert to uint8 (shadow matrices are binary 0/1) + shmat_mem[write_slice_3d] = (ret["shmat"][core_slice_3d] * 255).astype(np.uint8) + vegshmat_mem[write_slice_3d] = (ret["vegshmat"][core_slice_3d] * 255).astype(np.uint8) + vbshvegshmat_mem[write_slice_3d] = (ret["vbshvegshmat"][core_slice_3d] * 255).astype(np.uint8) + + # Flush memmaps periodically? + if i % 10 == 0: + shmat_mem.flush() + vegshmat_mem.flush() + vbshvegshmat_mem.flush() + + # Save shadow matrices (if requested) + if save_shadowmats: + # Flush final + shmat_mem.flush() + vegshmat_mem.flush() + vbshvegshmat_mem.flush() + + # Save shadow matrices as compressed npz (uint8 format) + # We read from the memmapped files + logger.info("Saving shadow matrices to npz (uint8 format, 75% smaller)...") + np.savez_compressed( + out_path_str + "/" + "shadowmats.npz", + shadowmat=shmat_mem, + vegshadowmat=vegshmat_mem, + vbshmat=vbshvegshmat_mem, + dtype="uint8", # Store metadata about dtype + ) + + # Cleanup temp + del shmat_mem + del vegshmat_mem + del vbshvegshmat_mem + shutil.rmtree(temp_dir) + else: + logger.info("Skipping shadow matrix save (not needed for this workflow)") + + # Zip SVF files (same as standard) + zip_filepath = out_path_str + "/" + "svfs.zip" + if os.path.isfile(zip_filepath): + os.remove(zip_filepath) + + with zipfile.ZipFile(zip_filepath, "a") as zippo: + zippo.write(out_path_str + "/" + "svf.tif", "svf.tif") + zippo.write(out_path_str + "/" + "svfE.tif", "svfE.tif") + zippo.write(out_path_str + "/" + "svfS.tif", "svfS.tif") + zippo.write(out_path_str + "/" + "svfW.tif", "svfW.tif") + zippo.write(out_path_str + "/" + "svfN.tif", "svfN.tif") + + if cdsm_path: + zippo.write(out_path_str + "/" + "svfveg.tif", "svfveg.tif") + zippo.write(out_path_str + "/" + "svfEveg.tif", "svfEveg.tif") + zippo.write(out_path_str + "/" + "svfSveg.tif", "svfSveg.tif") + zippo.write(out_path_str + "/" + "svfWveg.tif", "svfWveg.tif") + zippo.write(out_path_str + "/" + "svfNveg.tif", "svfNveg.tif") + zippo.write(out_path_str + "/" + "svfaveg.tif", "svfaveg.tif") + zippo.write(out_path_str + "/" + "svfEaveg.tif", "svfEaveg.tif") + zippo.write(out_path_str + "/" + "svfSaveg.tif", "svfSaveg.tif") + zippo.write(out_path_str + "/" + "svfWaveg.tif", "svfWaveg.tif") + zippo.write(out_path_str + "/" + "svfNaveg.tif", "svfNaveg.tif") + + # Remove individual files + files_to_remove = ["svf.tif", "svfE.tif", "svfS.tif", "svfW.tif", "svfN.tif"] + if cdsm_path: + files_to_remove.extend( + [ + "svfveg.tif", + "svfEveg.tif", + "svfSveg.tif", + "svfWveg.tif", + "svfNveg.tif", + "svfaveg.tif", + "svfEaveg.tif", + "svfSaveg.tif", + "svfWaveg.tif", + "svfNaveg.tif", + ] + ) + + for f in files_to_remove: + try: + os.remove(out_path_str + "/" + f) + except OSError as e: + logger.warning(f"Could not remove temporary file {f}: {e}") + + return + + # Standard execution (non-tiled) # Open the DSM file - dsm, dsm_trf, dsm_crs, dsm_nd = common.load_raster(dsm_path, bbox) + dsm, dsm_trf, dsm_crs, dsm_nd = common.load_raster(dsm_path, bbox, coerce_f64_to_f32=True) dsm_pix_size = dsm_trf[1] dsm_scale = 1 / dsm_pix_size dem = None if dem_path is not None: - dem, dem_trf, dem_crs, _dem_nd = common.load_raster(dem_path, bbox) + dem, dem_trf, dem_crs, _dem_nd = common.load_raster(dem_path, bbox, coerce_f64_to_f32=True) assert dem.shape == dsm.shape, "Mismatching raster shapes for DSM and DEM." assert np.allclose(dsm_trf, dem_trf), "Mismatching spatial transform for DSM and DEM." assert dem_crs == dsm_crs, "Mismatching CRS for DSM and DEM." 
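Note on the uint8 shadow-matrix format: `shadowmats.npz` now stores `shadowmat`, `vegshadowmat`, and `vbshmat` as uint8 scaled by 255, so any consumer that previously expected float arrays in [0, 1] has to rescale on load. A minimal read-back sketch; the helper name is illustrative and not an existing API, only the npz keys and the 255 scaling are taken from the code above:

```python
import numpy as np


def load_shadow_matrices(npz_path: str):
    """Illustrative reader for shadowmats.npz written by generate_svf (uint8 x 255 format)."""
    with np.load(npz_path) as data:
        shmat = data["shadowmat"]
        vegshmat = data["vegshadowmat"]
        vbshmat = data["vbshmat"]
        if shmat.dtype == np.uint8:
            # Rescale back to the float range the anisotropic sky scheme expects.
            shmat = shmat.astype(np.float32) / 255.0
            vegshmat = vegshmat.astype(np.float32) / 255.0
            vbshmat = vbshmat.astype(np.float32) / 255.0
    return shmat, vegshmat, vbshmat
```

Keep in mind that rescaling to float32 roughly quadruples the in-memory size relative to the uint8 archive, so the savings apply to disk and transfer, not to peak RAM at load time.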
@@ -89,7 +370,7 @@ def generate_svf( cdsm = None if cdsm_path is not None: use_cdsm = True - cdsm, cdsm_trf, cdsm_crs, _cdsm_nd = common.load_raster(cdsm_path, bbox) + cdsm, cdsm_trf, cdsm_crs, _cdsm_nd = common.load_raster(cdsm_path, bbox, coerce_f64_to_f32=True) assert cdsm.shape == dsm.shape, "Mismatching raster shapes for DSM and CDSM." assert np.allclose(dsm_trf, cdsm_trf), "Mismatching spatial transform for DSM and CDSM." assert cdsm_crs == dsm_crs, "Mismatching CRS for DSM and CDSM." @@ -118,6 +399,7 @@ def generate_svf( dsm_trf, dsm_crs, dsm_nd, + coerce_f64_to_f32=True, ) if dem is not None: common.save_raster( @@ -126,6 +408,7 @@ def generate_svf( dsm_trf, dsm_crs, dsm_nd, + coerce_f64_to_f32=True, ) if use_cdsm: common.save_raster( @@ -134,6 +417,7 @@ def generate_svf( dsm_trf, dsm_crs, dsm_nd, + coerce_f64_to_f32=True, ) common.save_raster( out_path_str + "/input-tdsm.tif", @@ -141,6 +425,7 @@ def generate_svf( dsm_trf, dsm_crs, dsm_nd, + coerce_f64_to_f32=True, ) # compute @@ -153,11 +438,11 @@ def generate_svf( svfbuN = ret["svfN"] # Save the rasters using rasterio - common.save_raster(out_path_str + "/" + "svf.tif", svfbu, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfE.tif", svfbuE, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfS.tif", svfbuS, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfW.tif", svfbuW, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfN.tif", svfbuN, dsm_trf, dsm_crs) + common.save_raster(out_path_str + "/" + "svf.tif", svfbu, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfE.tif", svfbuE, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfS.tif", svfbuS, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfW.tif", svfbuW, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfN.tif", svfbuN, dsm_trf, dsm_crs, coerce_f64_to_f32=True) # Create or update the ZIP file zip_filepath = out_path_str + "/" + "svfs.zip" @@ -194,16 +479,16 @@ def generate_svf( svfNaveg = ret["svfNaveg"] # Save vegetation rasters - common.save_raster(out_path_str + "/" + "svfveg.tif", svfveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfEveg.tif", svfEveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfSveg.tif", svfSveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfWveg.tif", svfWveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfNveg.tif", svfNveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfaveg.tif", svfaveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfEaveg.tif", svfEaveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfSaveg.tif", svfSaveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfWaveg.tif", svfWaveg, dsm_trf, dsm_crs) - common.save_raster(out_path_str + "/" + "svfNaveg.tif", svfNaveg, dsm_trf, dsm_crs) + common.save_raster(out_path_str + "/" + "svfveg.tif", svfveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfEveg.tif", svfEveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfSveg.tif", svfSveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfWveg.tif", svfWveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfNveg.tif", svfNveg, dsm_trf, dsm_crs, 
coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfaveg.tif", svfaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfEaveg.tif", svfEaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfSaveg.tif", svfSaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfWaveg.tif", svfWaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + common.save_raster(out_path_str + "/" + "svfNaveg.tif", svfNaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) # Add vegetation rasters to the ZIP file with zipfile.ZipFile(zip_filepath, "a") as zippo: @@ -234,16 +519,22 @@ def generate_svf( svftotal = svfbu - (1 - svfveg) * (1 - trans_veg) # Save the final svftotal raster - common.save_raster(out_path_str + "/" + "svf_total.tif", svftotal, dsm_trf, dsm_crs) - - # Save shadow matrices as compressed npz - shmat = ret["shmat"] - vegshmat = ret["vegshmat"] - vbshvegshmat = ret["vbshvegshmat"] - - np.savez_compressed( - out_path_str + "/" + "shadowmats.npz", - shadowmat=shmat, - vegshadowmat=vegshmat, - vbshmat=vbshvegshmat, - ) + common.save_raster(out_path_str + "/" + "svf_total.tif", svftotal, dsm_trf, dsm_crs, coerce_f64_to_f32=True) + + # Save shadow matrices as compressed npz (only if requested) + if save_shadowmats: + shmat = ret["shmat"] + vegshmat = ret["vegshmat"] + vbshvegshmat = ret["vbshvegshmat"] + + # Convert to uint8 for 75% space savings (shadow matrices are binary 0/1) + logger.info("Saving shadow matrices to npz (uint8 format, 75% smaller)...") + np.savez_compressed( + out_path_str + "/" + "shadowmats.npz", + shadowmat=(shmat * 255).astype(np.uint8), + vegshadowmat=(vegshmat * 255).astype(np.uint8), + vbshmat=(vbshvegshmat * 255).astype(np.uint8), + dtype="uint8", # Store metadata about dtype + ) + else: + logger.info("Skipping shadow matrix save (not needed for this workflow)") diff --git a/umep/solweig_algorithm.py b/umep/solweig_algorithm.py index baed3d4..5e761f8 100644 --- a/umep/solweig_algorithm.py +++ b/umep/solweig_algorithm.py @@ -129,7 +129,7 @@ def generate_solweig( Path.mkdir(out_path / "shadows", parents=True, exist_ok=True) Path.mkdir(out_path / "Tmrt", parents=True, exist_ok=True) - dsm, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox) + dsm, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox, coerce_f64_to_f32=True) dsm_trf_affine = Affine.from_gdal(*dsm_transf) dsm_scale = 1 / dsm_transf[1] dsm_height, dsm_width = dsm.shape # y rows by x cols @@ -152,7 +152,9 @@ def generate_solweig( if veg_dsm_path is not None: usevegdem = 1 - veg_dsm, veg_dsm_transf, veg_dsm_crs, _veg_dsm_nd = common.load_raster(veg_dsm_path, bbox) + veg_dsm, veg_dsm_transf, veg_dsm_crs, _veg_dsm_nd = common.load_raster( + veg_dsm_path, bbox, coerce_f64_to_f32=True + ) veg_dsm_height, veg_dsm_width = veg_dsm.shape if not (veg_dsm_width == dsm_width) & (veg_dsm_height == dsm_height): raise ValueError("Error in Vegetation Canopy DSM: All rasters must be of same extent and resolution") @@ -182,23 +184,23 @@ def generate_solweig( zip.extractall(temp_dir) zip.close() - svf, _, _, _ = common.load_raster(temp_dir + "/svf.tif", bbox) - svfN, _, _, _ = common.load_raster(temp_dir + "/svfN.tif", bbox) - svfS, _, _, _ = common.load_raster(temp_dir + "/svfS.tif", bbox) - svfE, _, _, _ = common.load_raster(temp_dir + "/svfE.tif", bbox) - svfW, _, _, _ = common.load_raster(temp_dir + "/svfW.tif", bbox) + svf, _, _, _ = common.load_raster(temp_dir + "/svf.tif", bbox, 
coerce_f64_to_f32=True) + svfN, _, _, _ = common.load_raster(temp_dir + "/svfN.tif", bbox, coerce_f64_to_f32=True) + svfS, _, _, _ = common.load_raster(temp_dir + "/svfS.tif", bbox, coerce_f64_to_f32=True) + svfE, _, _, _ = common.load_raster(temp_dir + "/svfE.tif", bbox, coerce_f64_to_f32=True) + svfW, _, _, _ = common.load_raster(temp_dir + "/svfW.tif", bbox, coerce_f64_to_f32=True) if usevegdem == 1: - svfveg, _, _, _ = common.load_raster(temp_dir + "/svfveg.tif", bbox) - svfNveg, _, _, _ = common.load_raster(temp_dir + "/svfNveg.tif", bbox) - svfSveg, _, _, _ = common.load_raster(temp_dir + "/svfSveg.tif", bbox) - svfEveg, _, _, _ = common.load_raster(temp_dir + "/svfEveg.tif", bbox) - svfWveg, _, _, _ = common.load_raster(temp_dir + "/svfWveg.tif", bbox) - svfaveg, _, _, _ = common.load_raster(temp_dir + "/svfaveg.tif", bbox) - svfNaveg, _, _, _ = common.load_raster(temp_dir + "/svfNaveg.tif", bbox) - svfSaveg, _, _, _ = common.load_raster(temp_dir + "/svfSaveg.tif", bbox) - svfEaveg, _, _, _ = common.load_raster(temp_dir + "/svfEaveg.tif", bbox) - svfWaveg, _, _, _ = common.load_raster(temp_dir + "/svfWaveg.tif", bbox) + svfveg, _, _, _ = common.load_raster(temp_dir + "/svfveg.tif", bbox, coerce_f64_to_f32=True) + svfNveg, _, _, _ = common.load_raster(temp_dir + "/svfNveg.tif", bbox, coerce_f64_to_f32=True) + svfSveg, _, _, _ = common.load_raster(temp_dir + "/svfSveg.tif", bbox, coerce_f64_to_f32=True) + svfEveg, _, _, _ = common.load_raster(temp_dir + "/svfEveg.tif", bbox, coerce_f64_to_f32=True) + svfWveg, _, _, _ = common.load_raster(temp_dir + "/svfWveg.tif", bbox, coerce_f64_to_f32=True) + svfaveg, _, _, _ = common.load_raster(temp_dir + "/svfaveg.tif", bbox, coerce_f64_to_f32=True) + svfNaveg, _, _, _ = common.load_raster(temp_dir + "/svfNaveg.tif", bbox, coerce_f64_to_f32=True) + svfSaveg, _, _, _ = common.load_raster(temp_dir + "/svfSaveg.tif", bbox, coerce_f64_to_f32=True) + svfEaveg, _, _, _ = common.load_raster(temp_dir + "/svfEaveg.tif", bbox, coerce_f64_to_f32=True) + svfWaveg, _, _, _ = common.load_raster(temp_dir + "/svfWaveg.tif", bbox, coerce_f64_to_f32=True) else: svfveg = np.ones((dsm_height, dsm_width)) svfNveg = np.ones((dsm_height, dsm_width)) @@ -218,11 +220,11 @@ def generate_solweig( tmp[tmp < 0.0] = 0.0 svfalfa = np.arcsin(np.exp(np.log(1.0 - tmp) / 2.0)) - wh_rast, wh_transf, wh_crs, _wh_nd = common.load_raster(wall_ht_path, bbox) + wh_rast, wh_transf, wh_crs, _wh_nd = common.load_raster(wall_ht_path, bbox, coerce_f64_to_f32=True) wh_height, wh_width = wh_rast.shape if not (wh_width == dsm_width) & (wh_height == dsm_height): raise ValueError("Error in Wall height raster: All rasters must be of same extent and resolution") - wa_rast, wa_transf, wa_crs, _wa_nd = common.load_raster(wall_aspect_path, bbox) + wa_rast, wa_transf, wa_crs, _wa_nd = common.load_raster(wall_aspect_path, bbox, coerce_f64_to_f32=True) wa_height, wa_width = wa_rast.shape if not (wa_width == dsm_width) & (wa_height == dsm_height): raise ValueError("Error in Wall aspect raster: All rasters must be of same extent and resolution") @@ -678,10 +680,7 @@ def generate_solweig( poi_results.append(result_row) common.save_raster( - out_path_str + "/Tmrt/Tmrt_" + time_code + ".tif", - Tmrt, - dsm_transf, - dsm_crs, + out_path_str + "/Tmrt/Tmrt_" + time_code + ".tif", Tmrt, dsm_transf, dsm_crs, coerce_f64_to_f32=True ) common.save_raster( out_path_str @@ -699,6 +698,7 @@ def generate_solweig( shadow, dsm_transf, dsm_crs, + coerce_f64_to_f32=True, ) # After the main loop, write all POI results to a 
single GPKG file with multi-index @@ -735,6 +735,6 @@ def generate_solweig( umep_df.to_csv(out_path_str + "/metforcing.csv") tmrtplot = tmrtplot / Ta.__len__() # fix average Tmrt instead of sum, 20191022 - common.save_raster(out_path_str + "/Tmrt_average.tif", tmrtplot, dsm_transf, dsm_crs) + common.save_raster(out_path_str + "/Tmrt_average.tif", tmrtplot, dsm_transf, dsm_crs, coerce_f64_to_f32=True) rmtree(temp_dir, ignore_errors=True) diff --git a/umep/tile_manager.py b/umep/tile_manager.py new file mode 100644 index 0000000..6bfc311 --- /dev/null +++ b/umep/tile_manager.py @@ -0,0 +1,355 @@ +""" +Tile manager for lazy loading of large raster datasets with overlaps. +Enables memory-efficient processing by loading data on-demand. +""" + +import logging +from dataclasses import dataclass + +import numpy as np + +from . import common + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + + +@dataclass +class Window: + """Simple window specification with row/col offset and dimensions.""" + + row_off: int + col_off: int + height: int + width: int + + def to_slices(self) -> tuple[slice, slice]: + """Convert to tuple of slices (row_slice, col_slice).""" + return ( + slice(self.row_off, self.row_off + self.height), + slice(self.col_off, self.col_off + self.width), + ) + + +@dataclass +class TileSpec: + """Specification for a tile with overlap.""" + + # Core tile bounds (without overlap) + row_start: int + row_end: int + col_start: int + col_end: int + # Full tile bounds (with overlap) + row_start_full: int + row_end_full: int + col_start_full: int + col_end_full: int + # Overlap sizes + overlap_top: int + overlap_bottom: int + overlap_left: int + overlap_right: int + + @property + def core_shape(self) -> tuple[int, int]: + """Shape of core tile (without overlap).""" + return (self.row_end - self.row_start, self.col_end - self.col_start) + + @property + def full_shape(self) -> tuple[int, int]: + """Shape of full tile (with overlap).""" + return (self.row_end_full - self.row_start_full, self.col_end_full - self.col_start_full) + + def core_slice(self) -> tuple[slice, slice]: + """Get slices for extracting core from full tile.""" + return ( + slice(self.overlap_top, self.overlap_top + self.core_shape[0]), + slice(self.overlap_left, self.overlap_left + self.core_shape[1]), + ) + + @property + def full_slice(self) -> tuple[slice, slice]: + """Get slices for full tile (with overlap).""" + return ( + slice(self.row_start_full, self.row_end_full), + slice(self.col_start_full, self.col_end_full), + ) + + @property + def read_window(self) -> Window: + """Get window for reading full tile (with overlap) from global raster.""" + return Window( + row_off=self.row_start_full, + col_off=self.col_start_full, + height=self.row_end_full - self.row_start_full, + width=self.col_end_full - self.col_start_full, + ) + + @property + def write_window(self) -> Window: + """Get window for writing core tile to global raster.""" + return Window( + row_off=self.row_start, + col_off=self.col_start, + height=self.row_end - self.row_start, + width=self.col_end - self.col_start, + ) + + +class TileManager: + """ + Manages tiled loading of raster data with overlaps for shadow calculations. + + The overlap is calculated based on amaxvalue (max building height) and pixel size + to ensure shadow calculations at tile edges are accurate. + """ + + def __init__( + self, + rows: int, + cols: int, + tile_size: int, + pixel_size: float, + buffer_dist: float = 150.0, + ): + """ + Initialize tile manager. 
+ + Args: + rows: Total number of rows in raster + cols: Total number of columns in raster + tile_size: Size of tiles (without overlap) in pixels + pixel_size: Pixel size in meters + buffer_dist: Buffer distance in meters for overlap (default: 150.0) + """ + self.rows = rows + self.cols = cols + self.tile_size = tile_size + self.pixel_size = pixel_size + self.buffer_dist = buffer_dist + + # Calculate overlap in pixels based on buffer distance + overlap_pixels = int(np.ceil(buffer_dist / pixel_size)) + + # Ensure overlap doesn't exceed tile size + # self.overlap = min(overlap_pixels, tile_size // 2) + # For 150m buffer, we strictly want that coverage. If tile is too small, so be it. + self.overlap = overlap_pixels + + logger.info( + f"TileManager initialized: {rows}x{cols} raster, " + f"tile_size={tile_size}, overlap={self.overlap} pixels " + f"({self.overlap * pixel_size:.1f}m)" + ) + + # Pre-calculate tile specifications + self.tiles = self._generate_tiles() + logger.info(f"Generated {len(self.tiles)} tiles") + + def _generate_tiles(self) -> list[TileSpec]: + """Generate tile specifications with overlaps.""" + tiles = [] + + # Calculate number of tiles in each dimension + n_tiles_row = int(np.ceil(self.rows / self.tile_size)) + n_tiles_col = int(np.ceil(self.cols / self.tile_size)) + + for i in range(n_tiles_row): + for j in range(n_tiles_col): + # Core tile bounds + row_start = i * self.tile_size + row_end = min((i + 1) * self.tile_size, self.rows) + col_start = j * self.tile_size + col_end = min((j + 1) * self.tile_size, self.cols) + + # Calculate overlaps (bounded by raster edges) + overlap_top = self.overlap if i > 0 else 0 + overlap_bottom = self.overlap if row_end < self.rows else 0 + overlap_left = self.overlap if j > 0 else 0 + overlap_right = self.overlap if col_end < self.cols else 0 + + # Full tile bounds with overlap + row_start_full = max(0, row_start - overlap_top) + row_end_full = min(self.rows, row_end + overlap_bottom) + col_start_full = max(0, col_start - overlap_left) + col_end_full = min(self.cols, col_end + overlap_right) + + tiles.append( + TileSpec( + row_start=row_start, + row_end=row_end, + col_start=col_start, + col_end=col_end, + row_start_full=row_start_full, + row_end_full=row_end_full, + col_start_full=col_start_full, + col_end_full=col_end_full, + overlap_top=overlap_top, + overlap_bottom=overlap_bottom, + overlap_left=overlap_left, + overlap_right=overlap_right, + ) + ) + + return tiles + + def get_tile(self, tile_idx: int) -> TileSpec: + """Get tile specification by index.""" + return self.tiles[tile_idx] + + @property + def total_tiles(self) -> int: + """Get total number of tiles.""" + return len(self.tiles) + + def get_tiles(self) -> list[TileSpec]: + """Get list of all tiles.""" + return self.tiles + + +class LazyRasterLoader: + """ + Lazy loader for raster data that loads tiles on demand. + """ + + def __init__( + self, + raster_path: str, + tile_manager: TileManager, + band: int = 0, + coerce_f64_to_f32: bool = True, + ): + """ + Initialize lazy raster loader. 
+ + Args: + raster_path: Path to raster file + tile_manager: TileManager instance + band: Band index to read + coerce_f64_to_f32: Convert float64 to float32 + """ + self.raster_path = raster_path + self.tile_manager = tile_manager + self.band = band + self.coerce_f64_to_f32 = coerce_f64_to_f32 + + # Load metadata only + self._load_metadata() + + # Cache for loaded tiles + self._tile_cache = {} + + def _load_metadata(self): + """Load raster metadata without reading data.""" + meta = common.get_raster_metadata(self.raster_path) + self.shape = (meta["rows"], meta["cols"]) + # We assume float32 if coercing, otherwise we'd need to check meta['dtype'] if we exposed it + self.dtype = np.float32 if self.coerce_f64_to_f32 else np.float64 # Simplified assumption + self.trf_arr = meta["transform"] + self.crs_wkt = meta["crs"] + self.nd_val = meta["nodata"] + + # Verify shape matches tile manager + if self.shape != (self.tile_manager.rows, self.tile_manager.cols): + raise ValueError( + f"Raster shape {self.shape} doesn't match tile manager " + f"({self.tile_manager.rows}, {self.tile_manager.cols})" + ) + + def load_tile(self, tile_idx: int, use_cache: bool = True) -> np.ndarray: + """ + Load a specific tile with overlap. + + Args: + tile_idx: Index of tile to load + use_cache: Whether to use cached tile if available + + Returns: + Tile data with overlap as numpy array + """ + if use_cache and tile_idx in self._tile_cache: + return self._tile_cache[tile_idx] + + tile_spec = self.tile_manager.get_tile(tile_idx) + + window = ( + slice(tile_spec.row_start_full, tile_spec.row_end_full), + slice(tile_spec.col_start_full, tile_spec.col_end_full), + ) + + # Load tile with window + tile_data = common.read_raster_window( + self.raster_path, + window=window, + band=self.band, + ) + + if self.coerce_f64_to_f32 and tile_data.dtype == np.float64: + tile_data = tile_data.astype(np.float32) + + # Ensure contiguous array + tile_data = np.ascontiguousarray(tile_data, dtype=np.float32) + + if use_cache: + self._tile_cache[tile_idx] = tile_data + + return tile_data + + def load_full_raster(self) -> np.ndarray: + """ + Load entire raster (for compatibility with non-tiled code). + Warning: This defeats the purpose of lazy loading! + """ + logger.warning("Loading full raster - this may use significant memory!") + full_data, _, _, _ = common.load_raster( + self.raster_path, + bbox=None, + band=self.band, + coerce_f64_to_f32=self.coerce_f64_to_f32, + ) + return np.ascontiguousarray(full_data, dtype=np.float32) + + def clear_cache(self): + """Clear tile cache to free memory.""" + self._tile_cache.clear() + logger.debug(f"Cleared tile cache for {self.raster_path}") + + +class TiledRasterData: + """ + Container for tiled raster data that mimics the interface of regular numpy arrays + while loading tiles on demand. + """ + + def __init__(self, lazy_loader: LazyRasterLoader): + """ + Initialize tiled raster data. + + Args: + lazy_loader: LazyRasterLoader instance + """ + self.loader = lazy_loader + self.tile_manager = lazy_loader.tile_manager + self.shape = lazy_loader.shape + self.dtype = lazy_loader.dtype + + def get_tile(self, tile_idx: int) -> tuple[np.ndarray, TileSpec]: + """ + Get tile data and specification. 
+ + Returns: + Tuple of (tile_data, tile_spec) + """ + tile_data = self.loader.load_tile(tile_idx) + tile_spec = self.tile_manager.get_tile(tile_idx) + return tile_data, tile_spec + + def to_array(self) -> np.ndarray: + """Convert to full numpy array (loads all data).""" + return self.loader.load_full_raster() + + def __getitem__(self, key): + """Support basic indexing (loads full raster).""" + logger.warning("Direct indexing loads full raster - consider using get_tile()") + return self.to_array()[key] diff --git a/umep/wall_heightaspect_algorithm.py b/umep/wall_heightaspect_algorithm.py index b9b36aa..7d4b6cb 100644 --- a/umep/wall_heightaspect_algorithm.py +++ b/umep/wall_heightaspect_algorithm.py @@ -58,7 +58,7 @@ def generate_wall_hts( wall_limit: float = 1, ): """ """ - dsm_rast, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox) + dsm_rast, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox, coerce_f64_to_f32=True) dsm_scale = 1 / dsm_transf[1] out_path = Path(out_dir) @@ -66,7 +66,7 @@ def generate_wall_hts( out_path_str = str(out_path) walls = wa.findwalls(dsm_rast, wall_limit) - common.save_raster(out_path_str + "/" + "wall_hts.tif", walls, dsm_transf, dsm_crs) + common.save_raster(out_path_str + "/" + "wall_hts.tif", walls, dsm_transf, dsm_crs, coerce_f64_to_f32=True) dirwalls = wa.filter1Goodwin_as_aspect_v3(walls, dsm_scale, dsm_rast) - common.save_raster(out_path_str + "/" + "wall_aspects.tif", dirwalls, dsm_transf, dsm_crs) + common.save_raster(out_path_str + "/" + "wall_aspects.tif", dirwalls, dsm_transf, dsm_crs, coerce_f64_to_f32=True)
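For reference, the tiling geometry in `tile_manager.py` works as follows: the overlap is `ceil(buffer_dist / pixel_size)` pixels, each `TileSpec` exposes a padded read region (`full_slice` / `read_window`) and an unpadded write region (`write_window`), and `core_slice()` extracts the core from the padded tile. A minimal round-trip sketch with hypothetical raster dimensions; the `src` array and the identity "processing" step are placeholders:

```python
import numpy as np

from umep.tile_manager import TileManager

# Hypothetical 2500 x 3200 raster at 1 m resolution with a 40 m shadow buffer.
rows, cols = 2500, 3200
src = np.random.rand(rows, cols).astype(np.float32)
tm = TileManager(rows=rows, cols=cols, tile_size=1000, pixel_size=1.0, buffer_dist=40.0)
# overlap = ceil(40.0 / 1.0) = 40 pixels; ceil(2500/1000) * ceil(3200/1000) = 12 tiles.

mosaic = np.zeros_like(src)
for tile in tm.get_tiles():
    padded = src[tile.full_slice]        # core + overlap, shape == tile.full_shape
    processed = padded                   # placeholder for per-tile shadow / SVF work
    # Write only the core back at its global position; the overlap is discarded.
    mosaic[tile.write_window.to_slices()] = processed[tile.core_slice()]

assert np.array_equal(mosaic, src)       # core-only stitching reproduces the input
```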
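`LazyRasterLoader` and `TiledRasterData` are not yet wired into the algorithms above (which call `common.read_raster_window` directly), but they can be driven the same way. A usage sketch under the assumption that the DSM path exists and that `band=1` is the first band, as in `skyviewfactor_algorithm`; the path and tiling parameters are hypothetical:

```python
from umep import common
from umep.tile_manager import LazyRasterLoader, TileManager, TiledRasterData

dsm_path = "path/to/DSM.tif"  # hypothetical input raster
meta = common.get_raster_metadata(dsm_path)
tm = TileManager(
    rows=meta["rows"],
    cols=meta["cols"],
    tile_size=1000,
    pixel_size=meta["transform"][1],  # GDAL-order transform: index 1 is the pixel size
    buffer_dist=100.0,
)

dsm = TiledRasterData(LazyRasterLoader(dsm_path, tm, band=1))
for idx in range(tm.total_tiles):
    tile_data, tile_spec = dsm.get_tile(idx)  # padded tile plus its TileSpec
    # ... per-tile processing on tile_data ...

dsm.loader.clear_cache()  # release cached tiles once processing is done
```

Because `load_tile` caches every tile by default, long loops over large rasters should either pass `use_cache=False` or call `clear_cache()` periodically, otherwise the cache grows toward the full-raster footprint the tiling is meant to avoid.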