From d73b82314553d7020cc6704f27873e00ebb92206 Mon Sep 17 00:00:00 2001 From: martin-boettcher Date: Wed, 3 Jul 2024 21:27:43 +0200 Subject: [PATCH 01/11] support NetCDF format for cloud mask input (cherry picked from commit 07266591cc49fd488b8cea02691219cac9ef5d4e) --- polymer/params.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/polymer/params.py b/polymer/params.py index 788f673..b548b77 100644 --- a/polymer/params.py +++ b/polymer/params.py @@ -839,7 +839,11 @@ def preprocess(self, l1): # initialize external mask # if self.external_mask is not None: - if isinstance(self.external_mask, str): + if isinstance(self.external_mask, str) and self.external_mask.endswith(".nc"): + import xarray as xr + with xr.open_dataset(self.external_mask) as ds: + self.external_mask = ds["mask"].values + elif isinstance(self.external_mask, str): # read external mask hdf = SD(self.external_mask) self.external_mask = hdf.select('mask').get() From 7ab81db475e3767f41772a6fcc377107f3396bc5 Mon Sep 17 00:00:00 2001 From: martin-boettcher Date: Wed, 3 Jul 2024 21:31:52 +0200 Subject: [PATCH 02/11] fix lut interpolation in case of non-finite inputs (cherry picked from commit 8dba76df0eb93fa084fde1cbea6e14303be49c84) --- polymer/luts.py | 1 + 1 file changed, 1 insertion(+) diff --git a/polymer/luts.py b/polymer/luts.py index 478aa83..1a9c8c0 100644 --- a/polymer/luts.py +++ b/polymer/luts.py @@ -432,6 +432,7 @@ def __getitem__(self, keys): interpolate = True inf = k.astype('int') inf[inf == self.data.shape[i]-1] -= 1 + inf[np.isfinite(k) == 0] = 0 x = k-inf if k.ndim > 0: x = x.reshape(shp_res) From 5c97935a43df91fb1eae33b77f00ae2306ac3b2c Mon Sep 17 00:00:00 2001 From: martin-boettcher Date: Wed, 3 Jul 2024 21:30:43 +0200 Subject: [PATCH 03/11] support msiresampled input with different metadata attributes and ancillary band names (cherry picked from commit 9ca5008b86d02c8a82428437bccb8cf70b31baa7) --- polymer/level1_netcdf.py | 83 ++++++++++++++++++++++++++++++++++++++-- 1 file changed, 79 insertions(+), 4 deletions(-) diff --git a/polymer/level1_netcdf.py b/polymer/level1_netcdf.py index 89e21d8..c34f26a 100644 --- a/polymer/level1_netcdf.py +++ b/polymer/level1_netcdf.py @@ -6,6 +6,7 @@ from polymer.block import Block from netCDF4 import Dataset import numpy as np +import xarray as xr from warnings import warn from datetime import datetime from polymer.common import L2FLAGS @@ -61,7 +62,10 @@ def __init__(self, filename, try: title = self.root.getncattr('title') except AttributeError: - title = self.root.getncattr('product_type') + try: + title = self.root.getncattr('product_type') + except AttributeError: + title = self.root.getncattr('type') if 'MERIS' in title: self.sensor = 'MERIS' @@ -128,9 +132,27 @@ def __init__(self, filename, } # get platform name - metadata = self.root.variables['metadata'] - self.platform = metadata.getncattr('Level-1C_User_Product:General_Info:Product_Info:Datatake:SPACECRAFT_NAME') + if 'metadata' in self.root.variables: + metadata = self.root.variables['metadata'] + self.platform = metadata.getncattr('Level-1C_User_Product:General_Info:Product_Info:Datatake:SPACECRAFT_NAME') + else: + self.platform = self.root.getncattr('platform') self.platform = self.platform.replace('entinel-', '') + + elif title == "MSIL1C": + self.sensor = 'MSI' + self.varnames = { + 'latitude': 'lat', + 'longitude': 'lon', + 'SZA': 'sun_zenith', + 'VZA': 'view_zenith_mean', + 'SAA': 'sun_azimuth', + 'VAA': 'view_azimuth_mean', + } + + # get platform name + self.platform = 
self.root.getncattr('platform')
+
 
         else:
             raise Exception('Could not identify sensor from "{}"'.format(title))
 
@@ -153,7 +175,9 @@ def __init__(self, filename,
             self.ancillary = Ancillary_NASA()
         else:
             self.ancillary = ancillary
-        if self.ancillary is not None:
+        if self.ancillary == 'ECMWFT':
+            self.init_ancillary_embedded()
+        elif self.ancillary is not None:
             self.init_ancillary()
 
         self.init_bands()
@@ -217,6 +241,40 @@ def init_ancillary(self):
         self.ancillary_files.update(self.wind_speed.filename)
         self.ancillary_files.update(self.surf_press.filename)
 
+    def init_ancillary_embedded(self):
+        #lat = self.read_band("aux_latitude", (9,9), (0,0))
+        #lon = self.read_band("aux_longitude", (9,9), (0,0))
+        lat = np.array(self.root.variables["aux_latitude"])
+        lon = np.array(self.root.variables["aux_longitude"])
+        if '_0u' in self.root.variables:
+            u10 = self.read_band("_0u", (9,9), (0,0))
+            v10 = self.read_band("_0v", (9,9), (0,0))
+        else:
+            u10 = self.read_band("u10", (9,9), (0,0))
+            v10 = self.read_band("v10", (9,9), (0,0))
+        tco3 = self.read_band("tco3", (9,9), (0,0))
+        msl = self.read_band("msl", (9,9), (0,0))
+        wind_speed = np.sqrt(u10 * u10 + v10 * v10)
+        #assert tco3.units == 'kg m**-2'
+        ozone = tco3 / 2.1415e-5 # convert kg/m2 to DU
+        #assert msl.units == 'Pa'
+        surf_press = msl / 100
+        #coord_lat = xr.DataArray(lat[:,0], dims=['latitude'])
+        #coord_lon = xr.DataArray(lon[0], dims=['longitude'])
+        coord_lat = xr.DataArray(lat, dims=['latitude'])
+        coord_lon = xr.DataArray(lon, dims=['longitude'])
+        self.wind_speed = xr.DataArray(wind_speed,
+                                       coords={'latitude': coord_lat, 'longitude': coord_lon},
+                                       dims=['latitude', 'longitude'])
+        self.ozone = xr.DataArray(ozone,
+                                  coords={'latitude': coord_lat, 'longitude': coord_lon},
+                                  dims=['latitude', 'longitude'],
+                                  attrs={'units': "DU"})
+        self.surf_press = xr.DataArray(surf_press,
+                                       coords={'latitude': coord_lat, 'longitude': coord_lon},
+                                       dims=['latitude', 'longitude'],
+                                       attrs={'units': "hPa"})
+
     def read_date(self, date):
         ''' parse a date in the format 04-JUL-2017 12:31:28.013924 '''
         date = date.replace('JAN', '01')
@@ -419,6 +477,23 @@ def read_block(self, size, offset, bands):
             mwind = self.read_band(self.varnames['mwind'], size, offset)
             block.wind_speed = np.sqrt(zwind**2 + mwind**2)
             P0 = self.read_band(self.varnames['press'], size, offset)
+        elif self.sensor == 'MSI' and self.ancillary == 'ECMWFT':
+            # ancillary data embedded in Level1
+            block.ozone = self.ozone.interp(
+                latitude=xr.DataArray(block.latitude),
+                longitude=xr.DataArray(block.longitude),
+                # method='nearest'
+            ).values
+            block.wind_speed = self.wind_speed.interp(
+                latitude=xr.DataArray(block.latitude),
+                longitude=xr.DataArray(block.longitude),
+                # method='nearest'
+            ).values
+            P0 = self.surf_press.interp(
+                latitude=xr.DataArray(block.latitude),
+                longitude=xr.DataArray(block.longitude),
+                # method='nearest'
+            ).values
         elif self.sensor == 'MSI':
             block.ozone = self.ozone[block.latitude, block.longitude]
             block.wind_speed = self.wind_speed[block.latitude, block.longitude]

From 31413033d71702abceeccc46df04b4181866a7e6 Mon Sep 17 00:00:00 2001
From: martin-boettcher
Date: Thu, 11 Jul 2024 19:33:55 +0200
Subject: [PATCH 04/11] fix multiprocessing to release output file before it
 is renamed

---
 polymer/main.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/polymer/main.py b/polymer/main.py
index 116cf7d..7ef4dfa 100644
--- a/polymer/main.py
+++ b/polymer/main.py
@@ -525,11 +525,12 @@ def run_atm_corr(level1, level2, **kwargs):
     params.processing_duration = 
datetime.now()-t0
     params.update(**l1.attributes('%Y-%m-%d %H:%M:%S'))
     params.update(**l2.attributes())
 
-    l2.finish(params)
 
     if params.multiprocessing != 0:
         pool.terminate()
 
+    l2.finish(params)
+
     if params.verbose:
         print('Done in {}'.format(datetime.now()-t0))

From ce596b964b89fdb78f8810d7e3f80fd4920cd1bb Mon Sep 17 00:00:00 2001
From: martin-boettcher
Date: Fri, 12 Jul 2024 10:24:41 +0200
Subject: [PATCH 05/11] implement Copernicus 90m and 30m DEM support

---
 polymer/copernicus_dem.py    | 226 +++++++++++++++++++++++++++++++++++
 tests/test_copernicus_dem.py |  35 ++++++
 2 files changed, 261 insertions(+)
 create mode 100644 polymer/copernicus_dem.py
 create mode 100644 tests/test_copernicus_dem.py

diff --git a/polymer/copernicus_dem.py b/polymer/copernicus_dem.py
new file mode 100644
index 0000000..59cd4e0
--- /dev/null
+++ b/polymer/copernicus_dem.py
@@ -0,0 +1,226 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function, division, absolute_import
+import ssl
+import certifi
+from urllib.request import urlopen
+import numpy as np
+import os
+import os.path
+from glob import glob
+import sys
+import xarray as xr
+
+copernicus_30m_dem_url_prefix = "https://copernicus-dem-30m.s3.eu-central-1.amazonaws.com"
+copernicus_90m_dem_url_prefix = "https://copernicus-dem-90m.s3.eu-central-1.amazonaws.com"
+
+class CopernicusDEM(object):
+    """
+    Copernicus digital elevation model, 90m or 30m, with functions
+    to get the altitude above the geoid for a lat-lon grid and
+    to download missing DEM tiles on demand via the internet.
+
+    CopernicusDEM().get(lat, lon)
+    where lat and lon are array-like, returns an array-like altitude in m
+
+    The DEM is a grid of tie points, not of pixels. The altitude is given for
+    the degree line. There are 1200 rows per degree. The bottom row is missing
+    as it is part of the next tile. The number of columns depends on the
+    latitude. There are 1200 columns per tile at the equator. From 50 degrees
+    onwards it is 800 columns, and so on.
+
+        Latitude band      Longitude spacing   width (columns)
+        0 - 50             1x                  1200
+        50 - 60            1.5x                800
+        60 - 70            2x                  600
+        70 - 80            3x                  400
+        80 - 85            5x                  240
+        85 - 90            10x                 120
+
+    DEM rows are ordered from North to South. The first line is the North-most line.
+    DEM columns are ordered from West to East.
+
+    Naming convention for the 1 degree tiles in COP format, example:
+
+    Copernicus_DSM_COG_30_N54_00_E009_00_DEM.tif
+
+    Upper Left  (  8.9993750,  55.0004167) (  8d59'57.75"E, 55d 0' 1.50"N)
+    Lower Left  (  8.9993750,  54.0004167) (  8d59'57.75"E, 54d 0' 1.50"N)
+    Upper Right (  9.9993750,  55.0004167) (  9d59'57.75"E, 55d 0' 1.50"N)
+    Lower Right (  9.9993750,  54.0004167) (  9d59'57.75"E, 54d 0' 1.50"N)
+
+    The name denotes the lower left corner. S and W are like minus signs.
+    """
+    def __init__(self,
+                 directory='auxdata-Copernicus-90m-Global-DEM',
+                 resolution=90,
+                 missing=0.0,
+                 verbose=False,
+                 with_download=True):
+        """
+        Memorises DEM parameters
+
+        :param directory: where to find and store downloaded tiles, must exist
+        :param resolution: 90 or 30, default 90
+        :param missing: what to provide in case of missing value
+                        * a float, e.g. 
0.0 (default) + * None : raise an error + :param verbose: with trace output + :param with_download: whether to allow download of missing tiles, default True + """ + self.cache_directory = directory + self.resolution = resolution + self.verbose = verbose + self.missing = missing + if resolution == 90: + self.url_prefix = copernicus_90m_dem_url_prefix + elif resolution == 30: + self.url_prefix = copernicus_30m_dem_url_prefix + else: + raise ValueError("resolution " + str(resolution) + " does not match Copernicus DEM. 90 or 30 expected.") + if not os.path.exists(directory): + raise IOError('Directory "{}" does not exist'.format(directory)) + self.with_download = with_download + if self.with_download: + self.available = self._list_available_tiles() + if self.verbose: + print(str(len(self.available)-1) + " remote DEM tiles existing") + if self.verbose: + local = glob(os.path.join(self.cache_directory, "*.tif")) + print(str(len(local)) + " local DEM tile available") + + def get(self, lat, lon): + """ + Reads Copernicus DEM data for an array (lat, lon), downloads tiles on demand + """ + # initialise altitude + altitude = np.empty(lat.shape, np.float32) + altitude[:] = np.nan + # round lat and lon, encode as bin index per pixel of 360 cols and 180 rows starting at -90/-180 + # (A pixel center less than half a pixel above the degree line is covered by this tile) + # (A pixel center less than half a pixel left of the degree line is covered by this tile) + tile_height = 1200 * 90 / self.resolution + half_pixel_height = 0.5 / tile_height + rows = np.floor(np.array(lat) - half_pixel_height).astype(np.int32) + tile_width = np.empty(lat.shape, dtype=np.int16) + tile_width[:] = tile_height + tile_width[(rows>50)|(rows<-50)] = tile_height * 2 // 3 + tile_width[(rows>60)|(rows<-60)] = tile_height // 2 + tile_width[(rows>70)|(rows<-70)] = tile_height // 3 + tile_width[(rows>80)|(rows<-80)] = tile_height // 5 + tile_width[(rows>85)|(rows<-85)] = tile_height // 10 + half_pixel_width = 0.5 / tile_width + cols = np.floor(np.array(lon) + half_pixel_width).astype(np.int32) + bin_index = (rows + 90) * 360 + cols + 180 + del rows, cols + # determine set of different bin cells + bin_set = np.unique(bin_index) + # loop over 1-deg bin cells required to cover grid + for bin in bin_set: + # determine lat and lon of lower left corner of bin cell + row = bin // 360 - 90 + col = bin % 360 - 180 + # ensure DEM tile is available locally + local_path = self._download_tile(row, col) + if local_path is None: + continue + # read file content + if self.verbose: + print("reading DEM tile " + local_path) + with xr.open_dataset(local_path, engine="rasterio") as ds: + dem = ds.band_data.values[0] + # transfer content subset into target altitude + is_inside_tile = (bin_index == bin) + # (A pixel center less than half a pixel above the degree line is covered by this tile) + # (A pixel center less than half a pixel left of the degree line is covered by this tile) + dem_row = (((row + 1) - np.array(lat)[is_inside_tile] + half_pixel_height) * tile_height).astype(np.int32) + dem_col = ((np.array(lon)[is_inside_tile] - col + half_pixel_width[is_inside_tile]) * tile_width[is_inside_tile]).astype(np.int32) + altitude[is_inside_tile] = dem[(dem_row, dem_col)] + del dem_row, dem_col, is_inside_tile, dem + del bin_index, half_pixel_width, tile_width + # fill in missing values + if self.missing is not None: # assuming float + altitude[np.isnan(altitude)] = self.missing + assert not np.isnan(altitude).any() + #self._write_dem_tile_to_file(alt, lat, 
lon) + return altitude + + def fetch_all(self): + lat, lon = np.meshgrid(np.linspace(-90, 90, 180), np.linspace(-180, 180, 360)) + self.get(lat, lon) + + + def _list_available_tiles(self): + list_filename = "tileList.txt" + local_list_path = os.path.join(self.cache_directory, list_filename) + if not os.path.exists(local_list_path): + url = "{}/{}".format(self.url_prefix, list_filename) + if self.verbose: + print('downloading... ', end='') + sys.stdout.flush() + with urlopen(url, context=ssl.create_default_context(cafile=certifi.where())) as response: + content = response.read() + tmp_path = local_list_path + '.tmp' + with open(tmp_path, 'wb') as fp: + fp.write(content) + os.rename(tmp_path, local_list_path) + if self.verbose: + print(list_filename) + with open(local_list_path) as fp: + return fp.read().split("\n") + + def _download_tile(self, row, col): + """ + Checks availability and downloads one DEM tile and stores it + in directory specified as constructor parameter. + + :param row: degrees north, -90..89 + :param col: degrees east, -180..179 + :return local path to DEM file downloaded or existing, None if there is no such tile + """ + # Copernicus_DSM_COG_30_N00_00_E006_00_DEM.tif + dem_filename = "Copernicus_DSM_COG_{}_{}{:02d}_00_{}{:03d}_00_DEM.tif".format( + self.resolution // 3, # arc seconds + "N" if row >= 0 else "S", + np.abs(row), + "E" if col >= 0 else "W", + np.abs(col) + ) + local_path = os.path.join(self.cache_directory, dem_filename) + # shortcuts + if os.path.exists(local_path): + return local_path + if not self.with_download or dem_filename[:-len(".tif")] not in self.available: + return None + # download + # https://.../Copernicus_DSM_COG_30_N00_00_E006_00_DEM/Copernicus_DSM_COG_30_N00_00_E006_00_DEM.tif + url = "{}/{}/{}".format(self.url_prefix, dem_filename[:-len(".tif")], dem_filename) + if self.verbose: + print('downloading... 
', end='') + sys.stdout.flush() + with urlopen(url, context=ssl.create_default_context(cafile=certifi.where())) as response: + content = response.read() + tmp_path = local_path + '.tmp' + with open(tmp_path, 'wb') as fp: + fp.write(content) + os.rename(tmp_path, local_path) + if self.verbose: + print(dem_filename) + return local_path + + def _write_dem_tile_to_file(self, alt, lat, lon): + # test output + alt_da = xr.DataArray(alt, dims=['y', 'x'], attrs={"long_name": "altitude above geoid in meters"}) + lat_da = xr.DataArray(lat, dims=['y', 'x'], attrs={"standard_name": "latitude"}) + lon_da = xr.DataArray(lon, dims=['y', 'x'], attrs={"standard_name": "longitude"}) + ds = xr.Dataset({"alt": alt_da}, + coords={"lat": lat_da, "lon": lon_da}, + attrs={"description": "tile with nearest-interpolated DEM data"}) + filename = "interpolated_dem_" + str(lat[0,0]) + "_" + str(lon[0,0]) + ".nc" + ds.to_netcdf(filename) + +if __name__ == "__main__": + # download all to the directory provided in argument + directory = sys.argv[1] + CopernicusDEM(directory=directory, verbose=True).fetch_all() diff --git a/tests/test_copernicus_dem.py b/tests/test_copernicus_dem.py new file mode 100644 index 0000000..c903d03 --- /dev/null +++ b/tests/test_copernicus_dem.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import numpy as np +from tempfile import TemporaryDirectory +from polymer.copernicus_dem import CopernicusDEM + + +def test_download(): + with TemporaryDirectory() as tmpdir: + cop_dem = CopernicusDEM(directory=tmpdir, verbose=True) + local_path = cop_dem._download_tile(-1, 34) + assert local_path.endswith("Copernicus_DSM_COG_30_S01_00_E034_00_DEM.tif") + local_path = cop_dem._download_tile(-1, 34) + +def test_get(): + lat = np.tile(np.arange(0, -1.05, -0.1), (11, 1)).T + lon = np.tile(np.arange(34.0, 35.05, 0.1), (11, 1)) + with TemporaryDirectory() as tmpdir: + cop_dem = CopernicusDEM(directory=tmpdir) + alt = cop_dem.get(lat, lon) + assert np.abs(lat[5, 5] - (-0.5)) < 0.0001 + assert np.abs(lon[5, 5] - 34.5) < 0.0001 + assert np.abs(alt[5, 5] - 1175.619384765625) < 0.0001 + +def test_get_55(): + lat = np.tile(np.arange(56, 54.95, -0.1), (11, 1)).T + lon = np.tile(np.arange(11.0, 12.05, 0.1), (11, 1)) + with TemporaryDirectory() as tmpdir: + cop_dem = CopernicusDEM(directory=tmpdir) + alt = cop_dem.get(lat, lon) + assert np.abs(lat[5, 5] - (55.5)) < 0.0001 + assert np.abs(lon[5, 5] - 11.5) < 0.0001 + assert np.abs(alt[5, 5] - 30.285972595214844) < 0.0001 + From f1ac79c8170c42c024d6ade9799f782e8d9e576d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois=20Steinmetz?= Date: Tue, 11 Jun 2024 17:07:19 +0200 Subject: [PATCH 06/11] Fix autodetection of landsat L1 products --- polymer/level1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/polymer/level1.py b/polymer/level1.py index 4d6b5bd..7fcb010 100644 --- a/polymer/level1.py +++ b/polymer/level1.py @@ -57,7 +57,7 @@ def autodetect(self): elif self.detect_msi(): self.sensor = 'msi' - elif b.startswith('LC8'): + elif b.startswith('LC8') or b.startswith('LC08'): self.sensor = 'landsat8' else: From b1665c03932be49fca883e54f0d8a3e9cb7f3cdd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois=20Steinmetz?= Date: Tue, 11 Jun 2024 17:52:35 +0200 Subject: [PATCH 07/11] Fix exemple.py --- example.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/example.py b/example.py index 42eac11..896337c 100644 --- a/example.py +++ b/example.py @@ -1,7 +1,9 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from 
polymer.main import run_atm_corr, Level1, Level2 +from polymer.main import run_atm_corr +from polymer.level1 import Level1 +from polymer.level2 import Level2 from polymer.level2_hdf import Level2_HDF from polymer.level2_nc import Level2_NETCDF from polymer.level1_ascii import Level1_ASCII From c15d7e38abf3ac4f403ef8ae8d48645fd063b8a1 Mon Sep 17 00:00:00 2001 From: martin-boettcher Date: Fri, 12 Jul 2024 16:38:48 +0200 Subject: [PATCH 08/11] fix typo in trace output --- polymer/copernicus_dem.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/polymer/copernicus_dem.py b/polymer/copernicus_dem.py index 59cd4e0..1219fef 100644 --- a/polymer/copernicus_dem.py +++ b/polymer/copernicus_dem.py @@ -88,7 +88,7 @@ def __init__(self, print(str(len(self.available)-1) + " remote DEM tiles existing") if self.verbose: local = glob(os.path.join(self.cache_directory, "*.tif")) - print(str(len(local)) + " local DEM tile available") + print(str(len(local)) + " local DEM tiles available") def get(self, lat, lon): """ From 93a70bd9e5a0c6278ea9c53595ec9f9587734667 Mon Sep 17 00:00:00 2001 From: martin-boettcher Date: Fri, 19 Jul 2024 07:32:05 +0200 Subject: [PATCH 09/11] fix steps of Copernicus DEM latitudes --- polymer/copernicus_dem.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/polymer/copernicus_dem.py b/polymer/copernicus_dem.py index 1219fef..6094dcc 100644 --- a/polymer/copernicus_dem.py +++ b/polymer/copernicus_dem.py @@ -105,11 +105,11 @@ def get(self, lat, lon): rows = np.floor(np.array(lat) - half_pixel_height).astype(np.int32) tile_width = np.empty(lat.shape, dtype=np.int16) tile_width[:] = tile_height - tile_width[(rows>50)|(rows<-50)] = tile_height * 2 // 3 - tile_width[(rows>60)|(rows<-60)] = tile_height // 2 - tile_width[(rows>70)|(rows<-70)] = tile_height // 3 - tile_width[(rows>80)|(rows<-80)] = tile_height // 5 - tile_width[(rows>85)|(rows<-85)] = tile_height // 10 + tile_width[(rows>=50)|(rows<=-50)] = tile_height * 2 // 3 + tile_width[(rows>=60)|(rows<=-60)] = tile_height // 2 + tile_width[(rows>=70)|(rows<=-70)] = tile_height // 3 + tile_width[(rows>=80)|(rows<=-80)] = tile_height // 5 + tile_width[(rows>=85)|(rows<=-85)] = tile_height // 10 half_pixel_width = 0.5 / tile_width cols = np.floor(np.array(lon) + half_pixel_width).astype(np.int32) bin_index = (rows + 90) * 360 + cols + 180 From d7b9f08c3ebafe17fb5546a0e04e0539048eff22 Mon Sep 17 00:00:00 2001 From: martin-boettcher Date: Tue, 17 Sep 2024 12:42:19 +0200 Subject: [PATCH 10/11] fix anti-meridian issue in Copernicus DEM access --- polymer/copernicus_dem.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/polymer/copernicus_dem.py b/polymer/copernicus_dem.py index 6094dcc..a6ad35e 100644 --- a/polymer/copernicus_dem.py +++ b/polymer/copernicus_dem.py @@ -50,7 +50,7 @@ class CopernicusDEM(object): Upper Right ( 9.9993750, 55.0004167) ( 9d59'57.75"E, 55d 0' 1.50"N) Lower Right ( 9.9993750, 54.0004167) ( 9d59'57.75"E, 54d 0' 1.50"N) - The name denotes the lower left corner. S and W are like minus signs. + The name denotes the lower left corner. S and W are like minus signs. S16 has its lower edge at -16.0 deg. 
""" def __init__(self, directory='auxdata-Copernicus-90m-Global-DEM', @@ -111,7 +111,7 @@ def get(self, lat, lon): tile_width[(rows>=80)|(rows<=-80)] = tile_height // 5 tile_width[(rows>=85)|(rows<=-85)] = tile_height // 10 half_pixel_width = 0.5 / tile_width - cols = np.floor(np.array(lon) + half_pixel_width).astype(np.int32) + cols = np.floor((np.array(lon) + half_pixel_width + 180.0) % 360.0 - 180.0).astype(np.int32) bin_index = (rows + 90) * 360 + cols + 180 del rows, cols # determine set of different bin cells @@ -135,7 +135,7 @@ def get(self, lat, lon): # (A pixel center less than half a pixel above the degree line is covered by this tile) # (A pixel center less than half a pixel left of the degree line is covered by this tile) dem_row = (((row + 1) - np.array(lat)[is_inside_tile] + half_pixel_height) * tile_height).astype(np.int32) - dem_col = ((np.array(lon)[is_inside_tile] - col + half_pixel_width[is_inside_tile]) * tile_width[is_inside_tile]).astype(np.int32) + dem_col = (((np.array(lon)[is_inside_tile] - col + half_pixel_width[is_inside_tile]) % 360.0) * tile_width[is_inside_tile]).astype(np.int32) altitude[is_inside_tile] = dem[(dem_row, dem_col)] del dem_row, dem_col, is_inside_tile, dem del bin_index, half_pixel_width, tile_width From 21fce853febad2cc52e9eead7ca98ae0819d54c2 Mon Sep 17 00:00:00 2001 From: martin-boettcher Date: Tue, 17 Sep 2024 12:43:48 +0200 Subject: [PATCH 11/11] preliminary fix for anti-meridian issue in ECMWF ancillary data interpolation with NetCDF input as in Sen2Water --- polymer/level1_netcdf.py | 43 +++++++++++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 9 deletions(-) diff --git a/polymer/level1_netcdf.py b/polymer/level1_netcdf.py index c34f26a..159c6dc 100644 --- a/polymer/level1_netcdf.py +++ b/polymer/level1_netcdf.py @@ -479,19 +479,44 @@ def read_block(self, size, offset, bands): P0 = self.read_band(self.varnames['press'], size, offset) elif self.sensor == 'MSI' and self.ancillary == 'ECMWFT': # ancillary data embedded in Level1 - block.ozone = self.ozone.interp( - latitude=xr.DataArray(block.latitude), - longitude=xr.DataArray(block.longitude), + if 90.0 <= (self.wind_speed.longitude[0] + 180.0) % 360.0 < 270.0: + # we are between -90 and 90 degrees + wind_speed = self.wind_speed + ozone = self.ozone + surf_press = self.surf_press + latitude=xr.DataArray(block.latitude) + longitude=xr.DataArray(block.longitude) + else: + # lon to be turned by 180 degrees for interpolation + # lon to be sorted, see https://github.com/ecmwf/cfgrib/issues/402 + coord_lat = self.wind_speed.latitude + coord_lon = xr.DataArray(np.sort(self.wind_speed.longitude.data % 360.0 - 180.0), dims=['longitude']) + wind_speed = xr.DataArray(self.wind_speed, + coords={'latitude': coord_lat, 'longitude': coord_lon}, + dims=['latitude', 'longitude']) + ozone = xr.DataArray(self.ozone, + coords={'latitude': coord_lat, 'longitude': coord_lon}, + dims=['latitude', 'longitude'], + attrs={'units': "DU"}) + surf_press = xr.DataArray(self.surf_press, + coords={'latitude': coord_lat, 'longitude': coord_lon}, + dims=['latitude', 'longitude'], + attrs={'units': "hPa"}) + latitude=xr.DataArray(block.latitude) + longitude=xr.DataArray(block.longitude % 360.0 - 180.0) + block.ozone = ozone.interp( + latitude=latitude, + longitude=longitude, # method='nearest' ).values - block.wind_speed = self.wind_speed.interp( - latitude=xr.DataArray(block.latitude), - longitude=xr.DataArray(block.longitude), + block.wind_speed = wind_speed.interp( + latitude=latitude, + 
longitude=longitude, # method='nearest' ).values - P0 = self.surf_press.interp( - latitude=xr.DataArray(block.latitude), - longitude=xr.DataArray(block.longitude), + P0 = surf_press.interp( + latitude=latitude, + longitude=longitude, # method='nearest' ).values elif self.sensor == 'MSI':