4 changes: 3 additions & 1 deletion example.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from polymer.main import run_atm_corr, Level1, Level2
from polymer.main import run_atm_corr
from polymer.level1 import Level1
from polymer.level2 import Level2
from polymer.level2_hdf import Level2_HDF
from polymer.level2_nc import Level2_NETCDF
from polymer.level1_ascii import Level1_ASCII
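A minimal sketch of how the reorganised imports are typically combined, assuming the usual Polymer calling convention; the input product name and the output filename are placeholders:

from polymer.main import run_atm_corr
from polymer.level1 import Level1
from polymer.level2 import Level2

# Level1() autodetects the sensor from the product name,
# Level2() writes the result to the given file
run_atm_corr(
    Level1('S2A_MSIL1C_20230101T103421_N0509_R108_T32UNE_20230101T120000.SAFE'),
    Level2(filename='output.nc'),
)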
226 changes: 226 additions & 0 deletions polymer/copernicus_dem.py
@@ -0,0 +1,226 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import print_function, division, absolute_import
import ssl
import certifi
from urllib.request import urlopen
import numpy as np
import os
import os.path
from glob import glob
import sys
import xarray as xr

copernicus_30m_dem_url_prefix = "https://copernicus-dem-30m.s3.eu-central-1.amazonaws.com"
copernicus_90m_dem_url_prefix = "https://copernicus-dem-90m.s3.eu-central-1.amazonaws.com"

class CopernicusDEM(object):
"""
Copernicus digital elevation model, 90m or 30m, with functions
to get the altitude above the geoid for a lat-lon grid and
to download missing DEM tiles on demand over the internet.

CopernicusDEM().get(lat, lon)
where lat and lon are array-like, returns the altitude in m as an array of the same shape

The DEM is a grid of tie points, not of pixels. The altitude is given on
the degree lines. For the 90 m DEM there are 1200 rows per tile; the bottom row
is omitted because it belongs to the next tile to the south. The number of
columns depends on the latitude: 1200 columns per tile at the equator,
800 columns from 50 degrees onwards, and so on (see the table below).
The 30 m DEM has three times as many rows and columns.

Latitude band   Longitude spacing   Columns per tile (90 m)
 0 - 50         1x                  1200
50 - 60         1.5x                 800
60 - 70         2x                   600
70 - 80         3x                   400
80 - 85         5x                   240
85 - 90         10x                  120

DEM rows are ordered from North to South; the first row is the northernmost.
DEM columns are ordered from West to East.

Naming convention for the 1-degree tiles in COP format, for example:

Copernicus_DSM_COG_30_N54_00_E009_00_DEM.tif

Upper Left ( 8.9993750, 55.0004167) ( 8d59'57.75"E, 55d 0' 1.50"N)
Lower Left ( 8.9993750, 54.0004167) ( 8d59'57.75"E, 54d 0' 1.50"N)
Upper Right ( 9.9993750, 55.0004167) ( 9d59'57.75"E, 55d 0' 1.50"N)
Lower Right ( 9.9993750, 54.0004167) ( 9d59'57.75"E, 54d 0' 1.50"N)

The name denotes the lower left corner of the tile. S and W act as minus signs: S16 has its lower edge at -16.0 deg.
"""
def __init__(self,
directory='auxdata-Copernicus-90m-Global-DEM',
resolution=90,
missing=0.0,
verbose=False,
with_download=True):
"""
Memorises DEM parameters

:param directory: where to find and store downloaded tiles, must exist
:param resolution: 90 or 30, default 90
:param missing: what to provide in case of missing value
* a float, e.g. 0.0 (default)
* None : raise an error
:param verbose: whether to print trace output
:param with_download: whether to allow download of missing tiles, default True
"""
self.cache_directory = directory
self.resolution = resolution
self.verbose = verbose
self.missing = missing
if resolution == 90:
self.url_prefix = copernicus_90m_dem_url_prefix
elif resolution == 30:
self.url_prefix = copernicus_30m_dem_url_prefix
else:
raise ValueError("resolution " + str(resolution) + " does not match Copernicus DEM. 90 or 30 expected.")
if not os.path.exists(directory):
raise IOError('Directory "{}" does not exist'.format(directory))
self.with_download = with_download
if self.with_download:
self.available = self._list_available_tiles()
if self.verbose:
print(str(len(self.available)-1) + " remote DEM tiles available")  # -1 for the trailing empty entry of tileList.txt
if self.verbose:
local = glob(os.path.join(self.cache_directory, "*.tif"))
print(str(len(local)) + " local DEM tiles available")

def get(self, lat, lon):
"""
Reads Copernicus DEM data for an array (lat, lon), downloads tiles on demand
"""
# initialise altitude
altitude = np.empty(lat.shape, np.float32)
altitude[:] = np.nan
# round lat and lon, encode as bin index per pixel of 360 cols and 180 rows starting at -90/-180
# (A pixel center less than half a pixel above the degree line is covered by this tile)
# (A pixel center less than half a pixel left of the degree line is covered by this tile)
tile_height = 1200 * 90 / self.resolution
half_pixel_height = 0.5 / tile_height
rows = np.floor(np.array(lat) - half_pixel_height).astype(np.int32)
tile_width = np.empty(lat.shape, dtype=np.int16)
tile_width[:] = tile_height
tile_width[(rows>=50)|(rows<=-50)] = tile_height * 2 // 3
tile_width[(rows>=60)|(rows<=-60)] = tile_height // 2
tile_width[(rows>=70)|(rows<=-70)] = tile_height // 3
tile_width[(rows>=80)|(rows<=-80)] = tile_height // 5
tile_width[(rows>=85)|(rows<=-85)] = tile_height // 10
half_pixel_width = 0.5 / tile_width
cols = np.floor((np.array(lon) + half_pixel_width + 180.0) % 360.0 - 180.0).astype(np.int32)
bin_index = (rows + 90) * 360 + cols + 180
del rows, cols
# determine set of different bin cells
bin_set = np.unique(bin_index)
# loop over 1-deg bin cells required to cover grid
for bin in bin_set:
# determine lat and lon of lower left corner of bin cell
row = bin // 360 - 90
col = bin % 360 - 180
# ensure DEM tile is available locally
local_path = self._download_tile(row, col)
if local_path is None:
continue
# read file content
if self.verbose:
print("reading DEM tile " + local_path)
with xr.open_dataset(local_path, engine="rasterio") as ds:
dem = ds.band_data.values[0]
# transfer content subset into target altitude
is_inside_tile = (bin_index == bin)
# (A pixel center less than half a pixel above the degree line is covered by this tile)
# (A pixel center less than half a pixel left of the degree line is covered by this tile)
dem_row = (((row + 1) - np.array(lat)[is_inside_tile] + half_pixel_height) * tile_height).astype(np.int32)
dem_col = (((np.array(lon)[is_inside_tile] - col + half_pixel_width[is_inside_tile]) % 360.0) * tile_width[is_inside_tile]).astype(np.int32)
altitude[is_inside_tile] = dem[(dem_row, dem_col)]
del dem_row, dem_col, is_inside_tile, dem
del bin_index, half_pixel_width, tile_width
# fill in missing values
if self.missing is not None: # assuming float
altitude[np.isnan(altitude)] = self.missing
assert not np.isnan(altitude).any()
#self._write_dem_tile_to_file(alt, lat, lon)
return altitude
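# Worked example of the bin-index encoding used in get() (illustration only):
#   lat = 54.3, lon = 9.7  ->  row = 54, col = 9
#   bin_index = (54 + 90) * 360 + 9 + 180 = 52029
#   decoding:  52029 // 360 - 90 = 54,  52029 % 360 - 180 = 9
# i.e. each 1-degree cell of the 360 x 180 global grid maps to a unique integer.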

def fetch_all(self):
lat, lon = np.meshgrid(np.linspace(-90, 90, 180), np.linspace(-180, 180, 360))
self.get(lat, lon)


def _list_available_tiles(self):
list_filename = "tileList.txt"
local_list_path = os.path.join(self.cache_directory, list_filename)
if not os.path.exists(local_list_path):
url = "{}/{}".format(self.url_prefix, list_filename)
if self.verbose:
print('downloading... ', end='')
sys.stdout.flush()
with urlopen(url, context=ssl.create_default_context(cafile=certifi.where())) as response:
content = response.read()
tmp_path = local_list_path + '.tmp'
with open(tmp_path, 'wb') as fp:
fp.write(content)
os.rename(tmp_path, local_list_path)
if self.verbose:
print(list_filename)
with open(local_list_path) as fp:
return fp.read().split("\n")

def _download_tile(self, row, col):
"""
Checks availability, downloads one DEM tile if necessary, and stores it
in the directory specified as constructor parameter.

:param row: degrees north, -90..89
:param col: degrees east, -180..179
:return: local path to the DEM file (downloaded or already cached), None if there is no such tile
"""
# Copernicus_DSM_COG_30_N00_00_E006_00_DEM.tif
dem_filename = "Copernicus_DSM_COG_{}_{}{:02d}_00_{}{:03d}_00_DEM.tif".format(
self.resolution // 3, # resolution code in the tile name: 30 for the 90 m DEM, 10 for the 30 m DEM
"N" if row >= 0 else "S",
np.abs(row),
"E" if col >= 0 else "W",
np.abs(col)
)
local_path = os.path.join(self.cache_directory, dem_filename)
# shortcuts
if os.path.exists(local_path):
return local_path
if not self.with_download or dem_filename[:-len(".tif")] not in self.available:
return None
# download
# https://.../Copernicus_DSM_COG_30_N00_00_E006_00_DEM/Copernicus_DSM_COG_30_N00_00_E006_00_DEM.tif
url = "{}/{}/{}".format(self.url_prefix, dem_filename[:-len(".tif")], dem_filename)
if self.verbose:
print('downloading... ', end='')
sys.stdout.flush()
with urlopen(url, context=ssl.create_default_context(cafile=certifi.where())) as response:
content = response.read()
tmp_path = local_path + '.tmp'
with open(tmp_path, 'wb') as fp:
fp.write(content)
os.rename(tmp_path, local_path)
if self.verbose:
print(dem_filename)
return local_path
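# Example of the resulting names for the 90 m DEM (resolution code 30), tile N54/E009:
#   file: Copernicus_DSM_COG_30_N54_00_E009_00_DEM.tif
#   URL:  https://copernicus-dem-90m.s3.eu-central-1.amazonaws.com/Copernicus_DSM_COG_30_N54_00_E009_00_DEM/Copernicus_DSM_COG_30_N54_00_E009_00_DEM.tif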

def _write_dem_tile_to_file(self, alt, lat, lon):
# test output
alt_da = xr.DataArray(alt, dims=['y', 'x'], attrs={"long_name": "altitude above geoid in meters"})
lat_da = xr.DataArray(lat, dims=['y', 'x'], attrs={"standard_name": "latitude"})
lon_da = xr.DataArray(lon, dims=['y', 'x'], attrs={"standard_name": "longitude"})
ds = xr.Dataset({"alt": alt_da},
coords={"lat": lat_da, "lon": lon_da},
attrs={"description": "tile with nearest-interpolated DEM data"})
filename = "interpolated_dem_" + str(lat[0,0]) + "_" + str(lon[0,0]) + ".nc"
ds.to_netcdf(filename)

if __name__ == "__main__":
# download all to the directory provided in argument
directory = sys.argv[1]
CopernicusDEM(directory=directory, verbose=True).fetch_all()
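A minimal usage sketch for the new module, assuming the cache directory already exists; the directory name matches the constructor default and the coordinates are sample values:

import numpy as np
from polymer.copernicus_dem import CopernicusDEM

# 2x2 grid of sample points over northern Germany
lat = np.array([[54.3, 54.3], [54.4, 54.4]])
lon = np.array([[9.7, 9.8], [9.7, 9.8]])

dem = CopernicusDEM(directory='auxdata-Copernicus-90m-Global-DEM', verbose=True)
alt = dem.get(lat, lon)   # altitudes in metres, same shape as lat/lon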
2 changes: 1 addition & 1 deletion polymer/level1.py
@@ -57,7 +57,7 @@ def autodetect(self):
elif self.detect_msi():
self.sensor = 'msi'

elif b.startswith('LC8'):
elif b.startswith('LC8') or b.startswith('LC08'):
self.sensor = 'landsat8'

else:
108 changes: 104 additions & 4 deletions polymer/level1_netcdf.py
@@ -6,6 +6,7 @@
from polymer.block import Block
from netCDF4 import Dataset
import numpy as np
import xarray as xr
from warnings import warn
from datetime import datetime
from polymer.common import L2FLAGS
@@ -61,7 +62,10 @@ def __init__(self, filename,
try:
title = self.root.getncattr('title')
except AttributeError:
title = self.root.getncattr('product_type')
try:
title = self.root.getncattr('product_type')
except AttributeError:
title = self.root.getncattr('type')

if 'MERIS' in title:
self.sensor = 'MERIS'
@@ -128,9 +132,27 @@ def __init__(self, filename,
}

# get platform name
metadata = self.root.variables['metadata']
self.platform = metadata.getncattr('Level-1C_User_Product:General_Info:Product_Info:Datatake:SPACECRAFT_NAME')
if 'metadata' in self.root.variables:
metadata = self.root.variables['metadata']
self.platform = metadata.getncattr('Level-1C_User_Product:General_Info:Product_Info:Datatake:SPACECRAFT_NAME')
else:
self.platform = self.root.getncattr('platform')
self.platform = self.platform.replace('entinel-', '')  # e.g. 'Sentinel-2A' -> 'S2A'

elif title == "MSIL1C":
self.sensor = 'MSI'
self.varnames = {
'latitude': 'lat',
'longitude': 'lon',
'SZA': 'sun_zenith',
'VZA': 'view_zenith_mean',
'SAA': 'sun_azimuth',
'VAA': 'view_azimuth_mean',
}

# get platform name
self.platform = self.root.getncattr('platform')

else:
raise Exception('Could not identify sensor from "{}"'.format(title))

@@ -153,7 +175,9 @@ def __init__(self, filename,
self.ancillary = Ancillary_NASA()
else:
self.ancillary = ancillary
if self.ancillary is not None:
if self.ancillary == 'ECMWFT':
self.init_ancillary_embedded()
elif self.ancillary is not None:
self.init_ancillary()

self.init_bands()
@@ -217,6 +241,40 @@ def init_ancillary(self):
self.ancillary_files.update(self.wind_speed.filename)
self.ancillary_files.update(self.surf_press.filename)

def init_ancillary_embedded(self):
#lat = self.read_band("aux_latitude", (9,9), (0,0))
#lon = self.read_band("aux_longitude", (9,9), (0,0))
lat = np.array(self.root.variables["aux_latitude"])
lon = np.array(self.root.variables["aux_longitude"])
# the ECMWF variable names differ between products: some use '_0u'/'_0v', others 'u10'/'v10'
if '_0u' in self.root.variables:
u10 = self.read_band("_0u", (9,9), (0,0))
v10 = self.read_band("_0v", (9,9), (0,0))
else:
u10 = self.read_band("u10", (9,9), (0,0))
v10 = self.read_band("v10", (9,9), (0,0))
tco3 = self.read_band("tco3", (9,9), (0,0))
msl = self.read_band("msl", (9,9), (0,0))
wind_speed = np.sqrt(u10 * u10 + v10 * v10)
#assert tco3.units == 'kg m**-2'
ozone = tco3 / 2.1415e-5 # convert kg/m2 to DU
#assert msl.units == 'Pa'
surf_press = msl / 100
#coord_lat = xr.DataArray(lat[:,0], dims=['latitude'])
#coord_lon = xr.DataArray(lon[0], dims=['longitude'])
coord_lat = xr.DataArray(lat, dims=['latitude'])
coord_lon = xr.DataArray(lon, dims=['longitude'])
self.wind_speed = xr.DataArray(wind_speed,
coords={'latitude': coord_lat, 'longitude': coord_lon},
dims=['latitude', 'longitude'])
self.ozone = xr.DataArray(ozone,
coords={'latitude': coord_lat, 'longitude': coord_lon},
dims=['latitude', 'longitude'],
attrs={'units': "DU"})
self.surf_press = xr.DataArray(surf_press,
coords={'latitude': coord_lat, 'longitude': coord_lon},
dims=['latitude', 'longitude'],
attrs={'units': "hPa"})
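# Worked unit conversions for the ancillary fields above (illustration only):
#   tco3 is in kg/m2 and 1 DU corresponds to 2.1415e-5 kg/m2,
#   so tco3 = 6.4e-3 kg/m2  ->  6.4e-3 / 2.1415e-5 = approx. 299 DU
#   msl is in Pa, so 101325 Pa / 100 = 1013.25 hPa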

def read_date(self, date):
''' parse a date in the format 04-JUL-2017 12:31:28.013924 '''
date = date.replace('JAN', '01')
@@ -419,6 +477,48 @@ def read_block(self, size, offset, bands):
mwind = self.read_band(self.varnames['mwind'], size, offset)
block.wind_speed = np.sqrt(zwind**2 + mwind**2)
P0 = self.read_band(self.varnames['press'], size, offset)
elif self.sensor == 'MSI' and self.ancillary == 'ECMWFT':
# ancillary data embedded in Level1
if 90.0 <= (self.wind_speed.longitude[0] + 180.0) % 360.0 < 270.0:
# we are between -90 and 90 degrees
wind_speed = self.wind_speed
ozone = self.ozone
surf_press = self.surf_press
latitude=xr.DataArray(block.latitude)
longitude=xr.DataArray(block.longitude)
else:
# lon to be turned by 180 degrees for interpolation
# lon to be sorted, see https://github.com/ecmwf/cfgrib/issues/402
coord_lat = self.wind_speed.latitude
coord_lon = xr.DataArray(np.sort(self.wind_speed.longitude.data % 360.0 - 180.0), dims=['longitude'])
wind_speed = xr.DataArray(self.wind_speed,
coords={'latitude': coord_lat, 'longitude': coord_lon},
dims=['latitude', 'longitude'])
ozone = xr.DataArray(self.ozone,
coords={'latitude': coord_lat, 'longitude': coord_lon},
dims=['latitude', 'longitude'],
attrs={'units': "DU"})
surf_press = xr.DataArray(self.surf_press,
coords={'latitude': coord_lat, 'longitude': coord_lon},
dims=['latitude', 'longitude'],
attrs={'units': "hPa"})
latitude=xr.DataArray(block.latitude)
longitude=xr.DataArray(block.longitude % 360.0 - 180.0)
block.ozone = ozone.interp(
latitude=latitude,
longitude=longitude,
# method='nearest'
).values
block.wind_speed = wind_speed.interp(
latitude=latitude,
longitude=longitude,
# method='nearest'
).values
P0 = surf_press.interp(
latitude=latitude,
longitude=longitude,
# method='nearest'
).values
elif self.sensor == 'MSI':
block.ozone = self.ozone[block.latitude, block.longitude]
block.wind_speed = self.wind_speed[block.latitude, block.longitude]
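The dateline handling in read_block can be illustrated in isolation. A minimal sketch with sample values only, mirroring the lon % 360.0 - 180.0 shift and the sorting work-around referenced from https://github.com/ecmwf/cfgrib/issues/402:

import numpy as np
import xarray as xr

# auxiliary grid longitudes crossing the dateline are not monotonic as given
aux_lon = np.array([170.0, 175.0, 180.0, -175.0, -170.0])
shifted = np.sort(aux_lon % 360.0 - 180.0)          # [-10., -5., 0., 5., 10.]
field = xr.DataArray(np.arange(5.0),                # arbitrary sample data
                     coords={'longitude': shifted}, dims=['longitude'])

# pixel longitudes are shifted with the same expression before interpolation
pix_lon = xr.DataArray(np.array([179.0, -179.5]) % 360.0 - 180.0)
print(field.interp(longitude=pix_lon).values)       # [1.8 2.1]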