Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
f51fd0a
truncate orbit reference epoch to seconds precision; update ephemeris
gshiroma Jan 24, 2025
9abd98f
replace `np.string_` with `np.bytes_`
gshiroma Jan 24, 2025
079e3db
fix the assignment of the `orbitType` in the metadata
gshiroma Jan 27, 2025
41110f0
remove option to change the geogrid upsampling
gshiroma Jan 28, 2025
5057b7c
update dependency requirements
gshiroma Jan 28, 2025
3df8d67
add fill value for byte datasets; handle multiband rasters
gshiroma Jan 28, 2025
ec04095
fix logic to save the layover/shadow mask
gshiroma Jan 28, 2025
2937604
improve unit tests
gshiroma Jan 28, 2025
b6627c7
bump software version to v1.0.4
gshiroma Apr 28, 2025
7d142a8
Merge branch 'opera-adt:main' into updates_2025
gshiroma Apr 29, 2025
be34866
Merge branch 'opera-adt:main' into updates_2025
gshiroma Apr 29, 2025
d6ea2b7
update isce3 (0.24.3) and s1reader(0.2.5)
May 2, 2025
d92318a
remove
May 2, 2025
215bf07
Merge branch 'opera-adt:main' into s1c_update
oberonia78 May 2, 2025
54c45fe
fix test
May 2, 2025
9e6c1e9
revert base image for test
May 2, 2025
e70c24e
test
May 5, 2025
74befc5
update isce3
May 6, 2025
e8084e0
test docker
May 6, 2025
f5a4bd9
set `epsg_out` when calling Rdr2Geo
gshiroma May 17, 2025
5f21d52
ensure that the to compute the layover/shadow mask matches the DEM's…
gshiroma May 19, 2025
0aeb624
update base image
May 21, 2025
4fa88d8
replace sh with bash
May 21, 2025
4d822ad
fix bugs
May 21, 2025
278e284
revert the lockfile
May 22, 2025
3e270f8
Merge pull request #91 from gshiroma/updates_2025
gshiroma May 22, 2025
bc35b0f
Merge branch 'main' into fix_layover_shadow_mask_antimeridian
gshiroma May 22, 2025
14f7514
Merge branch 'main' into s1c_update
oberonia78 May 22, 2025
836e3f4
revert changes to h5_prep.py
gshiroma May 22, 2025
fd8c79b
disable pytest for s1reader
May 22, 2025
0e0bdee
Merge branch 's1c_update' of https://github.com/oberonia78/RTC into s…
May 22, 2025
4848d3c
run CI only for RTC
May 23, 2025
76da242
refine the ci test
May 23, 2025
18a7ade
refine the ci test
May 23, 2025
4080ac1
refine the ci test
May 23, 2025
3369d2e
refine the ci test
May 23, 2025
ef0e3d3
fix typo
May 23, 2025
b8e3aba
fix typo
May 23, 2025
8fe11e5
improving the downloading lines
May 23, 2025
9ecd918
Merge pull request #93 from oberonia78/s1c_update
oberonia78 May 23, 2025
c5cc740
Merge pull request #96 from gshiroma/fix_layover_shadow_mask_antimeri…
gshiroma May 23, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 10 additions & 7 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,30 +41,33 @@ jobs:

# Set the conda environment up using Mamba and install dependencies
- name: Setup Environment
uses: mamba-org/setup-micromamba@main
uses: mamba-org/setup-micromamba@v2
with:
environment-file: ${{ matrix.env-file.file }}
environment-name: RTC
init-shell: bash # let the action touch ~/.bash_profile
log-level: debug # shows solver problems early
post-cleanup: none

# Install the S1-Reader OPERA-ADT project.
- name: Install S1-Reader
run: |
curl -sSL \
https://github.com/opera-adt/s1-reader/archive/refs/tags/v0.2.2.tar.gz \
https://github.com/opera-adt/s1-reader/archive/refs/tags/v0.2.5.tar.gz \
-o s1_reader_src.tar.gz \
&& tar -xvf s1_reader_src.tar.gz \
&& ln -s s1-reader-0.2.2 s1-reader \
&& ln -s s1-reader-0.2.5 s1-reader \
&& rm s1_reader_src.tar.gz \
&& python -m pip install ./s1-reader
&& python -m pip install -e ./s1-reader

# Setup the project
- name: Install Project
run: python -m pip install .

# Test the project.
# # Test the project.
- name: Test Project
run: |
pytest -vrpP
pytest -vrpP tests/

build_docker:

Expand Down Expand Up @@ -100,4 +103,4 @@ jobs:
# Build the image.
- name: Build docker image
run: |
sh build_docker_image.sh
bash build_docker_image.sh
13 changes: 9 additions & 4 deletions Docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
FROM oraclelinux:8
ARG BASE_IMAGE=oraclelinux:8.8

FROM ${BASE_IMAGE}

# ARG is out of scope here unless re-declared
ARG BASE_IMAGE

LABEL author="OPERA ADT" \
description="RTC cal/val release R4" \
version="1.0.3-final"
version="1.0.4-final"

RUN yum -y update &&\
yum -y install curl &&\
Expand Down Expand Up @@ -37,9 +42,9 @@ SHELL ["conda", "run", "-n", "RTC", "/bin/bash", "-c"]
WORKDIR /home/rtc_user/OPERA

# installing OPERA s1-reader
RUN curl -sSL https://github.com/isce-framework/s1-reader/archive/refs/tags/v0.2.4.tar.gz -o s1_reader_src.tar.gz &&\
RUN curl -sSL https://github.com/isce-framework/s1-reader/archive/refs/tags/v0.2.5.tar.gz -o s1_reader_src.tar.gz &&\
tar -xvf s1_reader_src.tar.gz &&\
ln -s s1-reader-0.2.4 s1-reader &&\
ln -s s1-reader-0.2.5 s1-reader &&\
rm s1_reader_src.tar.gz &&\
python -m pip install ./s1-reader

Expand Down
6 changes: 3 additions & 3 deletions Docker/environment.yml
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
name: rtc_s1_sas_final
channels:
- conda-forge
- nodefaults
dependencies:
- python>=3.9,<3.10
- gdal>=3.0
- s1reader>=0.2.4
- s1reader>=0.2.5
- numpy>=1.20
- pybind11>=2.5
- pyre>=1.11.2

- scipy!=1.10.0
- isce3==0.15.0
# Workaround for the issue with `libabseil` (09/11/2023)
- libabseil=20230125.3
- libabseil=20230125.3
3 changes: 1 addition & 2 deletions Docker/lockfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -155,5 +155,4 @@ https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.7.1-h880a63b_9.conda
https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda
https://conda.anaconda.org/conda-forge/linux-64/gdal-3.7.1-py39h41b90d8_9.conda
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py39h6183b62_0.conda
https://conda.anaconda.org/conda-forge/linux-64/isce3-0.15.0-py39h431996e_0.conda

https://conda.anaconda.org/conda-forge/linux-64/isce3-0.15.0-py39h431996e_0.conda
9 changes: 6 additions & 3 deletions Docker/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
python>=3.9,<3.10
python
cmake>=3.18
eigen>=3.3
fftw>=3.3
Expand All @@ -16,9 +16,12 @@ pyre>=1.11.2
pytest
cython
ruamel.yaml
scipy!=1.10.0
scipy
setuptools
shapely
yamale
backoff
isce3==0.15.0
isce3
libnetcdf
libgdal-hdf5
libgdal-netcdf
14 changes: 11 additions & 3 deletions build_docker_image.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,16 @@

REPO=opera
IMAGE=rtc
TAG=final_1.0.3
TAG=final_1.0.4

docker_build_args=(--rm --force-rm --network host -t $REPO/$IMAGE:$TAG -f Docker/Dockerfile)

if [ $# -eq 0 ]; then
echo "Base image was not specified. Using the default image specified in the Dockerfile."
else
echo "Using $1 as the base image."
docker_build_args+=(--build-arg BASE_IMAGE=$1)
fi

echo "IMAGE is $REPO/$IMAGE:$TAG"

Expand All @@ -13,8 +22,7 @@ set -ex
python3 setup.py sdist

# build image
docker build --rm --force-rm --network host -t $REPO/$IMAGE:$TAG -f Docker/Dockerfile .

docker build "${docker_build_args[@]}" .
# run tests - to be worked on when the RTC test module is in place
#docker run --rm -u "$(id -u):$(id -g)" -v "$PWD:/mnt" -w /mnt -it --network host "${IMAGE}:$t" pytest /mnt/tests/

Expand Down
3 changes: 0 additions & 3 deletions src/rtc/defaults/rtc_s1.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -197,9 +197,6 @@ runconfig:
# OPTIONAL - Choices: "single_block", "geogrid", "geogrid_and_radargrid", and "auto" (default)
memory_mode:

# OPTIONAL - Processing upsampling factor applied to input geogrid
geogrid_upsampling: 1

# Save the incidence angle
save_incidence_angle: False

Expand Down
3 changes: 0 additions & 3 deletions src/rtc/defaults/rtc_s1_static.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -198,9 +198,6 @@ runconfig:
# OPTIONAL - Choices: "single_block", "geogrid", "geogrid_and_radargrid", and "auto" (default)
memory_mode:

# OPTIONAL - Processing upsampling factor applied to input geogrid
geogrid_upsampling: 1

# Save the incidence angle
save_incidence_angle: True

Expand Down
76 changes: 53 additions & 23 deletions src/rtc/h5_prep.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ def save_hdf5_file(hdf5_obj, output_hdf5_file, clip_max,
del hdf5_obj[h5_ds]
pol_list_s2 = np.array(pol_list, dtype='S2')
dset = hdf5_obj.create_dataset(h5_ds, data=pol_list_s2)
dset.attrs['description'] = np.string_(
dset.attrs['description'] = np.bytes_(
'List of processed polarization layers')

# save geogrid coordinates
Expand Down Expand Up @@ -294,21 +294,21 @@ def create_hdf5_file(product_id, output_hdf5_file, orbit, burst, cfg,
'''

hdf5_obj = h5py.File(output_hdf5_file, 'w')
hdf5_obj.attrs['Conventions'] = np.string_("CF-1.8")
hdf5_obj.attrs["contact"] = np.string_("operasds@jpl.nasa.gov")
hdf5_obj.attrs["institution"] = np.string_("NASA JPL")
hdf5_obj.attrs["project"] = np.string_("OPERA")
hdf5_obj.attrs["reference_document"] = np.string_(
hdf5_obj.attrs['Conventions'] = np.bytes_("CF-1.8")
hdf5_obj.attrs["contact"] = np.bytes_("operasds@jpl.nasa.gov")
hdf5_obj.attrs["institution"] = np.bytes_("NASA JPL")
hdf5_obj.attrs["project"] = np.bytes_("OPERA")
hdf5_obj.attrs["reference_document"] = np.bytes_(
"Product Specification Document for the OPERA Radiometric"
" Terrain-Corrected SAR Backscatter from Sentinel-1,"
" JPL D-108758, Rev. Working Version 1, Aug 31, 2023")

# product type
product_type = cfg.groups.primary_executable.product_type
if product_type == STATIC_LAYERS_PRODUCT_TYPE:
hdf5_obj.attrs["title"] = np.string_("OPERA RTC-S1-STATIC Product")
hdf5_obj.attrs["title"] = np.bytes_("OPERA RTC-S1-STATIC Product")
else:
hdf5_obj.attrs["title"] = np.string_("OPERA RTC-S1 Product")
hdf5_obj.attrs["title"] = np.bytes_("OPERA RTC-S1 Product")

populate_metadata_group(product_id, hdf5_obj, burst, cfg,
processing_datetime, is_mosaic)
Expand All @@ -320,23 +320,44 @@ def create_hdf5_file(product_id, output_hdf5_file, orbit, burst, cfg,


def save_orbit(orbit, orbit_group, orbit_file_path):

# ensure that the orbit reference epoch has no fractional part
# otherwise, truncate it to seconds precision
orbit_reference_epoch = orbit.reference_epoch
if orbit_reference_epoch.frac != 0:
logger.warning('the orbit reference epoch is not an'
' integer number. Truncating it'
' to seconds precision and'
' updating the orbit ephemeris'
' accordingly.')

epoch = isce3.core.DateTime(orbit_reference_epoch.year,
orbit_reference_epoch.month,
orbit_reference_epoch.day,
orbit_reference_epoch.hour,
orbit_reference_epoch.minute,
orbit_reference_epoch.second)

orbit.update_reference_epoch(epoch)

orbit.save_to_h5(orbit_group)

# Add description attributes.
orbit_group["time"].attrs["description"] = np.string_(
orbit_group["time"].attrs["description"] = np.bytes_(
"Time vector record. This"
" record contains the time corresponding to position, velocity,"
" acceleration records")
orbit_group["position"].attrs["description"] = np.string_(
orbit_group["position"].attrs["description"] = np.bytes_(
"Position vector"
" record. This record contains the platform position data with"
" respect to WGS84 G1762 reference frame")
orbit_group["velocity"].attrs["description"] = np.string_(
orbit_group["velocity"].attrs["description"] = np.bytes_(
"Velocity vector"
" record. This record contains the platform velocity data with"
" respect to WGS84 G1762 reference frame")
orbit_group.create_dataset(
'referenceEpoch',
data=np.string_(orbit.reference_epoch.isoformat()))
data=np.bytes_(orbit.reference_epoch.isoformat()))

# Orbit source/type
orbit_type = 'Undefined'
Expand All @@ -359,9 +380,12 @@ def save_orbit(orbit, orbit_group, orbit_file_path):
orbit_type_list.append(orbit_type_individual)
orbit_type = '; '.join(orbit_type_list)

d = orbit_group.require_dataset("orbitType", (), "S64",
data=np.string_(orbit_type))
d.attrs["description"] = np.string_(
if 'orbitType' in orbit_group:
del orbit_group['orbitType']
d = orbit_group.create_dataset("orbitType",
data=np.bytes_(orbit_type))

d.attrs["description"] = np.bytes_(
"Type of orbit file used in processing")


Expand Down Expand Up @@ -1296,11 +1320,11 @@ def populate_metadata_group(product_id: str,
continue
if isinstance(data, str):
dset = h5py_obj.create_dataset(
path_dataset_in_h5, data=np.string_(data))
path_dataset_in_h5, data=np.bytes_(data))
else:
dset = h5py_obj.create_dataset(path_dataset_in_h5, data=data)

dset.attrs['description'] = np.string_(description)
dset.attrs['description'] = np.bytes_(description)


def save_hdf5_dataset(ds_filename, h5py_obj, root_path,
Expand Down Expand Up @@ -1341,7 +1365,11 @@ def save_hdf5_dataset(ds_filename, h5py_obj, root_path,
logger.warning(f'WARNING Cannot open raster file: {ds_filename}')
return

ds_name = layer_hdf5_dict[layer_name]
if isinstance(layer_name, str):
ds_name = layer_hdf5_dict[layer_name]
else:
ds_name = [layer_hdf5_dict[l] for l in layer_name]

if long_name is not None:
description = long_name
else:
Expand Down Expand Up @@ -1377,25 +1405,27 @@ def save_hdf5_dataset(ds_filename, h5py_obj, root_path,
dset = h5py_obj.create_dataset(h5_ds, data=data)
dset.dims[0].attach_scale(yds)
dset.dims[1].attach_scale(xds)
dset.attrs['grid_mapping'] = np.string_("projection")
dset.attrs['grid_mapping'] = np.bytes_("projection")

if standard_name is not None:
dset.attrs['standard_name'] = np.string_(standard_name)
dset.attrs['standard_name'] = np.bytes_(standard_name)

if long_name is not None:
dset.attrs['long_name'] = np.string_(long_name)
dset.attrs['long_name'] = np.bytes_(long_name)

dset.attrs['description'] = np.string_(description)
dset.attrs['description'] = np.bytes_(description)

if units is not None:
dset.attrs['units'] = np.string_(units)
dset.attrs['units'] = np.bytes_(units)

if fill_value is not None:
dset.attrs.create('_FillValue', data=fill_value)
elif 'cfloat' in gdal.GetDataTypeName(raster.datatype()).lower():
dset.attrs.create('_FillValue', data=np.nan + 1j * np.nan)
elif 'float' in gdal.GetDataTypeName(raster.datatype()).lower():
dset.attrs.create('_FillValue', data=np.nan)
elif 'byte' in gdal.GetDataTypeName(raster.datatype()).lower():
dset.attrs.create('_FillValue', data=255)

if stats_vector is not None:
stats_obj = stats_vector[band]
Expand Down
23 changes: 11 additions & 12 deletions src/rtc/rtc_s1.py
Original file line number Diff line number Diff line change
Expand Up @@ -769,22 +769,21 @@ def run_parallel(cfg: RunConfig, logfile_path, flag_logger_full_format):

if flag_layover_shadow_mask_is_temporary:
temp_files_list.append(layover_shadow_mask_file)
layover_shadow_mask_file = None
else:
burst_output_file_list.append(layover_shadow_mask_file)
logger.info(f'file saved: {layover_shadow_mask_file}')

# Take the layover shadow mask from HDF5 file if not exists
if save_secondary_layers_as_hdf5:
layover_shadow_mask_file = (
f'NETCDF:{burst_hdf5_in_output}:'
f'{DATA_BASE_GROUP}/'
f'{layer_hdf5_dict[LAYER_NAME_LAYOVER_SHADOW_MASK]}')

if save_mask:
output_metadata_dict[
LAYER_NAME_LAYOVER_SHADOW_MASK][1].append(
layover_shadow_mask_file)
# Take the layover shadow mask from HDF5 file if not exists
if save_secondary_layers_as_hdf5:
layover_shadow_mask_file = (
f'NETCDF:{burst_hdf5_in_output}:'
f'{DATA_BASE_GROUP}/'
f'{layer_hdf5_dict[LAYER_NAME_LAYOVER_SHADOW_MASK]}')

if save_mask:
output_metadata_dict[
LAYER_NAME_LAYOVER_SHADOW_MASK][1].append(
layover_shadow_mask_file)

if not save_mask:
layover_shadow_mask_file = None
Expand Down
Loading