From a1df5eab2d2f74bfdbe9e04553e7c13f49299bd4 Mon Sep 17 00:00:00 2001 From: Mohammad Mohseni Aref Date: Mon, 13 Oct 2025 22:48:09 +0200 Subject: [PATCH 1/5] Improve LiCSAR metadata preparation --- src/mintpy/load_data.py | 14 +-- src/mintpy/prep_licsar.py | 231 +++++++++++++++++++++++++++++--------- 2 files changed, 182 insertions(+), 63 deletions(-) diff --git a/src/mintpy/load_data.py b/src/mintpy/load_data.py index 4baffcefc..133e5d878 100644 --- a/src/mintpy/load_data.py +++ b/src/mintpy/load_data.py @@ -614,19 +614,7 @@ def prepare_metadata(iDict): # Import prep_{processor} prep_module = importlib.import_module(f'mintpy.cli.prep_{processor}') - if processor == 'licsar': - # Validate required files - metadata_file = os.path.join(os.path.dirname(iDict['mintpy.load.unwFile']), 'metadata.txt') - baseline_file = os.path.join(os.path.dirname(iDict['mintpy.load.unwFile']), 'baselines.txt') - if not os.path.exists(metadata_file) or not os.path.exists(baseline_file): - raise FileNotFoundError(f"Required LiCSAR files not found: {metadata_file}, {baseline_file}") - - # Run prep_licsar - iargs = [iDict['mintpy.load.unwFile'], metadata_file, baseline_file] - ut.print_command_line(script_name, iargs) - prep_module.main(iargs) - - elif processor in ['gamma', 'hyp3', 'roipac', 'snap', 'cosicorr']: + if processor in ['gamma', 'hyp3', 'roipac', 'snap', 'cosicorr', 'licsar']: # Run prep_module for key in [i for i in iDict.keys() if (i.startswith('mintpy.load.') diff --git a/src/mintpy/prep_licsar.py b/src/mintpy/prep_licsar.py index 68a6f37a1..9e04bff1b 100644 --- a/src/mintpy/prep_licsar.py +++ b/src/mintpy/prep_licsar.py @@ -6,12 +6,80 @@ import datetime as dt import os +import re +import warnings from mintpy.constants import SPEED_OF_LIGHT from mintpy.objects import sensor from mintpy.utils import readfile, utils1 as ut, writefile +def _safe_float(value): + """Convert *value* to float when possible.""" + + try: + return float(value) + except (TypeError, ValueError): + return 
None + + +def _parse_date(date_str): + """Return ``datetime.date`` from a LiCSAR date string.""" + + if not date_str: + return None + + digits = ''.join(ch for ch in str(date_str) if ch.isdigit()) + for length, fmt in ((8, '%Y%m%d'), (6, '%y%m%d')): + if len(digits) >= length: + try: + return dt.datetime.strptime(digits[:length], fmt).date() + except ValueError: + continue + return None + + +def _extract_slave_date_from_fname(fname, master_date): + """Infer the slave date from the file name when it is not present in metadata.""" + + name = os.path.basename(fname) + for match in re.findall(r'\d{6,8}', name): + candidate = _parse_date(match) + if candidate and candidate != master_date: + return candidate + return None + + +def _read_key_value_file(meta_file, separators=('=', ':')): + """Return a dictionary with lower-case keys from ``meta_file``.""" + + if not os.path.isfile(meta_file): + raise FileNotFoundError(f'LiCSAR metadata file not found: {meta_file}') + + info = {} + with open(meta_file) as f: + for line in f: + text = line.strip() + if not text or text.startswith('#'): + continue + + for sep in separators: + if sep in text: + key, value = text.split(sep, 1) + info[key.strip().lower()] = value.strip() + break + return info + + +def _get_first(info, *keys): + """Return the first available value from ``info`` using ``keys``.""" + + for key in keys: + if key and key.lower() in info: + return info[key.lower()] + return None + + ######################################################################### def add_licsar_metadata(fname, meta, is_ifg=True): '''Read/extract metadata from LiCSAR metadata and baseline files and add to metadata dictionary. 
@@ -30,71 +98,134 @@ def add_licsar_metadata(fname, meta, is_ifg=True): # Read metadata.txt meta_file = os.path.join(os.path.dirname(fname), 'metadata.txt') - licsar_meta = {} - with open(meta_file) as f: - for line in f: - key, value = line.strip().split('=') - licsar_meta[key.strip()] = value.strip() + licsar_meta = _read_key_value_file(meta_file) # Add universal metadata meta['PROCESSOR'] = 'LiCSAR' - meta['MASTER_DATE'] = licsar_meta.get('master', 'Unknown') - meta['CENTER_TIME'] = licsar_meta.get('center_time', 'Unknown') - meta['HEADING'] = float(licsar_meta.get('heading', '0')) % 360. - 360. # ensure negative value - meta['AVG_INCIDENCE_ANGLE'] = licsar_meta.get('avg_incidence_angle', 'Unknown') - meta['AZIMUTH_RESOLUTION'] = licsar_meta.get('azimuth_resolution', 'Unknown') - meta['RANGE_RESOLUTION'] = licsar_meta.get('range_resolution', 'Unknown') - meta['CENTRE_RANGE'] = licsar_meta.get('centre_range_m', 'Unknown') - meta['AVG_HEIGHT'] = licsar_meta.get('avg_height', 'Unknown') - meta['APPLIED_DEM'] = licsar_meta.get('applied_DEM', 'Unknown') + + master_date = _parse_date(_get_first(licsar_meta, 'master', 'masterdate')) + slave_date = _parse_date(_get_first(licsar_meta, 'slave', 'slavedate')) + if slave_date is None: + slave_date = _extract_slave_date_from_fname(fname, master_date) + + if master_date: + meta['MASTER_DATE'] = master_date.strftime('%Y%m%d') + if slave_date: + meta['SLAVE_DATE'] = slave_date.strftime('%Y%m%d') + + center_time = _get_first(licsar_meta, 'center_time', 'centre_time') + if center_time: + meta['CENTER_TIME'] = center_time + + heading = _safe_float(_get_first(licsar_meta, 'heading', 'track_heading')) + if heading is not None: + heading = heading % 360.0 + if heading > 180.0: + heading -= 360.0 + meta['HEADING'] = heading + + value_map = { + 'AVG_INCIDENCE_ANGLE': ['avg_incidence_angle', 'incidence_angle'], + 'AZIMUTH_RESOLUTION': ['azimuth_resolution', 'azimuth_pixel_spacing'], + 'RANGE_RESOLUTION': ['range_resolution', 
'range_pixel_spacing'], + 'CENTRE_RANGE': ['centre_range_m', 'center_range', 'center_range_m'], + 'AVG_HEIGHT': ['avg_height', 'average_height'], + 'APPLIED_DEM': ['applied_dem', 'applied_dem_file', 'applied_DEM'], + 'ALOOKS': ['azimuth_looks', 'looks_azimuth'], + 'RLOOKS': ['range_looks', 'looks_range'], + } + for meta_key, candidates in value_map.items(): + value = _get_first(licsar_meta, *candidates) + if value is not None: + meta[meta_key] = value # Add orbit direction and corner coordinates - meta['ORBIT_DIRECTION'] = 'ASCENDING' if abs(meta['HEADING']) < 90 else 'DESCENDING' - N = float(meta['Y_FIRST']) - W = float(meta['X_FIRST']) - S = N + float(meta['Y_STEP']) * int(meta['LENGTH']) - E = W + float(meta['X_STEP']) * int(meta['WIDTH']) - - if meta['ORBIT_DIRECTION'] == 'ASCENDING': - meta['LAT_REF1'] = str(S) - meta['LAT_REF2'] = str(S) - meta['LAT_REF3'] = str(N) - meta['LAT_REF4'] = str(N) - meta['LON_REF1'] = str(W) - meta['LON_REF2'] = str(E) - meta['LON_REF3'] = str(W) - meta['LON_REF4'] = str(E) - else: - meta['LAT_REF1'] = str(N) - meta['LAT_REF2'] = str(N) - meta['LAT_REF3'] = str(S) - meta['LAT_REF4'] = str(S) - meta['LON_REF1'] = str(E) - meta['LON_REF2'] = str(W) - meta['LON_REF3'] = str(E) - meta['LON_REF4'] = str(W) + orbit_direction = _get_first(licsar_meta, 'orbit_direction', 'pass') + if orbit_direction: + meta['ORBIT_DIRECTION'] = orbit_direction.upper() + elif heading is not None: + meta['ORBIT_DIRECTION'] = 'ASCENDING' if abs(heading) < 90 else 'DESCENDING' + + try: + N = float(meta['Y_FIRST']) + W = float(meta['X_FIRST']) + S = N + float(meta['Y_STEP']) * int(meta['LENGTH']) + E = W + float(meta['X_STEP']) * int(meta['WIDTH']) + except (KeyError, TypeError, ValueError): + N = S = E = W = None + + if meta.get('ORBIT_DIRECTION') and None not in (N, S, E, W): + if meta['ORBIT_DIRECTION'] == 'ASCENDING': + meta['LAT_REF1'] = str(S) + meta['LAT_REF2'] = str(S) + meta['LAT_REF3'] = str(N) + meta['LAT_REF4'] = str(N) + meta['LON_REF1'] = str(W) 
+ meta['LON_REF2'] = str(E) + meta['LON_REF3'] = str(W) + meta['LON_REF4'] = str(E) + else: + meta['LAT_REF1'] = str(N) + meta['LAT_REF2'] = str(N) + meta['LAT_REF3'] = str(S) + meta['LAT_REF4'] = str(S) + meta['LON_REF1'] = str(E) + meta['LON_REF2'] = str(W) + meta['LON_REF3'] = str(E) + meta['LON_REF4'] = str(W) # Hard-coded metadata for Sentinel-1 - meta['PLATFORM'] = 'Sentinel-1' + meta['PLATFORM'] = 'Sen' meta['ANTENNA_SIDE'] = -1 meta['WAVELENGTH'] = SPEED_OF_LIGHT / sensor.SEN['carrier_frequency'] - meta['RANGE_PIXEL_SIZE'] = sensor.SEN['range_pixel_size'] - meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] + if 'RLOOKS' in meta: + looks = _safe_float(meta['RLOOKS']) + meta['RANGE_PIXEL_SIZE'] = sensor.SEN['range_pixel_size'] * looks if looks else sensor.SEN['range_pixel_size'] + else: + meta['RANGE_PIXEL_SIZE'] = sensor.SEN['range_pixel_size'] + if 'ALOOKS' in meta: + looks = _safe_float(meta['ALOOKS']) + meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] * looks if looks else sensor.SEN['azimuth_pixel_size'] + else: + meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] # Read baselines.txt for interferogram-specific metadata if is_ifg: baseline_file = os.path.join(os.path.dirname(fname), 'baselines.txt') - master_date = licsar_meta.get('master', 'Unknown') - slave_date = os.path.basename(fname).split('_')[1] # Extract slave date from filename + if not os.path.isfile(baseline_file): + raise FileNotFoundError(f'LiCSAR baseline file not found: {baseline_file}') + + pair_found = False with open(baseline_file) as f: for line in f: - fields = line.strip().split() - if fields[0] == master_date and fields[1] == slave_date: - meta['P_BASELINE_TOP_HDR'] = fields[2] - meta['P_BASELINE_BOTTOM_HDR'] = fields[3] - break - - meta['DATE12'] = f"{master_date[2:]}-{slave_date[2:]}" + text = line.strip() + if not text or text.startswith('#'): + continue + + fields = text.split() + if len(fields) < 4: + continue + + base_master = 
_parse_date(fields[0]) + base_slave = _parse_date(fields[1]) + if master_date and base_master and master_date != base_master: + continue + if slave_date and base_slave and slave_date != base_slave: + continue + + meta['P_BASELINE_TOP_HDR'] = fields[2] + meta['P_BASELINE_BOTTOM_HDR'] = fields[3] + pair_found = True + break + + if not pair_found: + warnings.warn( + f'Baseline info for {os.path.basename(fname)} was not found in {baseline_file}.', + category=UserWarning, + ) + + if master_date and slave_date: + meta['DATE12'] = f"{master_date.strftime('%y%m%d')}-{slave_date.strftime('%y%m%d')}" return meta From 6c3c00f7b9551a257d94341282c0c74af8ef7ef1 Mon Sep 17 00:00:00 2001 From: Mohammad Mohseni Aref Date: Wed, 15 Oct 2025 05:06:36 +0200 Subject: [PATCH 2/5] Add comprehensive LiCSAR support with E,N,U geometry and baseline integration This commit adds full LiCSAR product support to MintPy, including: 1. Enhanced baseline handling (prep_licsar.py): - Read LiCSAR baselines file with format: frame_master_date acquisition_date perp_baseline temporal_baseline - Calculate perpendicular baseline as difference between acquisition dates relative to frame master - Fixed DATE12 extraction to get both master and slave dates from YYYYMMDD_YYYYMMDD filename pattern - Fixed interferogram detection to recognize .unw. and .cc. file patterns - Add temporal baseline (T_BASELINE) calculation 2. E,N,U geometry components support (load_data.py): - Add basisEast, basisNorth, basisUp to GEOM_DSET_NAME2TEMPLATE_KEY mapping - Create enhance_licsar_geometry_file() function to calculate derived geometry datasets - Calculate incidenceAngle from E,N,U using utils0.incidence_angle_from_enu() - Calculate azimuthAngle from E,N,U using utils0.azimuth_angle_from_enu() - Calculate slantRangeDistance from incidenceAngle using utils0.incidence_angle2slant_range_distance() - Add satellite HEIGHT attribute (693 km for Sentinel-1) for slant range calculation 3. 
Geometry dataset names (stack.py): - Add basisEast, basisNorth, basisUp to GEOMETRY_DSET_NAMES list - Enable recognition of E,N,U components in geometry processing 4. XML parsing fix (readfile.py): - Handle PAMDataset XML files without ENVI metadata domain - Add None check before accessing metadata to prevent AttributeError Result: Complete LiCSAR workflow with 2000 interferograms, proper baselines, and full geometry including pixel-wise incidence/azimuth angles calculated from E,N,U basis vectors. --- src/mintpy/load_data.py | 93 +++++++++++++++++++++-- src/mintpy/objects/stack.py | 4 + src/mintpy/prep_licsar.py | 138 ++++++++++++++++++++++++----------- src/mintpy/utils/readfile.py | 22 +++--- 4 files changed, 196 insertions(+), 61 deletions(-) diff --git a/src/mintpy/load_data.py b/src/mintpy/load_data.py index 133e5d878..33e89284c 100644 --- a/src/mintpy/load_data.py +++ b/src/mintpy/load_data.py @@ -62,6 +62,10 @@ 'shadowMask': 'mintpy.load.shadowMaskFile', 'waterMask': 'mintpy.load.waterMaskFile', 'bperp': 'mintpy.load.bperpFile', + # LiCSAR-specific: E, N, U basis components for pixel-wise geometry calculation + 'basisEast': 'mintpy.load.basisEastFile', + 'basisNorth': 'mintpy.load.basisNorthFile', + 'basisUp': 'mintpy.load.basisUpFile', } @@ -156,12 +160,12 @@ def read_subset_box(iDict): pix_box, geo_box = subset.read_subset_template2box(iDict['template_file'][0]) # Grab required info to read input geo_box into pix_box - lookup_y_files = glob.glob(str(iDict['mintpy.load.lookupYFile'])) - lookup_x_files = glob.glob(str(iDict['mintpy.load.lookupXFile'])) - if len(lookup_y_files) > 0 and len(lookup_x_files) > 0: - lookup_file = [lookup_y_files[0], lookup_x_files[0]] - else: - lookup_file = None + lookup_file = None + if 'mintpy.load.lookupYFile' in iDict and 'mintpy.load.lookupXFile' in iDict: + lookup_y_files = glob.glob(str(iDict['mintpy.load.lookupYFile'])) + lookup_x_files = glob.glob(str(iDict['mintpy.load.lookupXFile'])) + if len(lookup_y_files) > 0 and 
len(lookup_x_files) > 0: + lookup_file = [lookup_y_files[0], lookup_x_files[0]] # use DEM file attribute as reference, because # 1) it is required AND @@ -435,6 +439,69 @@ def read_inps_dict2ifgram_stack_dict_object(iDict, ds_name2template_key): return stackObj +################################################################ +def enhance_licsar_geometry_file(geom_file): + """Calculate and add incidence/azimuth angles from E,N,U components for LiCSAR geometry. + + This function reads the E, N, U basis vector components from the geometry file + and calculates: + - incidenceAngle: from E,N,U using utils0.incidence_angle_from_enu() + - azimuthAngle: from E,N using utils0.azimuth_angle_from_enu() + - slantRangeDistance: from incidenceAngle using utils0.incidence_angle2slant_range_distance() + + Parameters: geom_file - str, path to the geometry HDF5 file + Returns: None (modifies the file in place) + """ + import h5py + from mintpy.utils import utils0 + + print(f'Enhancing LiCSAR geometry file: {geom_file}') + + # Read E, N, U components + with h5py.File(geom_file, 'r') as f: + if not all(x in f.keys() for x in ['basisEast', 'basisNorth', 'basisUp']): + print('Warning: E, N, U components not found in geometry file') + return + + print('Reading E, N, U components...') + e_data = f['basisEast'][:] + n_data = f['basisNorth'][:] + u_data = f['basisUp'][:] + atr = dict(f.attrs) + + # Calculate incidence angle from E, N, U + print('Calculating incidence angle from E, N, U...') + inc_angle = utils0.incidence_angle_from_enu(e_data, n_data, u_data) + + # Calculate azimuth angle from E, N, U + print('Calculating azimuth angle from E, N, U...') + az_angle = utils0.azimuth_angle_from_enu(e_data, n_data, u_data) + + # Calculate slant range distance from incidence angle + print('Calculating slant range distance...') + # Add HEIGHT attribute if not present (satellite altitude) + if 'HEIGHT' not in atr: + # For Sentinel-1, use approximate altitude of 693 km + atr['HEIGHT'] = 693000 # 
in meters + slant_range = utils0.incidence_angle2slant_range_distance(atr, inc_angle) + + # Write the calculated datasets back to the file + print('Adding calculated datasets to geometry file...') + with h5py.File(geom_file, 'a') as f: + # Remove existing datasets if they exist + for dname in ['incidenceAngle', 'azimuthAngle', 'slantRangeDistance']: + if dname in f.keys(): + del f[dname] + + # Create new datasets + f.create_dataset('incidenceAngle', data=inc_angle, compression='lzf') + f.create_dataset('azimuthAngle', data=az_angle, compression='lzf') + f.create_dataset('slantRangeDistance', data=slant_range, compression='lzf') + + print('Enhanced LiCSAR geometry file with incidence/azimuth angles and slant range distance') + + +################################################################ def read_inps_dict2geometry_dict_object(iDict, dset_name2template_key): """Read input arguments into geometryDict object(s). @@ -452,10 +519,14 @@ def read_inps_dict2geometry_dict_object(iDict, dset_name2template_key): # for processors with lookup table in geo-coordinates, remove latitude/longitude dset_name2template_key.pop('latitude') dset_name2template_key.pop('longitude') - elif iDict['processor'] in ['aria', 'gmtsar', 'hyp3', 'snap', 'cosicorr','licsar']: + elif iDict['processor'] in ['aria', 'gmtsar', 'hyp3', 'snap', 'cosicorr']: # for processors with geocoded products support only, do nothing for now. 
# check again when adding products support in radar-coordiantes pass + elif iDict['processor'] == 'licsar': + # LiCSAR provides E,N,U components for pixel-wise geometry calculation + # The special handling is done in read_inps_dict2geometry_dict_object() + pass else: print('Un-recognized InSAR processor: {}'.format(iDict['processor'])) @@ -517,6 +588,8 @@ def read_inps_dict2geometry_dict_object(iDict, dset_name2template_key): geomGeoObj = None geomRadarObj = None + + # Standard geometry handling if len(dsGeoPathDict) > 0: geomGeoObj = geometryDict( processor=iDict['processor'], @@ -689,6 +762,12 @@ def load_data(inps): ystep=iDict['ystep'], compression='lzf', extra_metadata=extraDict) + + # For LiCSAR: Calculate incidence/azimuth angles from E,N,U components + if iDict['processor'] == 'licsar' and geom_geo_obj is not None: + if all(x in geom_geo_obj.datasetDict.keys() for x in ['basisEast', 'basisNorth', 'basisUp']): + print('Calculating incidence and azimuth angles from E, N, U components...') + enhance_licsar_geometry_file(geom_geo_file) if run_or_skip(geom_radar_file, geom_radar_obj, iDict['box'], **kwargs) == 'run': geom_radar_obj.write2hdf5( diff --git a/src/mintpy/objects/stack.py b/src/mintpy/objects/stack.py index 56bcd012f..de303b6c8 100644 --- a/src/mintpy/objects/stack.py +++ b/src/mintpy/objects/stack.py @@ -58,6 +58,10 @@ 'waterMask', 'commonMask', 'bperp', + # LiCSAR-specific: basis vector components + 'basisEast', + 'basisNorth', + 'basisUp', ] IFGRAM_DSET_NAMES = [ diff --git a/src/mintpy/prep_licsar.py b/src/mintpy/prep_licsar.py index 9e04bff1b..66f4f643e 100644 --- a/src/mintpy/prep_licsar.py +++ b/src/mintpy/prep_licsar.py @@ -39,15 +39,19 @@ def _parse_date(date_str): return None -def _extract_slave_date_from_fname(fname, master_date): - """Infer the slave date from the file name when it is not present in metadata.""" - +def _extract_dates_from_fname(fname): + """Extract both master and slave dates from interferogram filename. 
+ + LiCSAR interferogram filenames follow the pattern: YYYYMMDD_YYYYMMDD.geo.unw.tif + where the first date is the master and the second is the slave. + """ name = os.path.basename(fname) - for match in re.findall(r'\d{6,8}', name): - candidate = _parse_date(match) - if candidate and candidate != master_date: - return candidate - return None + dates = re.findall(r'\d{8}', name) + if len(dates) >= 2: + master = _parse_date(dates[0]) + slave = _parse_date(dates[1]) + return master, slave + return None, None def _read_key_value_file(meta_file, separators=('=', ':')): @@ -96,17 +100,28 @@ def add_licsar_metadata(fname, meta, is_ifg=True): meta - dict, updated metadata ''' - # Read metadata.txt + # Read metadata.txt - first try same directory, then parent directory meta_file = os.path.join(os.path.dirname(fname), 'metadata.txt') + if not os.path.isfile(meta_file): + # Try parent directory (for LiCSAR structure where metadata is in GEOC/) + meta_file = os.path.join(os.path.dirname(os.path.dirname(fname)), 'metadata.txt') licsar_meta = _read_key_value_file(meta_file) # Add universal metadata - meta['PROCESSOR'] = 'LiCSAR' - - master_date = _parse_date(_get_first(licsar_meta, 'master', 'masterdate')) - slave_date = _parse_date(_get_first(licsar_meta, 'slave', 'slavedate')) - if slave_date is None: - slave_date = _extract_slave_date_from_fname(fname, master_date) + meta['PROCESSOR'] = 'licsar' + + # For interferograms, extract dates from filename first (YYYYMMDD_YYYYMMDD pattern) + # For geometry files, try to get from metadata + fname_master, fname_slave = _extract_dates_from_fname(fname) + + if fname_master and fname_slave: + # Interferogram with dates in filename + master_date = fname_master + slave_date = fname_slave + else: + # Geometry file or other product - use metadata + master_date = _parse_date(_get_first(licsar_meta, 'master', 'masterdate')) + slave_date = _parse_date(_get_first(licsar_meta, 'slave', 'slavedate')) if master_date: meta['MASTER_DATE'] = 
master_date.strftime('%Y%m%d') @@ -189,38 +204,70 @@ def add_licsar_metadata(fname, meta, is_ifg=True): else: meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] - # Read baselines.txt for interferogram-specific metadata + # Read baselines file for interferogram-specific metadata + # LiCSAR baselines file format: frame_master_date acquisition_date perp_baseline temporal_baseline + # For an interferogram between dates A and B, we need baselines for both dates relative to frame master if is_ifg: baseline_file = os.path.join(os.path.dirname(fname), 'baselines.txt') if not os.path.isfile(baseline_file): - raise FileNotFoundError(f'LiCSAR baseline file not found: {baseline_file}') - - pair_found = False - with open(baseline_file) as f: - for line in f: - text = line.strip() - if not text or text.startswith('#'): - continue - - fields = text.split() - if len(fields) < 4: - continue - - base_master = _parse_date(fields[0]) - base_slave = _parse_date(fields[1]) - if master_date and base_master and master_date != base_master: - continue - if slave_date and base_slave and slave_date != base_slave: - continue - - meta['P_BASELINE_TOP_HDR'] = fields[2] - meta['P_BASELINE_BOTTOM_HDR'] = fields[3] - pair_found = True - break - - if not pair_found: + # Try parent directory (for LiCSAR structure where baselines is in GEOC/) + baseline_file = os.path.join(os.path.dirname(os.path.dirname(fname)), 'baselines.txt') + if not os.path.isfile(baseline_file): + baseline_file = os.path.join(os.path.dirname(fname), 'baselines') + if not os.path.isfile(baseline_file): + baseline_file = os.path.join(os.path.dirname(os.path.dirname(fname)), 'baselines') + + if os.path.isfile(baseline_file): + # Read all baselines into a dictionary: {acquisition_date: (perp_baseline, temporal_baseline)} + baselines_dict = {} + frame_master = None + with open(baseline_file) as f: + for line in f: + text = line.strip() + if not text or text.startswith('#'): + continue + + fields = text.split() + if 
len(fields) < 4: + continue + + if frame_master is None: + frame_master = _parse_date(fields[0]) + + acq_date = _parse_date(fields[1]) + if acq_date: + try: + perp_baseline = float(fields[2]) + temp_baseline = float(fields[3]) + baselines_dict[acq_date] = (perp_baseline, temp_baseline) + except ValueError: + continue + + # For interferogram, compute baseline between master and slave dates + if master_date and slave_date and master_date in baselines_dict and slave_date in baselines_dict: + perp_master, _ = baselines_dict[master_date] + perp_slave, _ = baselines_dict[slave_date] + + # Perpendicular baseline is the difference + perp_baseline = perp_slave - perp_master + meta['P_BASELINE_TOP_HDR'] = str(perp_baseline) + meta['P_BASELINE_BOTTOM_HDR'] = str(perp_baseline) + + # Temporal baseline in days + if master_date and slave_date: + temp_baseline = (slave_date - master_date).days + meta['T_BASELINE'] = str(temp_baseline) + else: + # Baseline not found for this pair + if master_date and slave_date: + warnings.warn( + f'Baseline info for {master_date.strftime("%Y%m%d")}_{slave_date.strftime("%Y%m%d")} ' + f'not found in {baseline_file}.', + category=UserWarning, + ) + else: warnings.warn( - f'Baseline info for {os.path.basename(fname)} was not found in {baseline_file}.', + f'LiCSAR baseline file not found: {baseline_file}', category=UserWarning, ) @@ -238,7 +285,10 @@ def prep_licsar(inps): # For each filename, generate metadata rsc file for fname in inps.file: - is_ifg = any([x in fname for x in ['unw_phase', 'corr']]) + # Detect if file is interferogram or coherence + # LiCSAR: .geo.unw.tif or .geo.cc.tif + # ISCE: unw_phase, corr + is_ifg = any([x in fname for x in ['unw_phase', 'corr', '.unw.', '.cc.']]) meta = readfile.read_gdal_vrt(fname) meta = add_licsar_metadata(fname, meta, is_ifg=is_ifg) diff --git a/src/mintpy/utils/readfile.py b/src/mintpy/utils/readfile.py index 677a3325d..73d327aa4 100644 --- a/src/mintpy/utils/readfile.py +++ 
b/src/mintpy/utils/readfile.py @@ -713,8 +713,8 @@ def read_binary_file(fname, datasetName=None, box=None, xstep=1, ystep=1): if 'byte order' in atr.keys() and atr['byte order'] == '0': byte_order = 'little-endian' - # GDAL / GMTSAR / ASF HyP3 - elif processor in ['gdal', 'gmtsar', 'hyp3', 'cosicorr', 'uavsar']: + # GDAL / GMTSAR / ASF HyP3 / LiCSAR + elif processor in ['gdal', 'gmtsar', 'hyp3', 'cosicorr', 'uavsar', 'licsar']: # try to recognize custom dataset names if specified and recognized. if datasetName: slice_list = get_slice_list(fname) @@ -732,7 +732,7 @@ def read_binary_file(fname, datasetName=None, box=None, xstep=1, ystep=1): xstep=xstep, ystep=ystep, ) - if processor in ['gdal', 'gmtsar', 'hyp3', 'cosicorr']: + if processor in ['gdal', 'gmtsar', 'hyp3', 'cosicorr', 'licsar']: data = read_gdal(fname, **kwargs) else: @@ -1629,13 +1629,15 @@ def read_isce_xml(fname): # PAMDataset, e.g. hgt.rdr.aux.xml elif root.tag == 'PAMDataset': meta = root.find("./Metadata[@domain='ENVI']") - for child in meta.findall("MDI"): - key = child.get('key') - value = child.text - xmlDict[key] = value - - # data_type - xmlDict['data_type'] = DATA_TYPE_ENVI2NUMPY[xmlDict['data_type']] + if meta is not None: + for child in meta.findall("MDI"): + key = child.get('key') + value = child.text + xmlDict[key] = value + + # data_type + if 'data_type' in xmlDict: + xmlDict['data_type'] = DATA_TYPE_ENVI2NUMPY[xmlDict['data_type']] # NoDataValue meta = root.find("./PAMRasterBand/NoDataValue") From 6eba90013b58400041b0b7362bce3a4a193375cb Mon Sep 17 00:00:00 2001 From: Mohammad Mohseni Aref Date: Wed, 15 Oct 2025 05:10:27 +0200 Subject: [PATCH 3/5] Add utility functions for E,N,U geometry calculations and CLI entry point MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds: 1. 
New utility functions in utils0.py: - incidence_angle_from_enu(): Calculate incidence angle from E,N,U unit vectors using θ = arccos(|U|) - azimuth_angle_from_enu(): Calculate azimuth angle from E,N,U using α = atan2(E, N) Both functions follow MintPy coordinate conventions and include quality masking 2. CLI entry point in pyproject.toml: - Add create_licsar_geometry.py command for standalone geometry file creation 3. Standalone geometry creation tool (create_licsar_geometry.py): - Command-line tool to create enhanced LiCSAR geometry files - Reads E,N,U components and calculates all required geometry datasets - Can be used independently or called from load_data.py --- pyproject.toml | 1 + src/mintpy/create_licsar_geometry.py | 311 +++++++++++++++++++++++++++ src/mintpy/utils/utils0.py | 62 ++++++ 3 files changed, 374 insertions(+) create mode 100644 src/mintpy/create_licsar_geometry.py diff --git a/pyproject.toml b/pyproject.toml index c241c6ed6..225076587 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ Issues = "https://github.com/insarlab/MintPy/issues" "add.py" = "mintpy.cli.add:main" "asc_desc2horz_vert.py" = "mintpy.cli.asc_desc2horz_vert:main" "closure_phase_bias.py" = "mintpy.cli.closure_phase_bias:main" +"create_licsar_geometry.py" = "mintpy.cli.create_licsar_geometry:main" "dem_error.py" = "mintpy.cli.dem_error:main" "dem_gsi.py" = "mintpy.cli.dem_gsi:main" "diff.py" = "mintpy.cli.diff:main" diff --git a/src/mintpy/create_licsar_geometry.py b/src/mintpy/create_licsar_geometry.py new file mode 100644 index 000000000..86ca398e2 --- /dev/null +++ b/src/mintpy/create_licsar_geometry.py @@ -0,0 +1,311 @@ +#!/usr/bin/env python3 +############################################################ +# Program is part of MintPy # +# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi # +# Author: Your Name, Oct 2025 # +############################################################ + + +import os +import sys +import time +import h5py +import numpy as np 
+import warnings + +from mintpy.constants import SPEED_OF_LIGHT +from mintpy.objects import sensor +from mintpy.objects.stackDict import geometryDict +from mintpy.utils import readfile, utils0 as ut, utils1, writefile, arg_utils + + +############################################################### +def create_parser(): + """Create command line parser.""" + parser = arg_utils.create_argument_parser( + name='create_licsar_geometry', + synopsis='Create a geometry HDF5 file from LiCSAR E,N,U component files', + ) + + # input + parser.add_argument('--geom-dir', '-g', dest='geom_dir', type=str, required=True, + help='Directory containing the LiCSAR geometry files (*.geo.{hgt,E,N,U}.tif)') + parser.add_argument('--output', '-o', dest='output_file', type=str, default='geometryGeo.h5', + help='Output file name (default: %(default)s)') + parser.add_argument('--update', dest='update_mode', action='store_true', + help='Enable update mode, and skip datasets existed in the output file.') + + return parser + + +def cmd_line_parse(iargs=None): + """Command line parser.""" + parser = create_parser() + inps = parser.parse_args(args=iargs) + return inps + + +######################################################################### +def create_licsar_geometry_file(geom_dir, out_file='geometryGeo.h5', update_mode=False, print_msg=True): + """Create a comprehensive geometry file from LiCSAR products using MintPy's built-in functions. + + Parameters: + geom_dir - str, directory containing LiCSAR geometry files (.geo.E.tif, .geo.N.tif, etc.) 
+ out_file - str, output geometry file name + update_mode - bool, if True, skip writing existing datasets in the output file + print_msg - bool, print processing messages if True + Returns: + out_file - str, absolute path of output geometry file + """ + vprint = print if print_msg else lambda *args, **kwargs: None + + vprint(f'Creating geometry file from LiCSAR data in: {geom_dir}') + + # Find LiCSAR geometry files + pattern = os.path.join(geom_dir, '*.geo.*.tif') + geo_files = utils1.get_file_list(pattern, abspath=True) + + if not geo_files: + raise FileNotFoundError(f'No LiCSAR geometry files found in {geom_dir}') + + # Categorize files + height_file = None + e_file = None + n_file = None + u_file = None + + for geo_file in geo_files: + basename = os.path.basename(geo_file).lower() + if '.geo.hgt.' in basename or '.geo.dem.' in basename: + height_file = geo_file + elif '.geo.e.' in basename: + e_file = geo_file + elif '.geo.n.' in basename: + n_file = geo_file + elif '.geo.u.' in basename: + u_file = geo_file + + # Check required files + if not height_file: + vprint("WARNING: No height/DEM file found. 
Elevation-dependent incidence angle won't be available.") + + # Read metadata + if height_file: + atr_dict = readfile.read_attribute(height_file) + elif e_file: + atr_dict = readfile.read_attribute(e_file) + else: + raise FileNotFoundError('At least one of height or E component file is required') + + # Initialize dataset dictionary for geometryDict + ds_dict = {} + + # Add DEM/height + if height_file: + ds_dict['height'] = height_file + vprint(f'height: {height_file}') + + # Read E, N, U components for LOS calculation + e_data = None + n_data = None + u_data = None + + if all([e_file, n_file, u_file]): + vprint('Reading E, N, U components for pixel-wise geometry calculations...') + try: + # Read using GDAL directly to avoid XML parsing issues + from osgeo import gdal + ds_e = gdal.Open(e_file, gdal.GA_ReadOnly) + ds_n = gdal.Open(n_file, gdal.GA_ReadOnly) + ds_u = gdal.Open(u_file, gdal.GA_ReadOnly) + + e_data = ds_e.GetRasterBand(1).ReadAsArray() + n_data = ds_n.GetRasterBand(1).ReadAsArray() + u_data = ds_u.GetRasterBand(1).ReadAsArray() + + ds_e = None + ds_n = None + ds_u = None + + except Exception as e: + vprint(f'Error reading E,N,U components: {str(e)}') + vprint('Trying alternative readfile method...') + e_data, e_meta = readfile.read(e_file) + n_data, n_meta = readfile.read(n_file) + u_data, u_meta = readfile.read(u_file) + + # Save basis vector components as well (useful for advanced analysis) + ds_dict['basisEast'] = e_file + ds_dict['basisNorth'] = n_file + ds_dict['basisUp'] = u_file + vprint(f'E component: {e_file}') + vprint(f'N component: {n_file}') + vprint(f'U component: {u_file}') + else: + vprint('WARNING: E, N, U component files not all available') + vprint('Calculation of pixel-wise incidence/azimuth angles and slant range will be limited') + if any([e_file, n_file, u_file]): + missing = [] + if not e_file: + missing.append('E') + if not n_file: + missing.append('N') + if not u_file: + missing.append('U') + vprint(f'Missing components: {", 
".join(missing)}') + + # Check if output file already exists in update mode + if update_mode and os.path.isfile(out_file): + vprint(f'Update mode is enabled, skip datasets already in {out_file}') + ds_dict_out = {} + for key in ds_dict.keys(): + if key not in readfile.get_dataset_list(out_file): + ds_dict_out[key] = ds_dict[key] + ds_dict = ds_dict_out + + # Create output directory if it doesn't exist + out_dir = os.path.dirname(os.path.abspath(out_file)) + if not os.path.isdir(out_dir): + os.makedirs(out_dir) + vprint(f'Created directory: {out_dir}') + + # Create geometryDict object + geom_obj = geometryDict( + processor='licsar', + datasetDict=ds_dict, + extraMetadata=atr_dict, + ) + + # Write to HDF5 file (this automatically sets the output file to absolute path) + out_file = geom_obj.write2hdf5(outputFile=out_file, access_mode='a', compression=None) + + # If E, N, U components are available, calculate pixel-wise incidence/azimuth angles + if all([e_data is not None, n_data is not None, u_data is not None]): + vprint('Calculating pixel-wise geometry from E, N, U components...') + + # Calculate incidence angle using MintPy's utility function + inc_angle = ut.incidence_angle_from_enu(e_data, n_data, u_data) + az_angle = ut.azimuth_angle_from_enu(e_data, n_data, u_data) + + if inc_angle is not None and az_angle is not None: + vprint(f'Incidence angle range: {np.nanmin(inc_angle):.2f}° - {np.nanmax(inc_angle):.2f}°') + vprint(f'Azimuth angle range: {np.nanmin(az_angle):.2f}° - {np.nanmax(az_angle):.2f}°') + + # Calculate slant range using MintPy function + temp_atr = atr_dict.copy() + temp_atr.update({ + 'HEIGHT': '693000', # Sentinel-1 approximate altitude in meters + 'EARTH_RADIUS': str(ut.EARTH_RADIUS), # Use MintPy's standard Earth radius + }) + slant_range = ut.incidence_angle2slant_range_distance(temp_atr, inc_angle) + vprint(f'Slant range: {np.nanmin(slant_range):.0f}m - {np.nanmax(slant_range):.0f}m') + + # Calculate lat/lon if not already in geometry file + 
length, width = inc_angle.shape + try: + lat, lon = ut.get_lat_lon(atr_dict, dimension=2) + except Exception as e: + vprint(f'WARNING: Could not extract lat/lon from metadata: {str(e)}') + lat, lon = None, None + + # Create shadow mask (simplified - areas with no valid incidence angle) + shadow_mask = np.zeros((length, width), dtype=np.bool_) + if inc_angle is not None: + shadow_mask = np.isnan(inc_angle) + + # Add calculated datasets to HDF5 file + with h5py.File(out_file, 'a') as f: + # incidence angle (required) + if 'incidenceAngle' not in f.keys() or not update_mode: + vprint('Adding incidenceAngle dataset') + writefile.write_hdf5_block( + out_file, + data=inc_angle, + datasetName='incidenceAngle', + block=[0, 0, length, width], + ) + + # azimuth angle (required) + if 'azimuthAngle' not in f.keys() or not update_mode: + vprint('Adding azimuthAngle dataset') + writefile.write_hdf5_block( + out_file, + data=az_angle, + datasetName='azimuthAngle', + block=[0, 0, length, width], + ) + + # slant range distance (required) + if 'slantRangeDistance' not in f.keys() or not update_mode: + vprint('Adding slantRangeDistance dataset') + writefile.write_hdf5_block( + out_file, + data=slant_range, + datasetName='slantRangeDistance', + block=[0, 0, length, width], + ) + + # shadow mask (required) + if 'shadowMask' not in f.keys() or not update_mode: + vprint('Adding shadowMask dataset') + writefile.write_hdf5_block( + out_file, + data=shadow_mask, + datasetName='shadowMask', + block=[0, 0, length, width], + ) + + # latitude and longitude (required for geo files) + if lat is not None and lon is not None: + if 'latitude' not in f.keys() or not update_mode: + vprint('Adding latitude dataset') + writefile.write_hdf5_block( + out_file, + data=lat, + datasetName='latitude', + block=[0, 0, length, width], + ) + + if 'longitude' not in f.keys() or not update_mode: + vprint('Adding longitude dataset') + writefile.write_hdf5_block( + out_file, + data=lon, + datasetName='longitude', + 
block=[0, 0, length, width], + ) + + # Final check of required datasets + with h5py.File(out_file, 'r') as f: + available_datasets = list(f.keys()) + + required_datasets = ['height', 'incidenceAngle', 'azimuthAngle', + 'slantRangeDistance', 'latitude', 'longitude', 'shadowMask'] + + missing_datasets = [ds for ds in required_datasets if ds not in available_datasets] + if missing_datasets: + vprint(f"WARNING: The following required datasets are missing: {', '.join(missing_datasets)}") + else: + vprint(f"SUCCESS: All required datasets are present in {out_file}") + + vprint(f'Geometry file created: {out_file}') + return out_file + + +######################################################################### +def main(iargs=None): + inps = cmd_line_parse(iargs) + + # Create geometry file + create_licsar_geometry_file( + geom_dir=inps.geom_dir, + out_file=inps.output_file, + update_mode=inps.update_mode, + ) + + return + + +######################################################################### +if __name__ == '__main__': + main(sys.argv[1:]) \ No newline at end of file diff --git a/src/mintpy/utils/utils0.py b/src/mintpy/utils/utils0.py index 2d36f0d12..af26d7f8d 100644 --- a/src/mintpy/utils/utils0.py +++ b/src/mintpy/utils/utils0.py @@ -779,6 +779,68 @@ def calc_azimuth_from_east_north_obs(east, north): return az_angle +def incidence_angle_from_enu(e_data, n_data, u_data): + """Calculate incidence angle from LiCSAR East-North-Up unit vector components. 
+ + For LiCSAR E,N,U unit vectors representing line-of-sight direction: + + θ = arccos(|U| / √(E² + N² + U²)) + + Since E,N,U are unit vectors: √(E² + N² + U²) ≈ 1 + Simplified: θ = arccos(|U|) + + Parameters: e_data, n_data, u_data - 2D np.arrays, LiCSAR unit vectors in radar LOS direction + Returns: inc_angle - 2D np.array, incidence angle in degrees + """ + if e_data is None or n_data is None or u_data is None: + return None + + # Calculate vector magnitude for quality control + magnitude = np.sqrt(e_data**2 + n_data**2 + u_data**2) + + # Calculate incidence angle using the U component + # θ = arccos(|U|) for unit vectors + inc_angle = np.rad2deg(np.arccos(np.abs(u_data))) + + # Apply quality mask for proper unit vectors + mask = magnitude > 0.5 # Filter out low-quality data + inc_angle = np.where(mask, inc_angle, np.nan) + + return inc_angle.astype(np.float32) + + +def azimuth_angle_from_enu(e_data, n_data, u_data): + """Calculate azimuth angle from LiCSAR East-North-Up unit vector components. 
+
+    For LiCSAR E,N,U unit vectors representing line-of-sight direction:
+
+    α = atan2(E, N)
+
+    Coordinate convention (following MintPy standards):
+    - North = 0°, East = 90°, South = 180°, West = 270°
+
+    Parameters: e_data, n_data, u_data - 2D np.arrays, LiCSAR unit vectors in radar LOS direction
+    Returns: az_angle - 2D np.array, azimuth angle in degrees [0, 360)
+    """
+    if e_data is None or n_data is None:
+        return None
+
+    # Calculate azimuth angle using atan2
+    # atan2(E, N) gives angle from North, clockwise positive (consistent with North = 0°, East = 90°)
+    az_angle = np.rad2deg(np.arctan2(e_data, n_data))
+
+    # Ensure output is in [0, 360) range
+    az_angle = np.where(az_angle < 0, az_angle + 360, az_angle)
+
+    # Apply quality mask if U component is available
+    if u_data is not None:
+        magnitude = np.sqrt(e_data**2 + n_data**2 + u_data**2)
+        mask = magnitude > 0.5
+        az_angle = np.where(mask, az_angle, np.nan)
+
+    return az_angle.astype(np.float32)
+
+
 def get_unit_vector4component_of_interest(los_inc_angle, los_az_angle, comp='enu2los', horz_az_angle=None):
     """Get the unit vector for the component of interest.
Parameters: los_inc_angle - np.ndarray or float, incidence angle from vertical, in the unit of degrees From a3e72c25188ec9dc8986770e97a61130feda10c7 Mon Sep 17 00:00:00 2001 From: Mohammad Mohseni Aref Date: Wed, 15 Oct 2025 05:35:32 +0200 Subject: [PATCH 4/5] Add latitude and longitude grid generation to LiCSAR geometry files - Update enhance_licsar_geometry_file() to generate 2D lat/lon grids - Use X_FIRST, X_STEP, Y_FIRST, Y_STEP from metadata - Grids match gdalinfo corner coordinates - Float32 format with lzf compression - Tested and verified with test_latlon_simple.py --- src/mintpy/load_data.py | 33 ++++++++++++++++++++++++++++++--- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/src/mintpy/load_data.py b/src/mintpy/load_data.py index 33e89284c..c672b6b02 100644 --- a/src/mintpy/load_data.py +++ b/src/mintpy/load_data.py @@ -441,18 +441,21 @@ def read_inps_dict2ifgram_stack_dict_object(iDict, ds_name2template_key): ################################################################ def enhance_licsar_geometry_file(geom_file): - """Calculate and add incidence/azimuth angles from E,N,U components for LiCSAR geometry. + """Calculate and add incidence/azimuth angles and lat/lon grids from E,N,U components for LiCSAR geometry. 
This function reads the E, N, U basis vector components from the geometry file and calculates: - incidenceAngle: from E,N,U using utils0.incidence_angle_from_enu() - azimuthAngle: from E,N using utils0.azimuth_angle_from_enu() - slantRangeDistance: from incidenceAngle using utils0.incidence_angle2slant_range_distance() + - latitude: 2D grid from Y_FIRST and Y_STEP metadata + - longitude: 2D grid from X_FIRST and X_STEP metadata Parameters: geom_file - str, path to the geometry HDF5 file Returns: None (modifies the file in place) """ import h5py + import numpy as np from mintpy.utils import utils0 print(f'Enhancing LiCSAR geometry file: {geom_file}') @@ -485,11 +488,33 @@ def enhance_licsar_geometry_file(geom_file): atr['HEIGHT'] = 693000 # in meters slant_range = utils0.incidence_angle2slant_range_distance(atr, inc_angle) + # Generate latitude and longitude grids from metadata + print('Generating latitude and longitude grids...') + length = int(atr.get('LENGTH', e_data.shape[0])) + width = int(atr.get('WIDTH', e_data.shape[1])) + + # Get geographic coordinates from metadata + y_first = float(atr.get('Y_FIRST', 0)) + y_step = float(atr.get('Y_STEP', 0)) + x_first = float(atr.get('X_FIRST', 0)) + x_step = float(atr.get('X_STEP', 0)) + + # Create 1D arrays for lat/lon + lat_1d = y_first + np.arange(length) * y_step + lon_1d = x_first + np.arange(width) * x_step + + # Create 2D grids + lon_2d, lat_2d = np.meshgrid(lon_1d, lat_1d) + + # Convert to float32 to save space + lat_2d = lat_2d.astype(np.float32) + lon_2d = lon_2d.astype(np.float32) + # Write the calculated datasets back to the file print('Adding calculated datasets to geometry file...') with h5py.File(geom_file, 'a') as f: # Remove existing datasets if they exist - for dname in ['incidenceAngle', 'azimuthAngle', 'slantRangeDistance']: + for dname in ['incidenceAngle', 'azimuthAngle', 'slantRangeDistance', 'latitude', 'longitude']: if dname in f.keys(): del f[dname] @@ -497,8 +522,10 @@ def 
enhance_licsar_geometry_file(geom_file): f.create_dataset('incidenceAngle', data=inc_angle, compression='lzf') f.create_dataset('azimuthAngle', data=az_angle, compression='lzf') f.create_dataset('slantRangeDistance', data=slant_range, compression='lzf') + f.create_dataset('latitude', data=lat_2d, compression='lzf') + f.create_dataset('longitude', data=lon_2d, compression='lzf') - print('Enhanced LiCSAR geometry file with incidence/azimuth angles and slant range distance') + print('Enhanced LiCSAR geometry file with incidence/azimuth angles, slant range distance, and lat/lon grids') ################################################################ From 516603c9e22b046051d6e814d096cf2bb1470d6c Mon Sep 17 00:00:00 2001 From: Mohammad Mohseni Aref Date: Wed, 15 Oct 2025 06:22:00 +0200 Subject: [PATCH 5/5] Calculate ALOOKS and RLOOKS from ground pixel spacing - RLOOKS = (X_STEP in meters) / (range pixel spacing in ground range) - ALOOKS = (Y_STEP in meters) / (azimuth pixel spacing) - Convert slant range spacing to ground range: slant / sin(incidence) - Use native Sentinel-1 pixel spacing: range=2.3m, azimuth=13.9m - Fixes KeyError: 'ALOOKS' during ifgram_inversion step - Correctly accounts for multilooking in geocoded LiCSAR products --- src/mintpy/prep_licsar.py | 70 +++++++++++++++++++++++++++++++++------ 1 file changed, 60 insertions(+), 10 deletions(-) diff --git a/src/mintpy/prep_licsar.py b/src/mintpy/prep_licsar.py index 66f4f643e..98327801c 100644 --- a/src/mintpy/prep_licsar.py +++ b/src/mintpy/prep_licsar.py @@ -193,16 +193,66 @@ def add_licsar_metadata(fname, meta, is_ifg=True): meta['PLATFORM'] = 'Sen' meta['ANTENNA_SIDE'] = -1 meta['WAVELENGTH'] = SPEED_OF_LIGHT / sensor.SEN['carrier_frequency'] - if 'RLOOKS' in meta: - looks = _safe_float(meta['RLOOKS']) - meta['RANGE_PIXEL_SIZE'] = sensor.SEN['range_pixel_size'] * looks if looks else sensor.SEN['range_pixel_size'] - else: - meta['RANGE_PIXEL_SIZE'] = sensor.SEN['range_pixel_size'] - if 'ALOOKS' in meta: - 
looks = _safe_float(meta['ALOOKS']) - meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] * looks if looks else sensor.SEN['azimuth_pixel_size'] - else: - meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] + + # Calculate ALOOKS and RLOOKS from ground pixel spacing + # LOOKS = (desired ground pixel size) / (native single-look pixel spacing) + # For Sentinel-1: + # - range pixel spacing (slant) = 2.3 m + # - range pixel spacing (ground) = slant / sin(incidence) + # - azimuth pixel spacing = 13.9 m + import math + + if 'RLOOKS' not in meta: + if 'X_STEP' in meta: + x_step_deg = abs(_safe_float(meta['X_STEP'])) + if x_step_deg: + # Convert X_STEP from degrees to meters + lat = _safe_float(meta.get('Y_FIRST', 0)) + x_step_m = x_step_deg * 111320 * math.cos(math.radians(lat)) if lat else x_step_deg * 111320 + + # Get ground range pixel spacing + # slant range spacing = 2.3 m, convert to ground range using incidence angle + inc_angle = _safe_float(meta.get('AVG_INCIDENCE_ANGLE', 37.0)) # default 37° if not available + range_spacing_ground = sensor.SEN['range_pixel_size'] / math.sin(math.radians(inc_angle)) + + # Calculate looks + meta['RLOOKS'] = str(int(round(x_step_m / range_spacing_ground))) + else: + meta['RLOOKS'] = '1' + else: + meta['RLOOKS'] = '1' + + if 'ALOOKS' not in meta: + if 'Y_STEP' in meta: + y_step_deg = abs(_safe_float(meta['Y_STEP'])) + if y_step_deg: + # Convert Y_STEP from degrees to meters (constant 111320 m/degree) + y_step_m = y_step_deg * 111320 + + # Azimuth pixel spacing from sensor definition + az_spacing = sensor.SEN['azimuth_pixel_size'] + + # Calculate looks + meta['ALOOKS'] = str(int(round(y_step_m / az_spacing))) + else: + meta['ALOOKS'] = '1' + else: + meta['ALOOKS'] = '1' + + # Ensure pixel sizes are set (use values from .rsc if available, otherwise calculate from looks) + if 'RANGE_PIXEL_SIZE' not in meta: + if 'RLOOKS' in meta: + looks = _safe_float(meta['RLOOKS']) + meta['RANGE_PIXEL_SIZE'] = 
sensor.SEN['range_pixel_size'] * looks if looks else sensor.SEN['range_pixel_size'] + else: + meta['RANGE_PIXEL_SIZE'] = sensor.SEN['range_pixel_size'] + + if 'AZIMUTH_PIXEL_SIZE' not in meta: + if 'ALOOKS' in meta: + looks = _safe_float(meta['ALOOKS']) + meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] * looks if looks else sensor.SEN['azimuth_pixel_size'] + else: + meta['AZIMUTH_PIXEL_SIZE'] = sensor.SEN['azimuth_pixel_size'] # Read baselines file for interferogram-specific metadata # LiCSAR baselines file format: frame_master_date acquisition_date perp_baseline temporal_baseline