From 589a17537d5abf1184795160de6a6e2d99583a75 Mon Sep 17 00:00:00 2001 From: acpaquette Date: Fri, 29 Aug 2025 15:56:12 -0700 Subject: [PATCH 1/6] Add ability to load gdal ISIS metadata --- ale/base/base.py | 5 ++-- ale/base/label_isis.py | 53 ++++++++++++++++++++++++++++++++++++-- ale/drivers/__init__.py | 18 ++++++++++++- ale/drivers/ody_drivers.py | 14 ++++++++++ 4 files changed, 84 insertions(+), 6 deletions(-) diff --git a/ale/base/base.py b/ale/base/base.py index 33d2bdd7a..2f3acc8c6 100644 --- a/ale/base/base.py +++ b/ale/base/base.py @@ -509,10 +509,9 @@ def read_geodata(self): from osgeo import gdal gdal.UseExceptions() except: - self._projection = "" - return self._projection + self._geodata = None + return self._geodata - self._geodata = None if isinstance(self._file, pvl.PVLModule): # save it to a temp folder with tempfile.NamedTemporaryFile() as tmp: diff --git a/ale/base/label_isis.py b/ale/base/label_isis.py index 718987454..f0cb5f14d 100644 --- a/ale/base/label_isis.py +++ b/ale/base/label_isis.py @@ -23,14 +23,36 @@ def label(self): if not hasattr(self, "_label"): if isinstance(self._file, pvl.PVLModule): self._label = self._file + return self._label + else: + self._label = None + grammar = pvl.grammar.ISISGrammar() grammar.comments+=(("#", "\n"), ) + try: self._label = pvl.loads(self._file, grammar=grammar) - except Exception: - self._label = pvl.load(self._file, grammar=grammar) except: + pass + + if not self._label: + try: + self._label = pvl.load(self._file, grammar=grammar) + except: + pass + + if not self._label: + try: + from osgeo import gdal + gdal.UseExceptions() + geodata = gdal.Open(label) + self._label = json.loads(geodata.GetMetadata("json:ISIS3")["doc"]) + except: + pass + + if not self._label: raise ValueError("{} is not a valid label".format(self._file)) + return self._label @property @@ -196,6 +218,9 @@ def spacecraft_clock_start_count(self): if isinstance(self._clock_start_count, pvl.Quantity): self._clock_start_count = 
self._clock_start_count.value + if isinstance(self._clock_start_count, dict): + self._clock_start_count = self._clock_start_count["value"] + self._clock_start_count = str(self._clock_start_count) return self._clock_start_count @@ -222,6 +247,9 @@ def spacecraft_clock_stop_count(self): if isinstance(self._clock_stop_count, pvl.Quantity): self._clock_stop_count = self._clock_stop_count.value + if isinstance(self._clock_stop_count, dict): + self._clock_stop_count = self._clock_stop_count["value"] + self._clock_stop_count = str(self._clock_stop_count) return self._clock_stop_count @@ -274,6 +302,13 @@ def exposure_duration(self): else: # if not milliseconds, the units are probably seconds exposure_duration = exposure_duration.value + elif isinstance(exposure_duration, dict): + units = exposure_duration["units"] + if "ms" in units.lower() or 'milliseconds' in units.lower(): + exposure_duration = exposure_duration["value"] * 0.001 + else: + # if not milliseconds, the units are probably seconds + exposure_duration = exposure_duration["value"] else: # if no units are available, assume the exposure duration is given in milliseconds exposure_duration = exposure_duration * 0.001 @@ -299,6 +334,13 @@ def line_exposure_duration(self): else: # if not milliseconds, the units are probably seconds line_exposure_duration = line_exposure_duration.value + elif isinstance(line_exposure_duration, dict): + units = line_exposure_duration["unit"] + if "ms" in units.lower(): + line_exposure_duration = line_exposure_duration["value"] * 0.001 + else: + # if not milliseconds, the units are probably seconds + line_exposure_duration = line_exposure_duration["value"] else: # if no units are available, assume the exposure duration is given in milliseconds line_exposure_duration = line_exposure_duration * 0.001 @@ -323,6 +365,13 @@ def interframe_delay(self): else: # if not milliseconds, the units are probably seconds interframe_delay = interframe_delay.value + elif isinstance(interframe_delay, 
dict): + units = interframe_delay["units"] + if "ms" in units.lower(): + interframe_delay = interframe_delay["value"] * 0.001 + else: + # if not milliseconds, the units are probably seconds + interframe_delay = interframe_delay["value"] else: # if no units are available, assume the interframe delay is given in milliseconds interframe_delay = interframe_delay * 0.001 diff --git a/ale/drivers/__init__.py b/ale/drivers/__init__.py index 1c9cb0974..51dacad22 100644 --- a/ale/drivers/__init__.py +++ b/ale/drivers/__init__.py @@ -125,7 +125,10 @@ def load(label, props={}, formatter='ale', verbose=False, only_isis_spice=False, if verbose: logger.info("First parse attempt failed with") logger.info(e) - # If pds3 label fails, try isis grammar + parsed_label = None + + # If pds3 label fails, try isis grammar + if not parsed_label: try: parsed_label = parse_label(label, pvl.grammar.ISISGrammar()) except Exception as e: @@ -135,6 +138,19 @@ def load(label, props={}, formatter='ale', verbose=False, only_isis_spice=False, # If both fail, then don't parse the label, and just pass the driver a file. 
parsed_label = None + # If pvl label loading fails, try gdal + if not parsed_label: + try: + from osgeo import gdal + gdal.UseExceptions() + geodata = gdal.Open(label) + parsed_label = json.loads(geodata.GetMetadata("json:ISIS3")["doc"]) + except Exception as e: + if verbose: + logger.info("Gdal parse attempt failed with") + logger.info(e) + parsed_label = None + if verbose: if parsed_label: logger.info("Successfully pre-parsed label file") diff --git a/ale/drivers/ody_drivers.py b/ale/drivers/ody_drivers.py index 499f577da..b9e27fa7c 100644 --- a/ale/drivers/ody_drivers.py +++ b/ale/drivers/ody_drivers.py @@ -111,6 +111,13 @@ def start_time(self): else: # if not milliseconds, the units are probably seconds offset = offset.value + elif isinstance(offset, dict): + units = offset["unit"] + if "ms" in units.lower(): + offset = offset["value"] * 0.001 + else: + # if not milliseconds, the units are probably seconds + offset = offset["value"] return og_start_time + offset @@ -321,6 +328,13 @@ def start_time(self): else: # if not milliseconds, the units are probably seconds offset = offset.value + elif isinstance(offset, dict): + units = offset["unit"] + if "ms" in units.lower(): + offset = offset["value"] * 0.001 + else: + # if not milliseconds, the units are probably seconds + offset = offset["value"] return og_start_time + offset - (self.exposure_duration / 2) From 27f49cfa2295cba3aed4f9c29904c026af686a7d Mon Sep 17 00:00:00 2001 From: acpaquette Date: Fri, 13 Feb 2026 12:40:04 -0700 Subject: [PATCH 2/6] Update concrete drivers and add label test --- ale/base/label_isis.py | 106 ++++++++++++++++++------------ ale/drivers/apollo_drivers.py | 20 ++++-- ale/drivers/co_drivers.py | 27 ++++---- ale/drivers/isis_ideal_drivers.py | 9 ++- ale/drivers/lo_drivers.py | 20 ++++-- ale/drivers/lro_drivers.py | 10 ++- ale/drivers/mess_drivers.py | 7 +- ale/drivers/mgs_drivers.py | 4 +- ale/drivers/mro_drivers.py | 2 +- ale/drivers/nh_drivers.py | 6 +- environment.yml | 2 +- 
tests/pytests/test_load.py | 13 +++- 12 files changed, 150 insertions(+), 76 deletions(-) diff --git a/ale/base/label_isis.py b/ale/base/label_isis.py index f0cb5f14d..7e2611080 100644 --- a/ale/base/label_isis.py +++ b/ale/base/label_isis.py @@ -1,4 +1,6 @@ import pvl +import json +import datetime class IsisLabel(): """ @@ -21,7 +23,7 @@ def label(self): Raised when an invalid label is provided. """ if not hasattr(self, "_label"): - if isinstance(self._file, pvl.PVLModule): + if isinstance(self._file, pvl.PVLModule) or isinstance(self._file, dict): self._label = self._file return self._label else: @@ -40,13 +42,19 @@ def label(self): self._label = pvl.load(self._file, grammar=grammar) except: pass + + if not self._label: + try: + self._label = json.loads(self._file) + except: + pass if not self._label: try: from osgeo import gdal gdal.UseExceptions() - geodata = gdal.Open(label) - self._label = json.loads(geodata.GetMetadata("json:ISIS3")["doc"]) + geodata = gdal.Open(self._file) + self._label = json.loads(geodata.GetMetadata("json:ISIS3")[0]) except: pass @@ -266,7 +274,10 @@ def utc_start_time(self): : datetime Start time of the image in UTC """ - return self.label['IsisCube']['Instrument']['StartTime'] + utc_time = self.label['IsisCube']['Instrument']['StartTime'] + if isinstance(utc_time, str): + utc_time = datetime.datetime.fromisoformat(utc_time) + return utc_time @property def utc_stop_time(self): @@ -280,7 +291,10 @@ def utc_stop_time(self): : datetime Stop time of the image in UTC """ - return self.label['IsisCube']['Instrument']['StopTime'] + utc_time = self.label['IsisCube']['Instrument']['StopTime'] + if isinstance(utc_time, str): + utc_time = datetime.datetime.fromisoformat(utc_time) + return utc_time @property def exposure_duration(self): @@ -297,21 +311,23 @@ def exposure_duration(self): # Check for units on the PVL keyword if isinstance(exposure_duration, pvl.collections.Quantity): units = exposure_duration.units - if "ms" in units.lower() or 
'milliseconds' in units.lower(): - exposure_duration = exposure_duration.value * 0.001 - else: - # if not milliseconds, the units are probably seconds - exposure_duration = exposure_duration.value + value = exposure_duration.value elif isinstance(exposure_duration, dict): - units = exposure_duration["units"] - if "ms" in units.lower() or 'milliseconds' in units.lower(): - exposure_duration = exposure_duration["value"] * 0.001 - else: - # if not milliseconds, the units are probably seconds - exposure_duration = exposure_duration["value"] + units = "" + value = exposure_duration["value"] + if "unit" in exposure_duration: + units = exposure_duration["unit"] else: # if no units are available, assume the exposure duration is given in milliseconds - exposure_duration = exposure_duration * 0.001 + units = "ms" + value = exposure_duration + + if "ms" in units.lower() or 'milliseconds' in units.lower(): + exposure_duration = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + exposure_duration = value + return exposure_duration else: return self.line_exposure_duration @@ -327,23 +343,26 @@ def line_exposure_duration(self): Line exposure duration in seconds """ line_exposure_duration = self.label['IsisCube']['Instrument']['LineExposureDuration'] + # Check for units on the PVL keyword if isinstance(line_exposure_duration, pvl.collections.Quantity): units = line_exposure_duration.units - if "ms" in units.lower(): - line_exposure_duration = line_exposure_duration.value * 0.001 - else: - # if not milliseconds, the units are probably seconds - line_exposure_duration = line_exposure_duration.value + value = line_exposure_duration.value elif isinstance(line_exposure_duration, dict): - units = line_exposure_duration["unit"] - if "ms" in units.lower(): - line_exposure_duration = line_exposure_duration["value"] * 0.001 - else: - # if not milliseconds, the units are probably seconds - line_exposure_duration = line_exposure_duration["value"] + units = "" + 
value = line_exposure_duration["value"] + if "unit" in line_exposure_duration: + units = line_exposure_duration["unit"] else: # if no units are available, assume the exposure duration is given in milliseconds - line_exposure_duration = line_exposure_duration * 0.001 + units = "ms" + value = line_exposure_duration + + if "ms" in units.lower() or 'milliseconds' in units.lower(): + line_exposure_duration = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + line_exposure_duration = value + return line_exposure_duration @@ -357,23 +376,26 @@ def interframe_delay(self): : float interframe delay in seconds """ - interframe_delay = self.label['IsisCube']['Instrument']['InterframeDelay'] + interframe_delay = self.label['IsisCube']['Instrument'].get('InterframeDelay', None) + if interframe_delay == None: + interframe_delay = self.label['IsisCube']['Instrument'].get('InterFrameDelay', None) if isinstance(interframe_delay, pvl.collections.Quantity): units = interframe_delay.units - if "ms" in units.lower(): - interframe_delay = interframe_delay.value * 0.001 - else: - # if not milliseconds, the units are probably seconds - interframe_delay = interframe_delay.value + value = interframe_delay.value elif isinstance(interframe_delay, dict): - units = interframe_delay["units"] - if "ms" in units.lower(): - interframe_delay = interframe_delay["value"] * 0.001 - else: - # if not milliseconds, the units are probably seconds - interframe_delay = interframe_delay["value"] + units = "ms" + value = interframe_delay["value"] + if "unit" in interframe_delay: + units = interframe_delay["unit"] else: # if no units are available, assume the interframe delay is given in milliseconds - interframe_delay = interframe_delay * 0.001 + units = "ms" + value = interframe_delay + + if "ms" in units.lower() or 'milliseconds' in units.lower(): + interframe_delay = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + interframe_delay 
= value return interframe_delay diff --git a/ale/drivers/apollo_drivers.py b/ale/drivers/apollo_drivers.py index b52e2c9bd..be7a8c2d0 100644 --- a/ale/drivers/apollo_drivers.py +++ b/ale/drivers/apollo_drivers.py @@ -74,14 +74,22 @@ def exposure_duration(self): # Check for units on the PVL keyword if isinstance(exposure_duration, pvl.collections.Quantity): units = exposure_duration.units - if "ms" in units.lower() or 'milliseconds' in units.lower(): - exposure_duration = exposure_duration.value * 0.001 - else: - # if not milliseconds, the units are probably seconds - exposure_duration = exposure_duration.value + value = exposure_duration.value + elif isinstance(exposure_duration, dict): + units = "" + value = exposure_duration["value"] + if "unit" in exposure_duration: + units = exposure_duration["unit"] else: # if no units are available, assume the exposure duration is given in milliseconds - exposure_duration = exposure_duration * 0.001 + units = "ms" + value = exposure_duration + + if "ms" in units.lower() or 'milliseconds' in units.lower(): + exposure_duration = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + exposure_duration = value return exposure_duration @property diff --git a/ale/drivers/co_drivers.py b/ale/drivers/co_drivers.py index 8c0e163c5..c1e6627be 100644 --- a/ale/drivers/co_drivers.py +++ b/ale/drivers/co_drivers.py @@ -375,11 +375,14 @@ def exposure_duration(self): exposure_duration = self.label['IsisCube']['Instrument']['ExposureDuration'] for i in exposure_duration: - if i.units == "VIS": - exposure_duration = i - - exposure_duration = exposure_duration.value * 0.001 - return exposure_duration + if isinstance(i, pvl.collections.Quantity): + if i.units == "VIS": + exposure_duration = i.value + elif isinstance(i, dict): + if i["unit"] == "VIS": + exposure_duration = i["value"] + + return exposure_duration * 0.001 else: return self.line_exposure_duration @@ -502,13 +505,15 @@ def 
exposure_duration(self): """ if 'ExposureDuration' in self.label['IsisCube']['Instrument']: exposure_duration = self.label['IsisCube']['Instrument']['ExposureDuration'] - for i in exposure_duration: - if i.units == "VIS": - exposure_duration = i - - exposure_duration = exposure_duration.value * 0.001 - return exposure_duration + if isinstance(exposure_duration, pvl.collections.Quantity): + if exposure_duration.units == "VIS": + exposure_duration = exposure_duration.value + elif isinstance(exposure_duration, dict): + if exposure_duration["unit"] == "VIS": + exposure_duration = exposure_duration["value"] + + return exposure_duration * 0.001 else: return self.line_exposure_duration diff --git a/ale/drivers/isis_ideal_drivers.py b/ale/drivers/isis_ideal_drivers.py index ff9a59575..51a16e965 100644 --- a/ale/drivers/isis_ideal_drivers.py +++ b/ale/drivers/isis_ideal_drivers.py @@ -37,8 +37,13 @@ def ephemeris_start_time(self): float : The image start ephemeris time """ - - return self.label.get('IsisCube').get('Instrument').get("EphemerisTime").value + if not hasattr(self, "_ephemeris_start_time"): + self._ephemeris_start_time = self.label['IsisCube']['Instrument']["EphemerisTime"] + if isinstance(self._ephemeris_start_time, pvl.collections.Quantity): + self._ephemeris_start_time = self._ephemeris_start_time.value + elif isinstance(self._ephemeris_start_time, dict): + self._ephemeris_start_time = self._ephemeris_start_time["value"] + return self._ephemeris_start_time @property diff --git a/ale/drivers/lo_drivers.py b/ale/drivers/lo_drivers.py index df727d5f0..4f8301e32 100644 --- a/ale/drivers/lo_drivers.py +++ b/ale/drivers/lo_drivers.py @@ -1,5 +1,7 @@ import numpy as np +import pvl import spiceypy as spice + from ale.base.data_naif import NaifSpice from ale.base.label_isis import IsisLabel from ale.base.type_sensor import Framer @@ -151,11 +153,21 @@ def naif_keywords(self): if (not hasattr(self, "_naif_keywords")): # From ISIS LoCameraFiducialMap + p_fidSamples = self.label['IsisCube']['Instrument']['FiducialSamples'] + 
p_fidLines = self.label['IsisCube']['Instrument']['FiducialLines'] + p_fidXCoords = self.label['IsisCube']['Instrument']['FiducialXCoordinates'] + p_fidYCoords = self.label['IsisCube']['Instrument']['FiducialYCoordinates'] # Read Fiducials - p_fidSamples = self.label['IsisCube']['Instrument']['FiducialSamples'].value - p_fidLines = self.label['IsisCube']['Instrument']['FiducialLines'].value - p_fidXCoords = self.label['IsisCube']['Instrument']['FiducialXCoordinates'].value - p_fidYCoords = self.label['IsisCube']['Instrument']['FiducialYCoordinates'].value + if isinstance(p_fidSamples, pvl.collections.Quantity): + p_fidSamples = p_fidSamples.value + p_fidLines = p_fidLines.value + p_fidXCoords = p_fidXCoords.value + p_fidYCoords = p_fidYCoords.value + elif isinstance(p_fidSamples, dict): + p_fidSamples = p_fidSamples["value"] + p_fidLines = p_fidLines["value"] + p_fidXCoords = p_fidXCoords["value"] + p_fidYCoords = p_fidYCoords["value"] # Create Affine Transformation p_src = [p_fidSamples, p_fidLines] diff --git a/ale/drivers/lro_drivers.py b/ale/drivers/lro_drivers.py index ce7d6aee0..42861d334 100644 --- a/ale/drivers/lro_drivers.py +++ b/ale/drivers/lro_drivers.py @@ -1,6 +1,6 @@ -import spiceypy as spice - import numpy as np +import pvl +import spiceypy as spice from pyspiceql import pyspiceql from ale.base import Driver @@ -756,7 +756,11 @@ def wavelength(self): """ # Get float value of frequency in GHz - frequency = self.label['IsisCube']['Instrument']['Frequency'].value + frequency = self.label['IsisCube']['Instrument']['Frequency'] + if isinstance(frequency, pvl.collections.Quantity): + frequency = frequency.value + elif isinstance(frequency, dict): + frequency = frequency["value"] #wavelength = spice.clight() / frequency / 1000.0 wavelength = 299792.458 / frequency / 1000.0 return wavelength diff --git a/ale/drivers/mess_drivers.py b/ale/drivers/mess_drivers.py index 9dd8cd4a4..570c50b81 100644 --- a/ale/drivers/mess_drivers.py +++ 
b/ale/drivers/mess_drivers.py @@ -1,4 +1,5 @@ import numpy as np +import pvl from pyspiceql import pyspiceql from ale.base import Driver @@ -347,7 +348,11 @@ def focal_length(self): f_t = np.poly1d(coeffs[::-1]) # eval at the focal_plane_temperature - self._focal_length = f_t(self.label['IsisCube']['Instrument']['FocalPlaneTemperature'].value) + focal_temp = self.label['IsisCube']['Instrument']['FocalPlaneTemperature'] + if isinstance(focal_temp, pvl.collections.Quantity): + self._focal_length = f_t(focal_temp.value) + elif isinstance(focal_temp, dict): + self._focal_length = f_t(focal_temp["value"]) return self._focal_length @property diff --git a/ale/drivers/mgs_drivers.py b/ale/drivers/mgs_drivers.py index 3671d8579..0daa9242b 100644 --- a/ale/drivers/mgs_drivers.py +++ b/ale/drivers/mgs_drivers.py @@ -53,7 +53,7 @@ def ephemeris_stop_time(self): the ephemeris stop time of the image, so compute the ephemeris stop time from the start time and the exposure duration. """ - return self.ephemeris_start_time + (self.exposure_duration/1000 * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) + return self.ephemeris_start_time + (self.exposure_duration * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) @property def detector_start_sample(self): @@ -186,7 +186,7 @@ def ephemeris_stop_time(self): the ephemeris stop time of the image, so compute the ephemeris stop time from the start time and the exposure duration. 
""" - return self.ephemeris_start_time + (self.exposure_duration/1000 * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) + return self.ephemeris_start_time + (self.exposure_duration * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) @property def detector_start_sample(self): diff --git a/ale/drivers/mro_drivers.py b/ale/drivers/mro_drivers.py index 4c9cb1ae7..52f36ce4f 100644 --- a/ale/drivers/mro_drivers.py +++ b/ale/drivers/mro_drivers.py @@ -101,7 +101,7 @@ def compute_marci_time(self, line): } self._filters = self.label["IsisCube"]["BandBin"]["FilterName"] - self._framelet_rate = self.label["IsisCube"]["Instrument"]["InterframeDelay"].value + self._framelet_rate = self.interframe_delay framelet_height = 16 self._actual_framelet_height = framelet_height / sum_mode diff --git a/ale/drivers/nh_drivers.py b/ale/drivers/nh_drivers.py index 9ea2f89af..906ec6488 100644 --- a/ale/drivers/nh_drivers.py +++ b/ale/drivers/nh_drivers.py @@ -343,8 +343,10 @@ def band_times(self): band_times = self.label['IsisCube']['BandBin']['UtcTime'] self._ephem_band_times = [] for time in band_times: - if type(time) is pvl.Quantity: - time = time.value + if isinstance(time, pvl.collections.Quantity): + time = time.value + elif isinstance(time, dict): + time = time["value"] self._ephem_band_times.append(self.spiceql_call("utcToEt", {"utc": time.strftime("%Y-%m-%d %H:%M:%S.%f")})) return self._ephem_band_times diff --git a/environment.yml b/environment.yml index 0bd139c1d..888e5de0f 100644 --- a/environment.yml +++ b/environment.yml @@ -8,7 +8,7 @@ dependencies: - brotli-python - cmake>=3.15 - eigen - - gdal + - gdal >=3.12.2 - jupyter - networkx - nlohmann_json diff --git a/tests/pytests/test_load.py b/tests/pytests/test_load.py index 35ecbf26f..fe3736174 100644 --- a/tests/pytests/test_load.py +++ b/tests/pytests/test_load.py @@ -13,7 +13,7 @@ from ale.drivers.mess_drivers import MessengerMdisPds3NaifSpiceDriver -from 
conftest import get_image_label, get_image_kernels, convert_kernels +from conftest import get_image_label, get_image_kernels, convert_kernels, get_isd, compare_dicts @pytest.fixture() def mess_kernels(): @@ -45,6 +45,17 @@ def test_mess_load(class_truth, return_val, mess_kernels): assert str(load_failure) == "No Such Driver for Label" assert return_val is False +def test_mess_load_gtiff(mess_kernels): + label_file = "tests/pytests/data/EN1072174528M/EN1072174528M.tiff" + + usgscsm_isd_str = ale.loads(label_file, {'kernels': mess_kernels}) + usgscsm_isd_obj = json.loads(usgscsm_isd_str) + + compare_isd = get_isd("messmdis") + + comparison = compare_dicts(usgscsm_isd_obj, compare_isd) + assert comparison == [] + def test_load_invalid_label(): with pytest.raises(Exception): ale.load('Not a label path') From 15e32296476cd51c19608e33eeb6f022abc969b6 Mon Sep 17 00:00:00 2001 From: acpaquette Date: Fri, 13 Feb 2026 12:42:13 -0700 Subject: [PATCH 3/6] Remove duplicate functions in util and update generate_kernels_from_cube to read gdal json --- ale/isd_generate.py | 2 +- ale/kernel_access.py | 55 +++--- ale/util.py | 293 ---------------------------- tests/pytests/test_kernel_access.py | 29 ++- tests/pytests/test_util.py | 127 ------------ 5 files changed, 59 insertions(+), 447 deletions(-) diff --git a/ale/isd_generate.py b/ale/isd_generate.py index b8bc69444..f2acab4fd 100755 --- a/ale/isd_generate.py +++ b/ale/isd_generate.py @@ -155,7 +155,7 @@ def main(): k = None else: try: - k = ale.util.generate_kernels_from_cube(args.kernel, expand=True) + k = ale.kernel_access.generate_kernels_from_cube(args.kernel, expand=True) except (KeyError, pvl.exceptions.LexerError): k = [args.kernel, ] diff --git a/ale/kernel_access.py b/ale/kernel_access.py index 5705ad08b..3312ee3d7 100644 --- a/ale/kernel_access.py +++ b/ale/kernel_access.py @@ -5,9 +5,11 @@ from os import path import re import warnings +from collections.abc import Iterable import numpy as np import pvl +import json 
from ale import spice_root from ale.util import get_isis_preferences @@ -130,16 +132,25 @@ def generate_kernels_from_cube(cube, expand=False, format_as='list'): Dictionary of lists of kernels with the keys being the Keywords from the Kernels group of cube itself, and the values being the values associated with that Keyword in the cube. """ - # enforce key order - mk_paths = OrderedDict.fromkeys( - ['TargetPosition', 'InstrumentPosition', - 'InstrumentPointing', 'Frame', 'TargetAttitudeShape', - 'Instrument', 'InstrumentAddendum', 'LeapSecond', - 'SpacecraftClock', 'Extra']) - # just work with full path cube = os.path.abspath(cube) - cubelabel = pvl.load(cube) + cubelabel = None + try: + cubelabel = pvl.load(cube) + except: + cubelabel = None + + if (cubelabel == None): + try: + from osgeo import gdal + gdal.UseExceptions() + geodata = gdal.Open(cube) + cubelabel = json.loads(geodata.GetMetadata("json:ISIS3")[0]) + except Exception as e: + cubelabel = None + + if (cubelabel == None): + raise RuntimeError(f"Could not parse {cube} for pvl or json label") try: kernel_group = cubelabel['IsisCube'] @@ -155,33 +166,28 @@ def get_kernels_from_isis_pvl(kernel_group, expand=True, format_as="list"): ['TargetPosition', 'InstrumentPosition', 'InstrumentPointing', 'Frame', 'TargetAttitudeShape', 'Instrument', 'InstrumentAddendum', 'LeapSecond', - 'SpacecraftClock', 'Extra']) - + 'SpacecraftClock', 'Extra', 'Clock', 'ShapeModel']) if isinstance(kernel_group, str): kernel_group = pvl.loads(kernel_group) kernel_group = kernel_group["Kernels"] - def load_table_data(key): + def read_kernels(key): mk_paths[key] = kernel_group.get(key, None) - if isinstance(mk_paths[key], str): + if (mk_paths[key] == "Null"): + mk_paths[key] = None + if isinstance(mk_paths[key], str) or mk_paths[key] == None: mk_paths[key] = [mk_paths[key]] while 'Table' in mk_paths[key]: mk_paths[key].remove('Table') while 'Nadir' in mk_paths[key]: mk_paths[key].remove('Nadir') - load_table_data('TargetPosition') - 
load_table_data('InstrumentPosition') - load_table_data('InstrumentPointing') - load_table_data('TargetAttitudeShape') - # the rest - mk_paths['Frame'] = [kernel_group.get('Frame', None)] - mk_paths['Instrument'] = [kernel_group.get('Instrument', None)] - mk_paths['InstrumentAddendum'] = [kernel_group.get('InstrumentAddendum', None)] - mk_paths['SpacecraftClock'] = [kernel_group.get('SpacecraftClock', None)] - mk_paths['LeapSecond'] = [kernel_group.get('LeapSecond', None)] - mk_paths['Clock'] = [kernel_group.get('Clock', None)] - mk_paths['Extra'] = [kernel_group.get('Extra', None)] + for key in mk_paths.keys(): + read_kernels(key) + + if (mk_paths['ShapeModel'][0]): + if (os.path.splitext(mk_paths['ShapeModel'][0])[-1] != "bds"): + mk_paths['ShapeModel'] = [None] # handles issue with OsirisRex instrument kernels being in a 2d list if isinstance(mk_paths['Instrument'][0], list): @@ -218,6 +224,7 @@ def load_table_data(key): return mk_paths elif (format_as == 'spiceql'): mk_paths.pop("Clock") + mk_paths.pop("ShapeModel") mk_paths["ck"] = [k.replace("$", "") for k in mk_paths.pop("InstrumentPointing") if k] mk_paths["spk"] = [k.replace("$", "") for k in mk_paths.pop("InstrumentPosition") if k] mk_paths["pck"] = [k.replace("$", "") for k in mk_paths.pop("TargetAttitudeShape") if k] diff --git a/ale/util.py b/ale/util.py index aad9fcd54..da1547f19 100644 --- a/ale/util.py +++ b/ale/util.py @@ -316,184 +316,6 @@ def replace_var(m): return path -def generate_kernels_from_cube(cube, expand=False, format_as='list'): - """ - Parses a cube label to obtain the kernels from the Kernels group. - - Parameters - ---------- - cube : cube - Path to the cube to pull the kernels from. - expand : bool, optional - Whether or not to expand variables within kernel paths based on your IsisPreferences file. - See :func:`get_isis_preferences` for how the IsisPreferences file is found. 
- format_as : str, optional {'list', 'dict'} - How to return the kernels: either as a one-dimensional ordered list, or as a dictionary - of kernel lists. - - Returns - ------- - : list - One-dimensional ordered list of all kernels from the Kernels group in the cube. - : Dictionary - Dictionary of lists of kernels with the keys being the Keywords from the Kernels group of - cube itself, and the values being the values associated with that Keyword in the cube. - """ - # just work with full path - cube = os.path.abspath(cube) - cubelabel = pvl.load(cube) - - try: - kernel_group = cubelabel['IsisCube'] - except KeyError: - raise KeyError(f'{cubelabel}, Could not find kernels group, input cube [{cube}] may not be spiceinited') - - return get_kernels_from_isis_pvl(kernel_group, expand, format_as) - -def get_kernels_from_isis_pvl(kernel_group, expand=True, format_as="list"): - """ - Extract kernels from ISIS PVL. - - Parameters - ---------- - kernel_group : str - The target kernel group to extract - expand : bool, optional - True if values of environment variables should be expanded, by default True - format_as : str, optional - Desired output format, by default "list" - - Returns - ------- - list|str|obj - The extracted kernels in the user-specified format - - Raises - ------ - Exception - Raised if the user specifies an invalid or unsupported format. 
- """ - # enforce key order - mk_paths = OrderedDict.fromkeys( - ['TargetPosition', 'InstrumentPosition', - 'InstrumentPointing', 'Frame', 'TargetAttitudeShape', - 'Instrument', 'InstrumentAddendum', 'LeapSecond', - 'SpacecraftClock', 'Extra', 'ShapeModel']) - if isinstance(kernel_group, str): - kernel_group = pvl.loads(kernel_group) - - kernel_group = kernel_group["Kernels"] - - def load_table_data(key): - mk_paths[key] = kernel_group.get(key, None) - if isinstance(mk_paths[key], str): - mk_paths[key] = [mk_paths[key]] - while 'Table' in mk_paths[key]: mk_paths[key].remove('Table') - while 'Nadir' in mk_paths[key]: mk_paths[key].remove('Nadir') - - load_table_data('TargetPosition') - load_table_data('InstrumentPosition') - load_table_data('InstrumentPointing') - load_table_data('TargetAttitudeShape') - # the rest - mk_paths['Frame'] = [kernel_group.get('Frame', None)] - mk_paths['Instrument'] = [kernel_group.get('Instrument', None)] - mk_paths['InstrumentAddendum'] = [kernel_group.get('InstrumentAddendum', None)] - mk_paths['SpacecraftClock'] = [kernel_group.get('SpacecraftClock', None)] - mk_paths['LeapSecond'] = [kernel_group.get('LeapSecond', None)] - mk_paths['Clock'] = [kernel_group.get('Clock', None)] - mk_paths['Extra'] = [kernel_group.get('Extra', None)] - mk_paths['ShapeModel'] = [kernel_group.get('ShapeModel', None)] - if (mk_paths['ShapeModel'][0]): - if (os.path.splitext(mk_paths['ShapeModel'][0])[-1] != "bds"): - mk_paths['ShapeModel'] = [None] - - # handles issue with OsirisRex instrument kernels being in a 2d list - if isinstance(mk_paths['Instrument'][0], list): - mk_paths['Instrument'] = np.concatenate(mk_paths['Instrument']).flat - - if (format_as == 'list'): - # get kernels as 1-d string list - kernels = [] - for kernel in chain.from_iterable(mk_paths.values()): - if isinstance(kernel, str): - kernels.append(kernel) - elif isinstance(kernel, list): - kernels.extend(kernel) - if expand: - isisprefs = get_isis_preferences() - if not 
"DataDirectory" in isisprefs: - warnings.warn("No IsisPreferences file found, is your ISISROOT env var set?") - - kernels = [expandvars(k, isisprefs['DataDirectory'], case_sensitive=False) for k in kernels] - # Ensure that the ISIS Addendum kernel is last in case it overrides - # some values from the default Instrument kernel - # Sorts planetary constants kernel first so it can be overridden by more specific kernels - kernels = sorted(kernels, key=lambda x: "Addendum" in x) - kernels = sorted(kernels, key=lambda x: "pck00" in x, reverse=True) - return kernels - elif (format_as == 'dict'): - # return created dict - if expand: - isisprefs = get_isis_preferences() - for kern_list in mk_paths: - for index, kern in enumerate(mk_paths[kern_list]): - if kern is not None: - mk_paths[kern_list][index] = expandvars(kern, isisprefs['DataDirectory'], case_sensitive=False) - return mk_paths - else: - raise Exception(f'{format_as} is not a valid return format') - -def write_metakernel_from_cube(cube, mkpath=None): - """ - Create a metakernel from a spiceinit'd cube. - - Parameters - ---------- - cube : str - The string filename of the cube from which to generate a metakernel. - mkpath : str, optional - The path to the output metakernel or None if not written to disk, by default None - - Returns - ------- - str - The text of the generated metakernel. 
- """ - # add ISISPREF paths as path_symbols and path_values to avoid custom expand logic - pvlprefs = get_isis_preferences() - - kernels = generate_kernels_from_cube(cube) - - # make sure kernels are mk strings - kernels = ["'"+k+"'" for k in kernels] - - paths = OrderedDict(pvlprefs['DataDirectory']) - path_values = ["'"+os.path.expandvars(path)+"'" for path in paths.values()] - path_symbols = ["'"+symbol.lower()+"'" for symbol in paths.keys()] - - body = '\n\n'.join([ - 'KPL/MK', - f'Metakernel Generated from an ISIS cube: {cube}', - '\\begindata', - 'PATH_VALUES = (', - '\n'.join(path_values), - ')', - 'PATH_SYMBOLS = (', - '\n'.join(path_symbols), - ')', - 'KERNELS_TO_LOAD = (', - '\n'.join(kernels), - ')', - '\\begintext' - ]) - - if mkpath is not None: - with open(mkpath, 'w') as f: - f.write(body) - - return body - def get_ck_frames(kernel): """ Get all of the reference frames defined in a kernel. @@ -904,121 +726,6 @@ def search_isis_db(dbobj, labelobj, isis_data): return kernels -def expandvars(path, env_dict=os.environ, default=None, case_sensitive=True): - if env_dict != os.environ: - env_dict = dict_merge(env_dict, os.environ) - - while "$" in path: - user_dict = env_dict if case_sensitive else dict_to_lower(env_dict) - - def replace_var(m): - group1 = m.group(1) if case_sensitive else m.group(1).lower() - val = user_dict.get(m.group(2) or group1 if default is None else default) - if not val: - raise KeyError(f"Failed to evaluate {m.group(0)} from env_dict. " + - f"Should {m.group(0)} be an environment variable?") - - return val - reVar = r'\$(\w+|\{([^}]*)\})' - path = re.sub(reVar, replace_var, path) - return path - -def find_kernels(cube, isis_data, format_as=dict): - """ - Find all kernels for a cube and return a json object with categorized kernels. 
- - Parameters - ---------- - - cube : str - Path to an ISIS cube - - isis_data : str - path to $ISISDATA - - format_as : obj - What type to return the kernels as, ISIS3-like dict/PVL or flat list - - Returns - ------- - : obj - Container with kernels - """ - def remove_dups(listofElements): - # Create an empty list to store unique elements - uniqueList = [] - - # Iterate over the original list and for each element - # add it to uniqueList, if its not already there. - for elem in listofElements: - if elem not in uniqueList: - uniqueList.append(elem) - - # Return the list of unique elements - return uniqueList - - cube_label = pvl.load(cube) - mission_lookup_table = get_isis_mission_translations(isis_data) - - mission_dir = mission_lookup_table[cube_label["IsisCube"]["Instrument"]["SpacecraftName"]] - mission_dir = path.join(isis_data, mission_dir.lower()) - - kernel_dir = path.join(mission_dir, "kernels") - base_kernel_dir = path.join(isis_data, "base", "kernels") - - kernel_types = [ name for name in os.listdir(kernel_dir) if os.path.isdir(os.path.join(kernel_dir, name)) ] - kernel_types.extend(name for name in os.listdir(base_kernel_dir) if os.path.isdir(os.path.join(base_kernel_dir, name))) - kernel_types = set(kernel_types) - - db_files = [] - for typ in kernel_types: - files = sorted(glob(path.join(kernel_dir, typ, "*.db"))) - base_files = sorted(glob(path.join(base_kernel_dir, typ, "*.db"))) - files = [list(it) for k,it in groupby(files, key=lambda f:os.path.basename(f).split(".")[0])] - base_files = [list(it) for k,it in groupby(base_files, key=lambda f:os.path.basename(f).split(".")[0])] - - for instrument_dbs in files: - db_files.append(read_pvl(sorted(instrument_dbs)[-1], True)) - for base_dbs in base_files: - db_files.append(read_pvl(sorted(base_dbs)[-1], True)) - - - kernels = {} - for f in db_files: - #TODO: Error checking - typ = f[0][0] - kernel_search_results = search_isis_db(f[0][1], cube_label, isis_data) - - if not kernel_search_results: - 
kernels[typ] = None - else: - try: - kernels[typ]["kernels"].extend(kernel_search_results["kernels"]) - if any(kernel_search_results.get("types", [None])): - kernels[typ]["types"].extend(kernel_search_results["types"]) - except: - kernels[typ] = {} - kernels[typ]["kernels"] = kernel_search_results["kernels"] - if any(kernel_search_results.get("types", [None])): - kernels[typ]["types"] = kernel_search_results["types"] - - for k,v in kernels.items(): - if v: - kernels[k]["kernels"] = remove_dups(v["kernels"]) - - if format_as == dict: - return kernels - elif format_as == list: - kernel_list = [] - for _,kernels in kernels.items(): - if kernels: - kernel_list.extend(kernels["kernels"]) - return kernel_list - else: - warnings.warn(f"{format_as} is not a valid format, returning as dict") - return kernels - - def merge_kernels(dict1, dict2, strategy='combine'): """ Merge two dictionaries with configurable conflict resolution strategies. diff --git a/tests/pytests/test_kernel_access.py b/tests/pytests/test_kernel_access.py index 09e3d1a99..fa83fef43 100644 --- a/tests/pytests/test_kernel_access.py +++ b/tests/pytests/test_kernel_access.py @@ -135,7 +135,19 @@ def test_kernel_from_cube_dict(cube_kernels): cube.write(cube_kernels) cube.flush() kernels = kernel_access.generate_kernels_from_cube(cube.name, format_as='dict') - assert kernels == OrderedDict([('TargetPosition', ['$messenger/targetposition0', '$messenger/targetposition1']), ('InstrumentPosition', ['$messenger/instrumentposition']), ('InstrumentPointing', ['$messenger/instrumentpointing0', '$messenger/instrumentpointing1']), ('Frame', [None]), ('TargetAttitudeShape', ['$base/attitudeshape']), ('Instrument', ['$messenger/instrument']), ('InstrumentAddendum', [None]), ('LeapSecond', [None]), ('SpacecraftClock', ['$base/clock']), ('Extra', [None]), ('Clock', [None])]) + expected_dict = OrderedDict([('TargetPosition', ['$messenger/targetposition0', '$messenger/targetposition1']), + ('InstrumentPosition', 
['$messenger/instrumentposition']), + ('InstrumentPointing', ['$messenger/instrumentpointing0', '$messenger/instrumentpointing1']), + ('Frame', [None]), + ('TargetAttitudeShape', ['$base/attitudeshape']), + ('Instrument', ['$messenger/instrument']), + ('InstrumentAddendum', [None]), + ('LeapSecond', [None]), + ('SpacecraftClock', ['$base/clock']), + ('Extra', [None]), + ('Clock', [None]), + ('ShapeModel', [None])]) + assert kernels == expected_dict def test_kernel_from_cube_dict_expanded(monkeypatch, tmpdir, pvl_four_group, cube_kernels): with patch.dict('os.environ', {'ISISROOT': str(tmpdir), 'ISIS3DATA': '$ISISDATA', 'ISISDATA': '/test/path'}): @@ -148,7 +160,20 @@ def test_kernel_from_cube_dict_expanded(monkeypatch, tmpdir, pvl_four_group, cub cube.write(cube_kernels) cube.flush() kernels = kernel_access.generate_kernels_from_cube(cube.name, expand=True, format_as='dict') - assert kernels == OrderedDict([('TargetPosition', ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1']), ('InstrumentPosition', ['/test/path/messenger/instrumentposition']), ('InstrumentPointing', ['/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1']), ('Frame', [None]), ('TargetAttitudeShape', ['/test/path/base/attitudeshape']), ('Instrument', ['/test/path/messenger/instrument']), ('InstrumentAddendum', [None]), ('LeapSecond', [None]), ('SpacecraftClock', ['/test/path/base/clock']), ('Extra', [None]), ('Clock', [None])]) + print(kernels.keys()) + expected_dict = OrderedDict([('TargetPosition', ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1']), + ('InstrumentPosition', ['/test/path/messenger/instrumentposition']), + ('InstrumentPointing', ['/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1']), + ('Frame', [None]), + ('TargetAttitudeShape', ['/test/path/base/attitudeshape']), + ('Instrument', ['/test/path/messenger/instrument']), + ('InstrumentAddendum', [None]), + 
('LeapSecond', [None]), + ('SpacecraftClock', ['/test/path/base/clock']), + ('Extra', [None]), + ('Clock', [None]), + ('ShapeModel', [None])]) + assert kernels == expected_dict def test_kernel_from_cube_no_kernel_group(): with pytest.raises(KeyError): diff --git a/tests/pytests/test_util.py b/tests/pytests/test_util.py index c8f075e41..cd725372f 100644 --- a/tests/pytests/test_util.py +++ b/tests/pytests/test_util.py @@ -97,133 +97,6 @@ def test_pvl_parser(pvl_three_group): assert obj["Test"]["t"] == "t3" assert obj["Settings"]["delsystem32"] == "yes" - -def test_find_kernels(cube_kernels, tmpdir): - ck_db = """ - Object = Pointing - Group = Selection - Time = ( "2016 JAN 01 00:00:00.000000 TDB", "2016 DEC 31 00:00:00.000000 TDB" ) - Type = Reconstructed - File = $MRO/fake - End_Group - End_Object - """ - - ik_db = """ - Object = instrument - Group = Selection - Match = ("Instrument", "InstrumentId", "fake") - File = ("fake", "not/a/real/file") - End_Group - End_Object - """ - translation = """ - Group = MissionName - InputKey = SpacecraftName - InputGroup = "IsisCube,Instrument" - InputPosition = (IsisCube, Instrument) - Translation = (fake, "fake") - End_Group - """ - - tmpdir.mkdir("fake").mkdir("kernels").mkdir("ik") - tmpdir.mkdir("base").mkdir("kernels").mkdir("ck") - tmpdir.mkdir("base", "translations") - - ck_db_file = tmpdir.join("base", "kernels", "ck", "kernel.01.db") - ik_db_file = tmpdir.join("fake", "kernels", "ik", "kernel.01.db") - translation_file = tmpdir.join("base", "translations", "MissionName2DataDir.trn") - cube_file = tmpdir.join("test.cub") - - with open(translation_file, "w") as f: - f.write(translation) - - with open(ck_db_file, "w") as f: - f.write(ck_db) - - with open(ik_db_file, "w") as f: - f.write(ik_db) - - with open(cube_file, "w") as cube: - cube.write(cube_kernels) - - print(pvl.load(str(cube_file))) - kernels = util.find_kernels(str(cube_file), str(tmpdir)) - assert kernels == {'Pointing': {'kernels': [str(tmpdir / 'MRO/fake')], 
'types': ['Reconstructed']}, 'instrument': {'kernels': [str(tmpdir / 'fake/not/a/real/file')]}} - - -def test_kernel_from_cube_list(cube_kernels): - with tempfile.NamedTemporaryFile('r+') as cube: - cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name) - assert kernels == ['$messenger/targetposition0', '$messenger/targetposition1','$messenger/instrumentposition', '$messenger/instrumentpointing0', '$messenger/instrumentpointing1', '$base/attitudeshape', '$messenger/instrument', '$base/clock'] - -def test_kernel_from_cube_list_expanded(monkeypatch, tmpdir, pvl_four_group, cube_kernels): - monkeypatch.setenv('ISISROOT', str(tmpdir)) - monkeypatch.setenv('ISISDATA', '/test/path') - - with open(tmpdir.join('IsisPreferences'), 'w+') as pvl_isisroot_file: - pvl_isisroot_file.write(pvl_four_group) - pvl_isisroot_file.flush() - - with tempfile.NamedTemporaryFile('r+') as cube: - cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name, expand=True) - assert kernels == ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1', '/test/path/messenger/instrumentposition', '/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1', '/test/path/base/attitudeshape', '/test/path/messenger/instrument', '/test/path/base/clock'] - -def test_kernel_from_cube_dict(cube_kernels): - with tempfile.NamedTemporaryFile('r+') as cube: - cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name, format_as='dict') - assert kernels == OrderedDict([('TargetPosition', ['$messenger/targetposition0', '$messenger/targetposition1']), - ('InstrumentPosition', ['$messenger/instrumentposition']), - ('InstrumentPointing', ['$messenger/instrumentpointing0', '$messenger/instrumentpointing1']), - ('Frame', [None]), - ('TargetAttitudeShape', ['$base/attitudeshape']), - ('Instrument', ['$messenger/instrument']), - ('InstrumentAddendum', [None]), - 
('LeapSecond', [None]), - ('SpacecraftClock', ['$base/clock']), - ('Extra', [None]), - ('ShapeModel', [None]), - ('Clock', [None])]) - -def test_kernel_from_cube_dict_expanded(monkeypatch, tmpdir, pvl_four_group, cube_kernels): - monkeypatch.setenv('ISISROOT', str(tmpdir)) - monkeypatch.setenv('ISISDATA', '/test/path') - - with open(tmpdir.join('IsisPreferences'), 'w+') as pvl_isisroot_file: - pvl_isisroot_file.write(pvl_four_group) - pvl_isisroot_file.flush() - - with tempfile.NamedTemporaryFile('r+') as cube: - cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name, expand=True, format_as='dict') - print(kernels.keys()) - assert kernels == OrderedDict([('TargetPosition', ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1']), - ('InstrumentPosition', ['/test/path/messenger/instrumentposition']), - ('InstrumentPointing', ['/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1']), - ('Frame', [None]), - ('TargetAttitudeShape', ['/test/path/base/attitudeshape']), - ('Instrument', ['/test/path/messenger/instrument']), - ('InstrumentAddendum', [None]), - ('LeapSecond', [None]), - ('SpacecraftClock', ['/test/path/base/clock']), - ('Extra', [None]), - ('ShapeModel', [None]), - ('Clock', [None])]) - -def test_kernel_from_cube_no_kernel_group(): - with pytest.raises(KeyError): - with tempfile.NamedTemporaryFile('w+') as cube: - cube.write('') - cube.flush() - util.generate_kernels_from_cube(cube.name) - def test_get_preferences_arg(tmpdir, pvl_one_group): with open(tmpdir.join('IsisPrefrences'), 'w+') as pvl_file: pvl_file.write(pvl_one_group) From 160887193d9d04aa0521a88b6c7f4a1f14014911 Mon Sep 17 00:00:00 2001 From: acpaquette Date: Fri, 13 Feb 2026 12:43:01 -0700 Subject: [PATCH 4/6] Missed update in driver_verification script --- ale/driver_verification.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ale/driver_verification.py 
b/ale/driver_verification.py index 9ddbe12ee..06daf798b 100644 --- a/ale/driver_verification.py +++ b/ale/driver_verification.py @@ -320,7 +320,7 @@ def main(image): run_spiceinit_isis(image_isis_path) # try ale.loads - isis_kerns = ale.util.generate_kernels_from_cube(image_isis_path, expand=True) + isis_kerns = ale.kernel_access.generate_kernels_from_cube(image_isis_path, expand=True) # this can be uncommented and used when the PVL loads fix PR goes in (#587) isis_label = pvl.load(image_isis_path) try: @@ -333,7 +333,7 @@ def main(image): run_spiceinit_ale(image_ale_path) # try ale.loads - ale_kerns = ale.util.generate_kernels_from_cube(image_ale_path, expand=True) + ale_kerns = ale.kernel_access.generate_kernels_from_cube(image_ale_path, expand=True) ale.loads(image_ale_path, props={"kernels": ale_kerns}, only_naif_spice=True) # Generate ISD for both ALE and ISIS From a2683aadf740a01d426ef40f336f4b024009e1a7 Mon Sep 17 00:00:00 2001 From: acpaquette Date: Fri, 13 Feb 2026 12:56:26 -0700 Subject: [PATCH 5/6] Fix leftover tests and changes after rebase --- tests/pytests/data/isds/mgsmocna_isd.json | 2 +- tests/pytests/data/isds/mgsmocwa_isd.json | 2 +- tests/pytests/test_load.py | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/pytests/data/isds/mgsmocna_isd.json b/tests/pytests/data/isds/mgsmocna_isd.json index 267cf4618..7ef352e27 100644 --- a/tests/pytests/data/isds/mgsmocna_isd.json +++ b/tests/pytests/data/isds/mgsmocna_isd.json @@ -15,7 +15,7 @@ [ 0.5, -2.9620223995298147, - 0.4821 + 0.0004821 ] ], "starting_ephemeris_time": -11224002.182053322, diff --git a/tests/pytests/data/isds/mgsmocwa_isd.json b/tests/pytests/data/isds/mgsmocwa_isd.json index d54fc0042..0bc2f3981 100644 --- a/tests/pytests/data/isds/mgsmocwa_isd.json +++ b/tests/pytests/data/isds/mgsmocwa_isd.json @@ -15,7 +15,7 @@ [ 0.5, -153.59999999403954, - 100.0 + 0.1 ] ], "starting_ephemeris_time": -69382819.36051902, diff --git a/tests/pytests/test_load.py 
b/tests/pytests/test_load.py index fe3736174..247cc7f74 100644 --- a/tests/pytests/test_load.py +++ b/tests/pytests/test_load.py @@ -48,8 +48,9 @@ def test_mess_load(class_truth, return_val, mess_kernels): def test_mess_load_gtiff(mess_kernels): label_file = "tests/pytests/data/EN1072174528M/EN1072174528M.tiff" - usgscsm_isd_str = ale.loads(label_file, {'kernels': mess_kernels}) + usgscsm_isd_str = ale.loads(label_file, {'kernels': mess_kernels, "attach_kernels": False}) usgscsm_isd_obj = json.loads(usgscsm_isd_str) + print(usgscsm_isd_obj) compare_isd = get_isd("messmdis") From bb633276f56ff115daab53d141b485539dde7950 Mon Sep 17 00:00:00 2001 From: acpaquette Date: Fri, 13 Feb 2026 13:03:19 -0700 Subject: [PATCH 6/6] Add geotiff and other gtiff tests --- .../data/EN1072174528M/EN1072174528M.tiff | Bin 0 -> 46674 bytes tests/pytests/test_gtiff_label.py | 60 ++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 tests/pytests/data/EN1072174528M/EN1072174528M.tiff create mode 100644 tests/pytests/test_gtiff_label.py diff --git a/tests/pytests/data/EN1072174528M/EN1072174528M.tiff b/tests/pytests/data/EN1072174528M/EN1072174528M.tiff new file mode 100644 index 0000000000000000000000000000000000000000..bf026a7737f7109a61f8c94bf0fbb0f109776b65 GIT binary patch literal 46674 zcmeHwOLOB`c4oHQ;h5-%S1XXnD)g&{k%T5P_l8~9~ zl1Md4rCoM(gzc^WhF7zn`F-!sx z&;IMPXTSL6vtQsy{%Oa5lpc3*?U&m5Khci=+b{lD#-0DaXV3mrJOAIckxLIF+(YaaQEtacafAPERdUZUWj;Hn)U;Ptq{9lW^`@j1A!+Q5uYZd3_3Q-Nsur=-4%_>RV5jtHU=Amv`}MyIikykQsAy zy6ZXrX0v{{R|I9m=z|-n)^BW}S--8nSl#FZnE2ZBqJ?152(z=<|f{#+cbSR-Cn8lmBSZXO<|6Wu2FT@*~9H_d4IdiVKKW##xwbw za!%nJueQ6*1C&G8)r@!QwnwXl8almST*ud&#p~TUyo5Thao{Z8ZsXM(o$_pk>V8FO zJ)CGVcrp2|Gkg)AcE)Gn$@u)FGyQHli_SXd;be00Z6`cGIgL7l@GP2y3U(8=@r}Aq zr+GM@9-W5+b+HU&w%ELho2I?Fe%Sob1mR+{e6w6FZVy39Cf)=|eAf(aK;5Lx#xhLz z$Z(D>#&u_Y0-#-$he+cAWTL!o5>+c8ahn!|AYOGh!4a2ug z-*K(rxEJ&c!%$btw4N=u^naIcVN3@V=wi|z&pO{l;iTg>0ge9Qe!YFz#KVWpVh8Bz 
z-ZZ4;!`i#W?SsyLGb2-X77weXZi@=!*;E~8Mx6mrS2B`n-BtKvcA;)B0~p3TMA-Fa z7T?{+m=>m|Zf`ZMBh&19`)Ji_Zo}wAv!wdjjn>zT+soU
?RoUXAC*dLa-P4Z2*uTkooFcPkLwKlGQY)pGUbY<;8a?cUUMxJVhQ z@7@Pkb@%k)YP3NBE5NO|^RE}n+x}vsCnFVi4euXnBFpgd#ZjY%2t&fv8$FF|Wthku z9MkLtzHK;O;P3g@PD|o9n^?~@>#g*~>h@u`i}k=+!T-~T-FWpb-q0=27FW61D8B#s zU6vfYdJfxPUL~IP)$`LAv-$Y^Wi*++dVYprjL-NuH@dU1uZCA=J>lm1?ed+jA0IU3 z8KO9v#=9E%o<-B?QS|Eh%U938J|g8u)=@N^98JP+hv6(ddWob>O`xpfgr1sU4F-fl z{AVVA@Vg1xyC>0;$jjn^+iD7^*19GEv)@nZdE;9$Sqm zsK(MCCBx09qk`hY<)o@Kl4O_ZbZg+ucN+~mO9T~bk1L$;U@)vsHB8g)l?9pABS|1X zf4IBSi?-v6ICrtRTJB0WI!54?p??=IHp8`^SWp{G(<$Ft?@_8u>3~nrtw|3higl@Z z{#|uAVV?qt#N2m+PXDswY8tzb%L>*LYU$d-r2=!AXC*0AxTpa-0XdOofl=pCv5xT_ zzx%k9`dstODQ_#9I^QmDc5fBq_PlJO#kIP7&C>6yEvwp*GQVE0c4%zIoB67)jghK> zBDh*xzKJ)h>Ry!^`6L}#z8<4+&ZWOcmhA;rg?XOFi~DJe$?3_OqT|o6P%giE{(VC7 ztLL}d?_WJ%Etap5URd3o-f=7IThg@1s)p&?MNLmvPx1Dc?yrFfzu@IeV}7@NvzfoM z?C$RVT2WVrvZ7e1@(P8dGlQmUO(ki&?LBC_i5<({cCWVgilEF*WT4a))xEl0FIPJh zDC*krha`?{6Kt-hB|DZ|($W?}`@)GyI}Q83G({0w`}AWgNHuWuaN++NSE$6G~sa}%#_9`4kP4smI@ zAd{e-*-*pi9g+xL5DR#%U$3J{oZU@)w?#=gJ(2z_G<9w1@sO$AX@6f6r*W~nm3m4_QG6CZ7=VeAJ64Deyop^tmYIC(F z{E65V`l=O2)3hwZP4g{vYl#PG)~xQJs%`QMYDIhm@~D+61)UY$FusQsBZZV!nJ$sc z1xX^(+b1H)$Gxt}#~^nV@jtVLWce>GXcgiC?PqZar%HYViGrF^BuZNgd$QgY8~oXf z&{Q)_FYw%++cQ1ScCB7-UpX0FeCTJ{RYY4 zXjN=2v>uujsMF;?>g~OBE=p-cLD_^UB+4ll)s4e=d%anrC!bs0QP-uR5#MyKe&}rP zm)G$!w|rBaU^(~C)JQ?B%s%${SYuk(7RV!#x<`R*m(m>oc_5)H+308)-`>ps8tI7f z8?7U z!@wR54a@5H9p6J8*z@d>ANBqIz_5FkZAXLt2=!EiPrZR1`9rfma(kW^8biO|b4;7# z-Ckf0dxqr>JlF1z40qV?dxJq>4;;tE=Lq+NVKf{L`u%~UdGj_+j#5AH!!=%N(_+2J zQN*99lwxdMlf$%1Wm~by;ie_50`}uJIR=aCw{dME%5BYyjiRMA(fg+q$J2E5`&7r- z*t+sK+fuA(|FP>{g{iXMeUCPCk=t59^IL`OBLyJIAT_j~svFgq_}x##L=ElFfQf(e z(=bs(`!itTZ(sg2T-5RY4A}VHPsBzY@6UjZfAbTuQOEmfvr(@%K2m(vQGTk-`%z0A zHynnRZw)MOX!Hh-;fA5#w?c1d+jbBQjG^7fCi0%)86m0|#~4^q&yOs-A4Q?z;JWEL zqfywig8*atBdmY9-oOfn4(c2aYh8~nY5MD%A1c=2K0+~5jX6;p6^Uxg82=MptHrva zxnBgVTYP;=Me}*N5067vmOu4d%*xKH5;C@KGJ{K zbz!Zk{!!Ve;r+DPm|GS5NG#0Z{M6Z}8PrE(pN8{OXP;(BAB}w)&QF_tdd>KexTmB1 zRGHT%puhJ@+%Syr?33x*!O%eUIT}R++jNJhNPEM9-yijYfol$&$RCYH1IHd=lcpP4 
zP80;87X|%MKQsadYi!)Y=|^@nGOT{&Sbf_J`XP$haAbx<+jmVH>u{DGJ-%$6KCCL9 zNBIa+7q|0 zeqLhZd+DCY#Lr7?@^-%`a`E#LTk?{`C$jPL659u1JX+dRT{cMFzY3ER3(=^b2L>lvlz z%-WQsDMwVYTO_Gou-F-Dd7d$JbC@hjgbjLWx0RAT$~Yg#`$P zhvy*qiVFQlW{xek;EQm_8UY(QY5JxQ+VI{H$8)U_sBm$1j?4TW7vcU;FDw@qIPeGzbz=h@aWVWEj#*_PS zJ9vp^cGkqG02G4bpb~5L;Hu~aQ>znhT0LCD=4e0FmOLFz zpRmA)$6gZ-Qn&aCGK74odFfoKbtA|Q98Nfz=!Z+a5PhU(kbbOHifo9HisFnM+EU?% zFdWy3AXfu=4vWBV2hz3d0QW};1qu>?yAH@H{}2-94cj9{Y6M(6hBzkpT%qB(W442S~P~i-4y*R?coi^r}^B=;YY77QA0@##4{-ju%FgOt~<9-T5 z{nU*R-Kw0&fe{t+sGdcNp<*8O7{E_HLR+#=!C<31nDAn#q$g)g$OsJQ{PQzUe6qS5 ze2myn%-_i|#8AB3bs+;-Qu&8>qa_Is2Ya^d&>GSE(Zf*tJs*}if@h--Mr43B*f_GG ztv+nSlY0?y=n<)pP=1^h?2IEOKJ6n?Vm)|v0TyG8)J!mk5ocXZ8!W_u^GkdmeVXHr z@HBdAnMR1+KFK*P78=@XEz1usWP8))H4&d!B4Xmbnx#j_iw`UK=t`H8h!*da$}#4B z@)1bVffkQk2niBJe599vyg*P{y80Qh3K>mVL)s&Y5)b*?@i2ZsOTrN}1^xW)5DBFdIBP1TH`Z{3O3E2bM_x&hdmMzac3&mPB%?3=^5AQj2is{+>Mlnm=jt zL3AqU3*%-)4Fq=hJVX-ci-Q=?1sG6{S7$)poTca|W(qGVcMmx$juZ)t_9c``jD#4H z5zrDwDBRslC9jHp5iUoIW)VJQNsg0WjLEo8{sV3R9p>2v47Cu`c|tctI)HJ|?h^WS zh!$rB3r2>EJQ|cS4%^76k@>jgHG?a-F#)Gu%B}y$# zG&MWg|Ajp=29GEr(Y1q6m3(B5;Bs~LRfgQ&hY98ITHrcr2+&9Fj~+v*K&Z$Hg=<4v zz7CE0BUeO!gum3<%-0q)ldvtyi&_q8P=>Gs=5O$LpjI?bY@S(N0L`WkM2vNjUGqL* zlL#$VlvoV(m$QW*aG(dY9n83`O^75?y|Rg)W>f*MtppP8G24LtF_+cs6qGX$6Nxm_ zm-3003JFqbPkKm~}+5-3J^)jtE=m~_20XJQvyhBAIajOQJSaf526>g%cLEeZ86qQyW z*(B>#W}$f35GpvRkK~NJ2PK@yhoyf{xRFHKM_72m5#ephjeJ}XZqU26@b~n=lrAN! 
z#wniH9|<4XF)^Vf>_#~Boe&9LT!+vT;-7`zcbfQf$Nkr)#V0Mzz4 zS$AeQSwRSS&}CCBdW6sP0IUur8fBco8F)BGstl(!0@hMBi}_(cKK_a4rZgF04`PwI z>o{6qNpY|{LQj?0Dt!^?95PqR*O1k66PZyy(0x!_j4{j?X!(0%4|R(bQAET z&Pci0I+_tv!ZGRQpzi_?%yVcB3Y7(@?e$Wq%=|hnG|SMLfg_vFD3u6|ii=z(w{c$U z{t${#=EL7WBIx`_&JY`nLRbf-!w8P(kRNrJf0&;|50-=Ix`E^h5zaKV{yTzR;JF=MYmH zitwh)B6dv{q z!fFox5FWD;NR>0RrT>g|7rbV%8fqiAGJHZMVKUa6un|zhzcnyh^gEz$^jCz*h{=DT z5&x3hKGPO8`-8rov}&`r0gH<^5F@!zMJeo>whRq!{t-%ZbA=b^VY!IjfO!*E0q|G6 zPBkQffZPz$0N-?4GDz?>$m^>+w>nHhLeC|s0rpywK}qP*@2@@5yOV2Ss#WxqXOTgGG?0xS z{;7~fHVbr(4v(+mL*_->jca+t{C#e)lm`zke;i?%rzOCjX-J42U&|hVh(+W!7V>s|M z5jsk&4^RuXf{=^Ui1^6Nq`Hu5vK53JFT_%~&NpD6=)G(Wl`Y0{k4LNZp9*WuAdFnT zru?A2HbOww{WJ)0dM&7Eq5%+EP|*Yd6dbgXRJ-%iRGjQR6@u{LAQers5I#H!0{GCP zLTaJ`l6WEn;ln`~O*9A}o&*7WIOw|%>cbNu2p!~P`q_hgcp?Ph!$BAa z`S2bP(7A$-mg*Um?cuzlOapc#nDFxS^<2q$51j-A_6q~_BU=4b(vU!C;irkC7Q%i7 z&#(qh zU%3(Z>~{-xc44@4x4_TZIZFSzR!@>SiZ*aPf>g7gCbQX}?AEWkw67dC%l;!CJl?kP^9`1@D z-w;O>m4#U+9SP7zWp1vL1Tr`P$k^KlxTv}EHc^7jHA$KIA?SPl27pc-H-ytUiv#!Q zw|Xx3jBsTUogm$V;h5^e{!k%$3v{lDamAdS*~Ih8nqs z*inU;3uuf)kA}{@IGh3LN5`3!PHWHEsE?%GqEAS4kdnj|srIJpy;$uR%$f zUshm+t~E$s^fr+weJ^BO^ge*u47*l(0z2aycfK(C(!#aeuZCiJ4EpB=|CvIsjT;B( zZm>Ur%#}x4f^@+0G#4UL1Lho7eKCpz0$H9q@W$#p@T=@n4xfe1yaXO8_^GZ zKO-xu{gqN>W(A_d+(8d{iAPYGV#xy>)}3U&#oq;uVjN(UVXnYDFo`l*rnmP68ny;Y zQn&;Qbu^J7^9AYd6nsJIX*B2!zN-M87E4VP4ar-q@&!4_wVta0S!_UfLX(2z^Uy_t ztc6*}W_xxiXW8$jV3m)AD~z>CZq57;I+EjwzHhk|%nXabrY0vgOB+fuR#NFFJFAfG zvp6<;5F_)AK6DzIEdJc0%U^8mauQ6(HXKW#p!it3U zlvhFz_@6#OlK34^K$}BM%{Ws3S8y_~z`7#u+K*+lMt>jPniY2?q~h4z2-Im!PattD zqAP0_dIRq5WS2r}dP)g6Q;PCXxoQE(mGq?2!7d5TgwT0!{oK_RQUgCDdq;!|NsXEA z{goPl>%5X77uQUL3(OtpIb;t@fOxxql27EqM;uLe<9K! 
zzp@Sh=Vsx#8d@0hi-sd~`zS$bvdENui$cc_XG-zwaTxYI)}Eq;XWf6Q;R)-E&pd&W zE3Lvd!p}U(R$)_HLE-wDCqV2#nh9cQp2+3JX;dRw&+H;{yNWSZhE3S+tDoMZ)G)Klv4$7emXwaiF8eP zi8h4Hr{US<>0^ct2E($jvPo!oJ01B@tx?(>2H&JQQ<4lcxnT+zH;G2kBsw2Nd8wA( zH=V~e8$&_+<4{+f1BhhE48CO&0-Mr+<0e!i?Y@5!HgvT2K%c)j>*w^{vgDpWG)#GP zI-f;nm$0sAG}6i6v)_S7MS;wh!8xXf*DEE^;8o~~%mCFSI=hH4@D}?^4F?CM2a;ZZ zC6ie=KA&lC|2;Mu^>1`B2v6sy(fP^jYmH4V=r>J2FwwliOSj-Vo49fDavq+az~R3* zAJ24lD2T)8WD-T%wFB@5bSd-cWiB^R-~`mbq#EZL-u6W+{G5 zgP;x{?}7*qcJl$KB9xcWV0NKLk9G=uRBPHK9p4bNnv zM3Gs%kDLgL5k8-emjB#?LQ24EHc(Cz%TP0XHMx}LpHCSb#oZ1rCKUPj;#`}GIy{~I zg+@e!CY;XZ(-)Ibh$xYQR6sN#y=Nf~7z889Z_-~)=??pzC4qUhcS$P5cxSsR3yz8KBWoEh)FU?G0$|k^) z#o(7Wf;hZC1>fc7Mi-N_d^*GA5zI?iq%>3`QsqGhBE|F) zW-yq9BP{`KfIGeXX8zc0Y5Hw=Ilt&nqshx?GKc#;(cj*z8QgC3(9Yh(pEcT>CXU+r zNn@7)Y^UY|`JiTf8@`N^TxJg6d!k2y%!2$(fSnHNzG{fGZ}J7pSvZ@Ft0oSofs!(? z(1^x+9Su~_ma6|rauApv-gk;6eY};Y=c!;!$K(ZEbmI$F;Y>`G2BzbXCUWTzTL%68 zxC|j-v2l&dd!OcavA+Z*9N+nfwLLtg$9_X*(4dAG4d3(eeroO@%D%(2)3Lh&XuvyP z@y0{Y8T9O&Q{x;lOE>VimBz;!UOTo;BAC(mr7i_!H7XW=+f=g^MNyj(7G+elrdfP< zKaB6=)lIzGjn>zT+tYaUX7^T6bj+kzHV9$)0o=(XqV5=+UZB{CW^;Oix%M4m61D41?JUgrX0eEGZmzGct{qcd5nlfRzoj5i=jX4N zxAAImr`fJ%)X_PbHYovT-Rp-dHIST9pH7D1Y3aS#AFwxga}B^-3Yt2(`PY zkTq5o<#xGQzu7GAD#yM7?D=l9SZ!|?JB<%DFfJE4aVwCop1;_F#8=N3*Y}IxKg7FT z{ObALaEzGsmakuT zj<%cYzhAC827b(rrV>SzgY_LzRbskaY<8XX>(1`&V%NEgcZ-|FZqd10zInUrT*aN` z>fPdYdDFRhh&#J=XVJO7UEbd>S8qD&`*^e1L2RA3i+4P6700U%PJ$_S@lEIIhYocI zqt>_DB&n#s`oovc$GiCMD}4Bp2Oa9^ufOPr=fiKu!`atgbhh}Sn_HYRzWBCm${MWXVpVg7|7j@_w07z{%bL*VeL^3qv>EWzGQZ+