diff --git a/ale/base/base.py b/ale/base/base.py index 33d2bdd7a..2f3acc8c6 100644 --- a/ale/base/base.py +++ b/ale/base/base.py @@ -509,10 +509,9 @@ def read_geodata(self): from osgeo import gdal gdal.UseExceptions() except: - self._projection = "" - return self._projection + self._geodata = None + return self._geodata - self._geodata = None if isinstance(self._file, pvl.PVLModule): # save it to a temp folder with tempfile.NamedTemporaryFile() as tmp: diff --git a/ale/base/label_isis.py b/ale/base/label_isis.py index 718987454..7e2611080 100644 --- a/ale/base/label_isis.py +++ b/ale/base/label_isis.py @@ -1,4 +1,6 @@ import pvl +import json +import datetime class IsisLabel(): """ @@ -21,16 +23,44 @@ def label(self): Raised when an invalid label is provided. """ if not hasattr(self, "_label"): - if isinstance(self._file, pvl.PVLModule): + if isinstance(self._file, pvl.PVLModule) or isinstance(self._file, dict): self._label = self._file + return self._label + else: + self._label = None + grammar = pvl.grammar.ISISGrammar() grammar.comments+=(("#", "\n"), ) + try: self._label = pvl.loads(self._file, grammar=grammar) - except Exception: - self._label = pvl.load(self._file, grammar=grammar) except: + pass + + if not self._label: + try: + self._label = pvl.load(self._file, grammar=grammar) + except: + pass + + if not self._label: + try: + self._label = json.loads(self._file) + except: + pass + + if not self._label: + try: + from osgeo import gdal + gdal.UseExceptions() + geodata = gdal.Open(self._file) + self._label = json.loads(geodata.GetMetadata("json:ISIS3")[0]) + except: + pass + + if not self._label: raise ValueError("{} is not a valid label".format(self._file)) + return self._label @property @@ -196,6 +226,9 @@ def spacecraft_clock_start_count(self): if isinstance(self._clock_start_count, pvl.Quantity): self._clock_start_count = self._clock_start_count.value + if isinstance(self._clock_start_count, dict): + self._clock_start_count = 
self._clock_start_count["value"] + self._clock_start_count = str(self._clock_start_count) return self._clock_start_count @@ -222,6 +255,9 @@ def spacecraft_clock_stop_count(self): if isinstance(self._clock_stop_count, pvl.Quantity): self._clock_stop_count = self._clock_stop_count.value + if isinstance(self._clock_stop_count, dict): + self._clock_stop_count = self._clock_stop_count["value"] + self._clock_stop_count = str(self._clock_stop_count) return self._clock_stop_count @@ -238,7 +274,10 @@ def utc_start_time(self): : datetime Start time of the image in UTC """ - return self.label['IsisCube']['Instrument']['StartTime'] + utc_time = self.label['IsisCube']['Instrument']['StartTime'] + if isinstance(utc_time, str): + utc_time = datetime.datetime.fromisoformat(utc_time) + return utc_time @property def utc_stop_time(self): @@ -252,7 +291,10 @@ def utc_stop_time(self): : datetime Stop time of the image in UTC """ - return self.label['IsisCube']['Instrument']['StopTime'] + utc_time = self.label['IsisCube']['Instrument']['StopTime'] + if isinstance(utc_time, str): + utc_time = datetime.datetime.fromisoformat(utc_time) + return utc_time @property def exposure_duration(self): @@ -269,14 +311,23 @@ def exposure_duration(self): # Check for units on the PVL keyword if isinstance(exposure_duration, pvl.collections.Quantity): units = exposure_duration.units - if "ms" in units.lower() or 'milliseconds' in units.lower(): - exposure_duration = exposure_duration.value * 0.001 - else: - # if not milliseconds, the units are probably seconds - exposure_duration = exposure_duration.value + value = exposure_duration.value + elif isinstance(exposure_duration, dict): + units = "" + value = exposure_duration["value"] + if "unit" in exposure_duration: + units = exposure_duration["unit"] else: # if no units are available, assume the exposure duration is given in milliseconds - exposure_duration = exposure_duration * 0.001 + units = "ms" + value = exposure_duration + + if "ms" in 
units.lower() or 'milliseconds' in units.lower(): + exposure_duration = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + exposure_duration = value + return exposure_duration else: return self.line_exposure_duration @@ -292,16 +343,26 @@ def line_exposure_duration(self): Line exposure duration in seconds """ line_exposure_duration = self.label['IsisCube']['Instrument']['LineExposureDuration'] + # Check for units on the PVL keyword if isinstance(line_exposure_duration, pvl.collections.Quantity): units = line_exposure_duration.units - if "ms" in units.lower(): - line_exposure_duration = line_exposure_duration.value * 0.001 - else: - # if not milliseconds, the units are probably seconds - line_exposure_duration = line_exposure_duration.value + value = line_exposure_duration.value + elif isinstance(line_exposure_duration, dict): + units = "" + value = line_exposure_duration["value"] + if "unit" in line_exposure_duration: + units = line_exposure_duration["unit"] else: # if no units are available, assume the exposure duration is given in milliseconds - line_exposure_duration = line_exposure_duration * 0.001 + units = "ms" + value = line_exposure_duration + + if "ms" in units.lower() or 'milliseconds' in units.lower(): + line_exposure_duration = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + line_exposure_duration = value + return line_exposure_duration @@ -315,16 +376,26 @@ def interframe_delay(self): : float interframe delay in seconds """ - interframe_delay = self.label['IsisCube']['Instrument']['InterframeDelay'] + interframe_delay = self.label['IsisCube']['Instrument'].get('InterframeDelay', None) + if interframe_delay == None: + interframe_delay = self.label['IsisCube']['Instrument'].get('InterFrameDelay', None) if isinstance(interframe_delay, pvl.collections.Quantity): units = interframe_delay.units - if "ms" in units.lower(): - interframe_delay = interframe_delay.value * 0.001 - else: - # if not 
milliseconds, the units are probably seconds - interframe_delay = interframe_delay.value + value = interframe_delay.value + elif isinstance(interframe_delay, dict): + units = "ms" + value = interframe_delay["value"] + if "unit" in interframe_delay: + units = interframe_delay["unit"] else: # if no units are available, assume the interframe delay is given in milliseconds - interframe_delay = interframe_delay * 0.001 + units = "ms" + value = interframe_delay + + if "ms" in units.lower() or 'milliseconds' in units.lower(): + interframe_delay = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + interframe_delay = value return interframe_delay diff --git a/ale/driver_verification.py b/ale/driver_verification.py index 9ddbe12ee..06daf798b 100644 --- a/ale/driver_verification.py +++ b/ale/driver_verification.py @@ -320,7 +320,7 @@ def main(image): run_spiceinit_isis(image_isis_path) # try ale.loads - isis_kerns = ale.util.generate_kernels_from_cube(image_isis_path, expand=True) + isis_kerns = ale.kernel_access.generate_kernels_from_cube(image_isis_path, expand=True) # this can be uncommented and used when the PVL loads fix PR goes in (#587) isis_label = pvl.load(image_isis_path) try: @@ -333,7 +333,7 @@ def main(image): run_spiceinit_ale(image_ale_path) # try ale.loads - ale_kerns = ale.util.generate_kernels_from_cube(image_ale_path, expand=True) + ale_kerns = ale.kernel_access.generate_kernels_from_cube(image_ale_path, expand=True) ale.loads(image_ale_path, props={"kernels": ale_kerns}, only_naif_spice=True) # Generate ISD for both ALE and ISIS diff --git a/ale/drivers/__init__.py b/ale/drivers/__init__.py index 1c9cb0974..51dacad22 100644 --- a/ale/drivers/__init__.py +++ b/ale/drivers/__init__.py @@ -125,7 +125,10 @@ def load(label, props={}, formatter='ale', verbose=False, only_isis_spice=False, if verbose: logger.info("First parse attempt failed with") logger.info(e) - # If pds3 label fails, try isis 
grammar + parsed_label = None + + # If pds3 label fails, try isis grammar + if not parsed_label: try: parsed_label = parse_label(label, pvl.grammar.ISISGrammar()) except Exception as e: @@ -135,6 +138,19 @@ def load(label, props={}, formatter='ale', verbose=False, only_isis_spice=False, # If both fail, then don't parse the label, and just pass the driver a file. parsed_label = None + # If pvl label loading fails, try gdal + if not parsed_label: + try: + from osgeo import gdal + gdal.UseExceptions() + geodata = gdal.Open(label) + parsed_label = json.loads(geodata.GetMetadata("json:ISIS3")[0]) + except Exception as e: + if verbose: + logger.info("Gdal parse attempt failed with") + logger.info(e) + parsed_label = None + if verbose: if parsed_label: logger.info("Successfully pre-parsed label file") diff --git a/ale/drivers/apollo_drivers.py b/ale/drivers/apollo_drivers.py index b52e2c9bd..be7a8c2d0 100644 --- a/ale/drivers/apollo_drivers.py +++ b/ale/drivers/apollo_drivers.py @@ -74,14 +74,22 @@ def exposure_duration(self): # Check for units on the PVL keyword if isinstance(exposure_duration, pvl.collections.Quantity): units = exposure_duration.units - if "ms" in units.lower() or 'milliseconds' in units.lower(): - exposure_duration = exposure_duration.value * 0.001 - else: - # if not milliseconds, the units are probably seconds - exposure_duration = exposure_duration.value + value = exposure_duration.value + elif isinstance(exposure_duration, dict): + units = "" + value = exposure_duration["value"] + if "unit" in exposure_duration: + units = exposure_duration["unit"] else: # if no units are available, assume the exposure duration is given in milliseconds - exposure_duration = exposure_duration * 0.001 + units = "ms" + value = exposure_duration + + if "ms" in units.lower() or 'milliseconds' in units.lower(): + exposure_duration = value * 0.001 + else: + # if not milliseconds, the units are probably seconds + exposure_duration = value return exposure_duration 
@property diff --git a/ale/drivers/co_drivers.py b/ale/drivers/co_drivers.py index 8c0e163c5..c1e6627be 100644 --- a/ale/drivers/co_drivers.py +++ b/ale/drivers/co_drivers.py @@ -375,11 +375,14 @@ def exposure_duration(self): exposure_duration = self.label['IsisCube']['Instrument']['ExposureDuration'] for i in exposure_duration: - if i.units == "VIS": - exposure_duration = i - - exposure_duration = exposure_duration.value * 0.001 - return exposure_duration + if isinstance(i, pvl.collections.Quantity): + if i.units == "VIS": + exposure_duration = i.value + elif isinstance(i, dict): + if i["unit"] == "VIS": + exposure_duration = i["value"] + + return exposure_duration * 0.001 else: return self.line_exposure_duration @@ -502,13 +505,15 @@ def exposure_duration(self): """ if 'ExposureDuration' in self.label['IsisCube']['Instrument']: exposure_duration = self.label['IsisCube']['Instrument']['ExposureDuration'] - for i in exposure_duration: - if i.units == "VIS": - exposure_duration = i - - exposure_duration = exposure_duration.value * 0.001 - return exposure_duration + if isinstance(exposure_duration, pvl.collections.Quantity): + if exposure_duration.units == "VIS": + exposure_duration = exposure_duration.value + elif isinstance(exposure_duration, dict): + if exposure_duration["unit"] == "VIS": + exposure_duration = exposure_duration["value"] + + return exposure_duration * 0.001 else: return self.line_exposure_duration diff --git a/ale/drivers/isis_ideal_drivers.py b/ale/drivers/isis_ideal_drivers.py index ff9a59575..51a16e965 100644 --- a/ale/drivers/isis_ideal_drivers.py +++ b/ale/drivers/isis_ideal_drivers.py @@ -37,8 +37,13 @@ def ephemeris_start_time(self): float : The image start ephemeris time """ - - return self.label.get('IsisCube').get('Instrument').get("EphemerisTime").value + if not hasattr(self, "_ephemeris_start_time"): + self._ephemeris_start_time = self.label['IsisCube']['Instrument']["EphemerisTime"] + if isinstance(self._ephemeris_start_time, pvl.collections.Quantity): + self._ephemeris_start_time = 
self._ephemeris_start_time.value + elif isinstance(self._ephemeris_start_time, dict): + self._ephemeris_start_time = self._ephemeris_start_time["value"] + return self._ephemeris_start_time @property diff --git a/ale/drivers/lo_drivers.py b/ale/drivers/lo_drivers.py index df727d5f0..4f8301e32 100644 --- a/ale/drivers/lo_drivers.py +++ b/ale/drivers/lo_drivers.py @@ -1,5 +1,7 @@ import numpy as np +import pvl import spiceypy as spice + from ale.base.data_naif import NaifSpice from ale.base.label_isis import IsisLabel from ale.base.type_sensor import Framer @@ -151,11 +153,21 @@ def naif_keywords(self): if (not hasattr(self, "_naif_keywords")): # From ISIS LoCameraFiducialMap + p_fidSamples = self.label['IsisCube']['Instrument']['FiducialSamples'] + p_fidLines = self.label['IsisCube']['Instrument']['FiducialLines'] + p_fidXCoords = self.label['IsisCube']['Instrument']['FiducialXCoordinates'] + p_fidYCoords = self.label['IsisCube']['Instrument']['FiducialYCoordinates'] # Read Fiducials - p_fidSamples = self.label['IsisCube']['Instrument']['FiducialSamples'].value - p_fidLines = self.label['IsisCube']['Instrument']['FiducialLines'].value - p_fidXCoords = self.label['IsisCube']['Instrument']['FiducialXCoordinates'].value - p_fidYCoords = self.label['IsisCube']['Instrument']['FiducialYCoordinates'].value + if isinstance(p_fidSamples, pvl.collections.Quantity): + p_fidSamples = p_fidSamples.value + p_fidLines = p_fidLines.value + p_fidXCoords = p_fidXCoords.value + p_fidYCoords = p_fidYCoords.value + elif isinstance(p_fidSamples, dict): + p_fidSamples = p_fidSamples["value"] + p_fidLines = p_fidLines["value"] + p_fidXCoords = p_fidXCoords["value"] + p_fidYCoords = p_fidYCoords["value"] # Create Affine Transformation p_src = [p_fidSamples, p_fidLines] diff --git a/ale/drivers/lro_drivers.py b/ale/drivers/lro_drivers.py index ce7d6aee0..42861d334 100644 --- a/ale/drivers/lro_drivers.py +++ b/ale/drivers/lro_drivers.py @@ -1,6 +1,6 @@ -import spiceypy as spice - import numpy 
as np +import pvl +import spiceypy as spice from pyspiceql import pyspiceql from ale.base import Driver @@ -756,7 +756,11 @@ def wavelength(self): """ # Get float value of frequency in GHz - frequency = self.label['IsisCube']['Instrument']['Frequency'].value + frequency = self.label['IsisCube']['Instrument']['Frequency'] + if isinstance(frequency, pvl.collections.Quantity): + frequency = frequency.value + elif isinstance(frequency, dict): + frequency = frequency["value"] #wavelength = spice.clight() / frequency / 1000.0 wavelength = 299792.458 / frequency / 1000.0 return wavelength diff --git a/ale/drivers/mess_drivers.py b/ale/drivers/mess_drivers.py index 9dd8cd4a4..570c50b81 100644 --- a/ale/drivers/mess_drivers.py +++ b/ale/drivers/mess_drivers.py @@ -1,4 +1,5 @@ import numpy as np +import pvl from pyspiceql import pyspiceql from ale.base import Driver @@ -347,7 +348,11 @@ def focal_length(self): f_t = np.poly1d(coeffs[::-1]) # eval at the focal_plane_temperature - self._focal_length = f_t(self.label['IsisCube']['Instrument']['FocalPlaneTemperature'].value) + focal_temp = self.label['IsisCube']['Instrument']['FocalPlaneTemperature'] + if isinstance(focal_temp, pvl.collections.Quantity): + self._focal_length = f_t(focal_temp.value) + elif isinstance(focal_temp, dict): + self._focal_length = f_t(focal_temp["value"]) return self._focal_length @property diff --git a/ale/drivers/mgs_drivers.py b/ale/drivers/mgs_drivers.py index 3671d8579..0daa9242b 100644 --- a/ale/drivers/mgs_drivers.py +++ b/ale/drivers/mgs_drivers.py @@ -53,7 +53,7 @@ def ephemeris_stop_time(self): the ephemeris stop time of the image, so compute the ephemeris stop time from the start time and the exposure duration. 
""" - return self.ephemeris_start_time + (self.exposure_duration/1000 * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) + return self.ephemeris_start_time + (self.exposure_duration * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) @property def detector_start_sample(self): @@ -186,7 +186,7 @@ def ephemeris_stop_time(self): the ephemeris stop time of the image, so compute the ephemeris stop time from the start time and the exposure duration. """ - return self.ephemeris_start_time + (self.exposure_duration/1000 * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) + return self.ephemeris_start_time + (self.exposure_duration * ((self.image_lines) * self.label['IsisCube']['Instrument']['DowntrackSumming'])) @property def detector_start_sample(self): diff --git a/ale/drivers/mro_drivers.py b/ale/drivers/mro_drivers.py index 4c9cb1ae7..52f36ce4f 100644 --- a/ale/drivers/mro_drivers.py +++ b/ale/drivers/mro_drivers.py @@ -101,7 +101,7 @@ def compute_marci_time(self, line): } self._filters = self.label["IsisCube"]["BandBin"]["FilterName"] - self._framelet_rate = self.label["IsisCube"]["Instrument"]["InterframeDelay"].value + self._framelet_rate = self.interframe_delay framelet_height = 16 self._actual_framelet_height = framelet_height / sum_mode diff --git a/ale/drivers/nh_drivers.py b/ale/drivers/nh_drivers.py index 9ea2f89af..906ec6488 100644 --- a/ale/drivers/nh_drivers.py +++ b/ale/drivers/nh_drivers.py @@ -343,8 +343,10 @@ def band_times(self): band_times = self.label['IsisCube']['BandBin']['UtcTime'] self._ephem_band_times = [] for time in band_times: - if type(time) is pvl.Quantity: - time = time.value + if isinstance(time, pvl.collections.Quantity): + time = time.value + elif isinstance(time, dict): + time = time["value"] self._ephem_band_times.append(self.spiceql_call("utcToEt", {"utc": time.strftime("%Y-%m-%d %H:%M:%S.%f")})) return self._ephem_band_times diff --git 
a/ale/drivers/ody_drivers.py b/ale/drivers/ody_drivers.py index 499f577da..b9e27fa7c 100644 --- a/ale/drivers/ody_drivers.py +++ b/ale/drivers/ody_drivers.py @@ -111,6 +111,13 @@ def start_time(self): else: # if not milliseconds, the units are probably seconds offset = offset.value + elif isinstance(offset, dict): + units = offset["unit"] + if "ms" in units.lower(): + offset = offset["value"] * 0.001 + else: + # if not milliseconds, the units are probably seconds + offset = offset["value"] return og_start_time + offset @@ -321,6 +328,13 @@ def start_time(self): else: # if not milliseconds, the units are probably seconds offset = offset.value + elif isinstance(offset, dict): + units = offset["unit"] + if "ms" in units.lower(): + offset = offset["value"] * 0.001 + else: + # if not milliseconds, the units are probably seconds + offset = offset["value"] return og_start_time + offset - (self.exposure_duration / 2) diff --git a/ale/isd_generate.py b/ale/isd_generate.py index b8bc69444..f2acab4fd 100755 --- a/ale/isd_generate.py +++ b/ale/isd_generate.py @@ -155,7 +155,7 @@ def main(): k = None else: try: - k = ale.util.generate_kernels_from_cube(args.kernel, expand=True) + k = ale.kernel_access.generate_kernels_from_cube(args.kernel, expand=True) except (KeyError, pvl.exceptions.LexerError): k = [args.kernel, ] diff --git a/ale/kernel_access.py b/ale/kernel_access.py index 5705ad08b..3312ee3d7 100644 --- a/ale/kernel_access.py +++ b/ale/kernel_access.py @@ -5,9 +5,11 @@ from os import path import re import warnings +from collections.abc import Iterable import numpy as np import pvl +import json from ale import spice_root from ale.util import get_isis_preferences @@ -130,16 +132,25 @@ def generate_kernels_from_cube(cube, expand=False, format_as='list'): Dictionary of lists of kernels with the keys being the Keywords from the Kernels group of cube itself, and the values being the values associated with that Keyword in the cube. 
""" - # enforce key order - mk_paths = OrderedDict.fromkeys( - ['TargetPosition', 'InstrumentPosition', - 'InstrumentPointing', 'Frame', 'TargetAttitudeShape', - 'Instrument', 'InstrumentAddendum', 'LeapSecond', - 'SpacecraftClock', 'Extra']) - # just work with full path cube = os.path.abspath(cube) - cubelabel = pvl.load(cube) + cubelabel = None + try: + cubelabel = pvl.load(cube) + except: + cubelabel = None + + if (cubelabel == None): + try: + from osgeo import gdal + gdal.UseExceptions() + geodata = gdal.Open(cube) + cubelabel = json.loads(geodata.GetMetadata("json:ISIS3")[0]) + except Exception as e: + cubelabel = None + + if (cubelabel == None): + raise RuntimeError(f"Could not parse {cube} for pvl or json label") try: kernel_group = cubelabel['IsisCube'] @@ -155,33 +166,28 @@ def get_kernels_from_isis_pvl(kernel_group, expand=True, format_as="list"): ['TargetPosition', 'InstrumentPosition', 'InstrumentPointing', 'Frame', 'TargetAttitudeShape', 'Instrument', 'InstrumentAddendum', 'LeapSecond', - 'SpacecraftClock', 'Extra']) - + 'SpacecraftClock', 'Extra', 'Clock', 'ShapeModel']) if isinstance(kernel_group, str): kernel_group = pvl.loads(kernel_group) kernel_group = kernel_group["Kernels"] - def load_table_data(key): + def read_kernels(key): mk_paths[key] = kernel_group.get(key, None) - if isinstance(mk_paths[key], str): + if (mk_paths[key] == "Null"): + mk_paths[key] = None + if isinstance(mk_paths[key], str) or mk_paths[key] == None: mk_paths[key] = [mk_paths[key]] while 'Table' in mk_paths[key]: mk_paths[key].remove('Table') while 'Nadir' in mk_paths[key]: mk_paths[key].remove('Nadir') - load_table_data('TargetPosition') - load_table_data('InstrumentPosition') - load_table_data('InstrumentPointing') - load_table_data('TargetAttitudeShape') - # the rest - mk_paths['Frame'] = [kernel_group.get('Frame', None)] - mk_paths['Instrument'] = [kernel_group.get('Instrument', None)] - mk_paths['InstrumentAddendum'] = [kernel_group.get('InstrumentAddendum', None)] - 
mk_paths['SpacecraftClock'] = [kernel_group.get('SpacecraftClock', None)] - mk_paths['LeapSecond'] = [kernel_group.get('LeapSecond', None)] - mk_paths['Clock'] = [kernel_group.get('Clock', None)] - mk_paths['Extra'] = [kernel_group.get('Extra', None)] + for key in mk_paths.keys(): + read_kernels(key) + + if (mk_paths['ShapeModel'][0]): + if (os.path.splitext(mk_paths['ShapeModel'][0])[-1] != ".bds"): + mk_paths['ShapeModel'] = [None] # handles issue with OsirisRex instrument kernels being in a 2d list if isinstance(mk_paths['Instrument'][0], list): @@ -218,6 +224,7 @@ def load_table_data(key): return mk_paths elif (format_as == 'spiceql'): mk_paths.pop("Clock") + mk_paths.pop("ShapeModel") mk_paths["ck"] = [k.replace("$", "") for k in mk_paths.pop("InstrumentPointing") if k] mk_paths["spk"] = [k.replace("$", "") for k in mk_paths.pop("InstrumentPosition") if k] mk_paths["pck"] = [k.replace("$", "") for k in mk_paths.pop("TargetAttitudeShape") if k] diff --git a/ale/util.py b/ale/util.py index aad9fcd54..da1547f19 100644 --- a/ale/util.py +++ b/ale/util.py @@ -316,184 +316,6 @@ def replace_var(m): return path -def generate_kernels_from_cube(cube, expand=False, format_as='list'): - """ - Parses a cube label to obtain the kernels from the Kernels group. - - Parameters - ---------- - cube : cube - Path to the cube to pull the kernels from. - expand : bool, optional - Whether or not to expand variables within kernel paths based on your IsisPreferences file. - See :func:`get_isis_preferences` for how the IsisPreferences file is found. - format_as : str, optional {'list', 'dict'} - How to return the kernels: either as a one-dimensional ordered list, or as a dictionary - of kernel lists. - - Returns - ------- - : list - One-dimensional ordered list of all kernels from the Kernels group in the cube. 
- : Dictionary - Dictionary of lists of kernels with the keys being the Keywords from the Kernels group of - cube itself, and the values being the values associated with that Keyword in the cube. - """ - # just work with full path - cube = os.path.abspath(cube) - cubelabel = pvl.load(cube) - - try: - kernel_group = cubelabel['IsisCube'] - except KeyError: - raise KeyError(f'{cubelabel}, Could not find kernels group, input cube [{cube}] may not be spiceinited') - - return get_kernels_from_isis_pvl(kernel_group, expand, format_as) - -def get_kernels_from_isis_pvl(kernel_group, expand=True, format_as="list"): - """ - Extract kernels from ISIS PVL. - - Parameters - ---------- - kernel_group : str - The target kernel group to extract - expand : bool, optional - True if values of environment variables should be expanded, by default True - format_as : str, optional - Desired output format, by default "list" - - Returns - ------- - list|str|obj - The extracted kernels in the user-specified format - - Raises - ------ - Exception - Raised if the user specifies an invalid or unsupported format. 
- """ - # enforce key order - mk_paths = OrderedDict.fromkeys( - ['TargetPosition', 'InstrumentPosition', - 'InstrumentPointing', 'Frame', 'TargetAttitudeShape', - 'Instrument', 'InstrumentAddendum', 'LeapSecond', - 'SpacecraftClock', 'Extra', 'ShapeModel']) - if isinstance(kernel_group, str): - kernel_group = pvl.loads(kernel_group) - - kernel_group = kernel_group["Kernels"] - - def load_table_data(key): - mk_paths[key] = kernel_group.get(key, None) - if isinstance(mk_paths[key], str): - mk_paths[key] = [mk_paths[key]] - while 'Table' in mk_paths[key]: mk_paths[key].remove('Table') - while 'Nadir' in mk_paths[key]: mk_paths[key].remove('Nadir') - - load_table_data('TargetPosition') - load_table_data('InstrumentPosition') - load_table_data('InstrumentPointing') - load_table_data('TargetAttitudeShape') - # the rest - mk_paths['Frame'] = [kernel_group.get('Frame', None)] - mk_paths['Instrument'] = [kernel_group.get('Instrument', None)] - mk_paths['InstrumentAddendum'] = [kernel_group.get('InstrumentAddendum', None)] - mk_paths['SpacecraftClock'] = [kernel_group.get('SpacecraftClock', None)] - mk_paths['LeapSecond'] = [kernel_group.get('LeapSecond', None)] - mk_paths['Clock'] = [kernel_group.get('Clock', None)] - mk_paths['Extra'] = [kernel_group.get('Extra', None)] - mk_paths['ShapeModel'] = [kernel_group.get('ShapeModel', None)] - if (mk_paths['ShapeModel'][0]): - if (os.path.splitext(mk_paths['ShapeModel'][0])[-1] != "bds"): - mk_paths['ShapeModel'] = [None] - - # handles issue with OsirisRex instrument kernels being in a 2d list - if isinstance(mk_paths['Instrument'][0], list): - mk_paths['Instrument'] = np.concatenate(mk_paths['Instrument']).flat - - if (format_as == 'list'): - # get kernels as 1-d string list - kernels = [] - for kernel in chain.from_iterable(mk_paths.values()): - if isinstance(kernel, str): - kernels.append(kernel) - elif isinstance(kernel, list): - kernels.extend(kernel) - if expand: - isisprefs = get_isis_preferences() - if not 
"DataDirectory" in isisprefs: - warnings.warn("No IsisPreferences file found, is your ISISROOT env var set?") - - kernels = [expandvars(k, isisprefs['DataDirectory'], case_sensitive=False) for k in kernels] - # Ensure that the ISIS Addendum kernel is last in case it overrides - # some values from the default Instrument kernel - # Sorts planetary constants kernel first so it can be overridden by more specific kernels - kernels = sorted(kernels, key=lambda x: "Addendum" in x) - kernels = sorted(kernels, key=lambda x: "pck00" in x, reverse=True) - return kernels - elif (format_as == 'dict'): - # return created dict - if expand: - isisprefs = get_isis_preferences() - for kern_list in mk_paths: - for index, kern in enumerate(mk_paths[kern_list]): - if kern is not None: - mk_paths[kern_list][index] = expandvars(kern, isisprefs['DataDirectory'], case_sensitive=False) - return mk_paths - else: - raise Exception(f'{format_as} is not a valid return format') - -def write_metakernel_from_cube(cube, mkpath=None): - """ - Create a metakernel from a spiceinit'd cube. - - Parameters - ---------- - cube : str - The string filename of the cube from which to generate a metakernel. - mkpath : str, optional - The path to the output metakernel or None if not written to disk, by default None - - Returns - ------- - str - The text of the generated metakernel. 
- """ - # add ISISPREF paths as path_symbols and path_values to avoid custom expand logic - pvlprefs = get_isis_preferences() - - kernels = generate_kernels_from_cube(cube) - - # make sure kernels are mk strings - kernels = ["'"+k+"'" for k in kernels] - - paths = OrderedDict(pvlprefs['DataDirectory']) - path_values = ["'"+os.path.expandvars(path)+"'" for path in paths.values()] - path_symbols = ["'"+symbol.lower()+"'" for symbol in paths.keys()] - - body = '\n\n'.join([ - 'KPL/MK', - f'Metakernel Generated from an ISIS cube: {cube}', - '\\begindata', - 'PATH_VALUES = (', - '\n'.join(path_values), - ')', - 'PATH_SYMBOLS = (', - '\n'.join(path_symbols), - ')', - 'KERNELS_TO_LOAD = (', - '\n'.join(kernels), - ')', - '\\begintext' - ]) - - if mkpath is not None: - with open(mkpath, 'w') as f: - f.write(body) - - return body - def get_ck_frames(kernel): """ Get all of the reference frames defined in a kernel. @@ -904,121 +726,6 @@ def search_isis_db(dbobj, labelobj, isis_data): return kernels -def expandvars(path, env_dict=os.environ, default=None, case_sensitive=True): - if env_dict != os.environ: - env_dict = dict_merge(env_dict, os.environ) - - while "$" in path: - user_dict = env_dict if case_sensitive else dict_to_lower(env_dict) - - def replace_var(m): - group1 = m.group(1) if case_sensitive else m.group(1).lower() - val = user_dict.get(m.group(2) or group1 if default is None else default) - if not val: - raise KeyError(f"Failed to evaluate {m.group(0)} from env_dict. " + - f"Should {m.group(0)} be an environment variable?") - - return val - reVar = r'\$(\w+|\{([^}]*)\})' - path = re.sub(reVar, replace_var, path) - return path - -def find_kernels(cube, isis_data, format_as=dict): - """ - Find all kernels for a cube and return a json object with categorized kernels. 
- - Parameters - ---------- - - cube : str - Path to an ISIS cube - - isis_data : str - path to $ISISDATA - - format_as : obj - What type to return the kernels as, ISIS3-like dict/PVL or flat list - - Returns - ------- - : obj - Container with kernels - """ - def remove_dups(listofElements): - # Create an empty list to store unique elements - uniqueList = [] - - # Iterate over the original list and for each element - # add it to uniqueList, if its not already there. - for elem in listofElements: - if elem not in uniqueList: - uniqueList.append(elem) - - # Return the list of unique elements - return uniqueList - - cube_label = pvl.load(cube) - mission_lookup_table = get_isis_mission_translations(isis_data) - - mission_dir = mission_lookup_table[cube_label["IsisCube"]["Instrument"]["SpacecraftName"]] - mission_dir = path.join(isis_data, mission_dir.lower()) - - kernel_dir = path.join(mission_dir, "kernels") - base_kernel_dir = path.join(isis_data, "base", "kernels") - - kernel_types = [ name for name in os.listdir(kernel_dir) if os.path.isdir(os.path.join(kernel_dir, name)) ] - kernel_types.extend(name for name in os.listdir(base_kernel_dir) if os.path.isdir(os.path.join(base_kernel_dir, name))) - kernel_types = set(kernel_types) - - db_files = [] - for typ in kernel_types: - files = sorted(glob(path.join(kernel_dir, typ, "*.db"))) - base_files = sorted(glob(path.join(base_kernel_dir, typ, "*.db"))) - files = [list(it) for k,it in groupby(files, key=lambda f:os.path.basename(f).split(".")[0])] - base_files = [list(it) for k,it in groupby(base_files, key=lambda f:os.path.basename(f).split(".")[0])] - - for instrument_dbs in files: - db_files.append(read_pvl(sorted(instrument_dbs)[-1], True)) - for base_dbs in base_files: - db_files.append(read_pvl(sorted(base_dbs)[-1], True)) - - - kernels = {} - for f in db_files: - #TODO: Error checking - typ = f[0][0] - kernel_search_results = search_isis_db(f[0][1], cube_label, isis_data) - - if not kernel_search_results: - 
kernels[typ] = None - else: - try: - kernels[typ]["kernels"].extend(kernel_search_results["kernels"]) - if any(kernel_search_results.get("types", [None])): - kernels[typ]["types"].extend(kernel_search_results["types"]) - except: - kernels[typ] = {} - kernels[typ]["kernels"] = kernel_search_results["kernels"] - if any(kernel_search_results.get("types", [None])): - kernels[typ]["types"] = kernel_search_results["types"] - - for k,v in kernels.items(): - if v: - kernels[k]["kernels"] = remove_dups(v["kernels"]) - - if format_as == dict: - return kernels - elif format_as == list: - kernel_list = [] - for _,kernels in kernels.items(): - if kernels: - kernel_list.extend(kernels["kernels"]) - return kernel_list - else: - warnings.warn(f"{format_as} is not a valid format, returning as dict") - return kernels - - def merge_kernels(dict1, dict2, strategy='combine'): """ Merge two dictionaries with configurable conflict resolution strategies. diff --git a/environment.yml b/environment.yml index 0bd139c1d..888e5de0f 100644 --- a/environment.yml +++ b/environment.yml @@ -8,7 +8,7 @@ dependencies: - brotli-python - cmake>=3.15 - eigen - - gdal + - gdal >=3.12.2 - jupyter - networkx - nlohmann_json diff --git a/tests/pytests/data/EN1072174528M/EN1072174528M.tiff b/tests/pytests/data/EN1072174528M/EN1072174528M.tiff new file mode 100644 index 000000000..bf026a773 Binary files /dev/null and b/tests/pytests/data/EN1072174528M/EN1072174528M.tiff differ diff --git a/tests/pytests/data/isds/mgsmocna_isd.json b/tests/pytests/data/isds/mgsmocna_isd.json index 267cf4618..7ef352e27 100644 --- a/tests/pytests/data/isds/mgsmocna_isd.json +++ b/tests/pytests/data/isds/mgsmocna_isd.json @@ -15,7 +15,7 @@ [ 0.5, -2.9620223995298147, - 0.4821 + 0.0004821 ] ], "starting_ephemeris_time": -11224002.182053322, diff --git a/tests/pytests/data/isds/mgsmocwa_isd.json b/tests/pytests/data/isds/mgsmocwa_isd.json index d54fc0042..0bc2f3981 100644 --- a/tests/pytests/data/isds/mgsmocwa_isd.json +++ 
b/tests/pytests/data/isds/mgsmocwa_isd.json @@ -15,7 +15,7 @@ [ 0.5, -153.59999999403954, - 100.0 + 0.1 ] ], "starting_ephemeris_time": -69382819.36051902, diff --git a/tests/pytests/test_gtiff_label.py b/tests/pytests/test_gtiff_label.py new file mode 100644 index 000000000..2cc534a7f --- /dev/null +++ b/tests/pytests/test_gtiff_label.py @@ -0,0 +1,60 @@ +import pytest +import json +from osgeo import gdal +from datetime import datetime, timezone + +import ale +from ale import base +from ale.base.label_isis import IsisLabel + +@pytest.fixture +def test_gtiff_label(monkeypatch): + geodataset = gdal.Open("tests/pytests/data/EN1072174528M/EN1072174528M.tiff") + label = geodataset.GetMetadata("json:ISIS3")[0] + + isis_label = IsisLabel() + isis_label._file = label + + return isis_label + +def test_isis_label(test_gtiff_label): + assert "IsisCube" in test_gtiff_label.label.keys() + +def test_spacecraft_clock_start_count(test_gtiff_label): + assert test_gtiff_label.spacecraft_clock_start_count == "2/0072174528:989000" + +def test_spacecraft_clock_stop_count(test_gtiff_label): + assert test_gtiff_label.spacecraft_clock_stop_count == "2/0072174528:990000" + +def test_utc_start_time(test_gtiff_label): + assert test_gtiff_label.utc_start_time == datetime(2015, 4, 24, 4, 42, 19, 666463) + +def test_utc_stop_time(test_gtiff_label): + assert test_gtiff_label.utc_stop_time == datetime(2015, 4, 24, 4, 42, 19, 667463) + +def test_target_name(test_gtiff_label): + assert test_gtiff_label.target_name.lower() == "mercury" + +def test_exposure_duration(test_gtiff_label): + assert test_gtiff_label.exposure_duration == 0.001 + +def test_image_samples(test_gtiff_label): + assert test_gtiff_label.image_samples == 512 + +def test_image_lines(test_gtiff_label): + assert test_gtiff_label.image_lines == 512 + +def test_sample_summing(test_gtiff_label): + assert test_gtiff_label.sample_summing == 1 + +def test_line_summing(test_gtiff_label): + assert test_gtiff_label.line_summing == 1 + +def 
test_instrument_id(test_gtiff_label): + assert test_gtiff_label.instrument_id == "MDIS-NAC" + +def test_platform_name(test_gtiff_label): + assert test_gtiff_label.platform_name.lower() == "messenger" + +def test_sensor_name(test_gtiff_label): + assert test_gtiff_label.sensor_name.lower() == "mercury dual imaging system narrow angle camera" diff --git a/tests/pytests/test_kernel_access.py b/tests/pytests/test_kernel_access.py index 09e3d1a99..fa83fef43 100644 --- a/tests/pytests/test_kernel_access.py +++ b/tests/pytests/test_kernel_access.py @@ -135,7 +135,19 @@ def test_kernel_from_cube_dict(cube_kernels): cube.write(cube_kernels) cube.flush() kernels = kernel_access.generate_kernels_from_cube(cube.name, format_as='dict') - assert kernels == OrderedDict([('TargetPosition', ['$messenger/targetposition0', '$messenger/targetposition1']), ('InstrumentPosition', ['$messenger/instrumentposition']), ('InstrumentPointing', ['$messenger/instrumentpointing0', '$messenger/instrumentpointing1']), ('Frame', [None]), ('TargetAttitudeShape', ['$base/attitudeshape']), ('Instrument', ['$messenger/instrument']), ('InstrumentAddendum', [None]), ('LeapSecond', [None]), ('SpacecraftClock', ['$base/clock']), ('Extra', [None]), ('Clock', [None])]) + expected_dict = OrderedDict([('TargetPosition', ['$messenger/targetposition0', '$messenger/targetposition1']), + ('InstrumentPosition', ['$messenger/instrumentposition']), + ('InstrumentPointing', ['$messenger/instrumentpointing0', '$messenger/instrumentpointing1']), + ('Frame', [None]), + ('TargetAttitudeShape', ['$base/attitudeshape']), + ('Instrument', ['$messenger/instrument']), + ('InstrumentAddendum', [None]), + ('LeapSecond', [None]), + ('SpacecraftClock', ['$base/clock']), + ('Extra', [None]), + ('Clock', [None]), + ('ShapeModel', [None])]) + assert kernels == expected_dict def test_kernel_from_cube_dict_expanded(monkeypatch, tmpdir, pvl_four_group, cube_kernels): with patch.dict('os.environ', {'ISISROOT': str(tmpdir), 'ISIS3DATA': 
'$ISISDATA', 'ISISDATA': '/test/path'}): @@ -148,7 +160,20 @@ def test_kernel_from_cube_dict_expanded(monkeypatch, tmpdir, pvl_four_group, cub cube.write(cube_kernels) cube.flush() kernels = kernel_access.generate_kernels_from_cube(cube.name, expand=True, format_as='dict') - assert kernels == OrderedDict([('TargetPosition', ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1']), ('InstrumentPosition', ['/test/path/messenger/instrumentposition']), ('InstrumentPointing', ['/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1']), ('Frame', [None]), ('TargetAttitudeShape', ['/test/path/base/attitudeshape']), ('Instrument', ['/test/path/messenger/instrument']), ('InstrumentAddendum', [None]), ('LeapSecond', [None]), ('SpacecraftClock', ['/test/path/base/clock']), ('Extra', [None]), ('Clock', [None])]) + print(kernels.keys()) + expected_dict = OrderedDict([('TargetPosition', ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1']), + ('InstrumentPosition', ['/test/path/messenger/instrumentposition']), + ('InstrumentPointing', ['/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1']), + ('Frame', [None]), + ('TargetAttitudeShape', ['/test/path/base/attitudeshape']), + ('Instrument', ['/test/path/messenger/instrument']), + ('InstrumentAddendum', [None]), + ('LeapSecond', [None]), + ('SpacecraftClock', ['/test/path/base/clock']), + ('Extra', [None]), + ('Clock', [None]), + ('ShapeModel', [None])]) + assert kernels == expected_dict def test_kernel_from_cube_no_kernel_group(): with pytest.raises(KeyError): diff --git a/tests/pytests/test_load.py b/tests/pytests/test_load.py index 35ecbf26f..247cc7f74 100644 --- a/tests/pytests/test_load.py +++ b/tests/pytests/test_load.py @@ -13,7 +13,7 @@ from ale.drivers.mess_drivers import MessengerMdisPds3NaifSpiceDriver -from conftest import get_image_label, get_image_kernels, convert_kernels +from conftest import 
get_image_label, get_image_kernels, convert_kernels, get_isd, compare_dicts @pytest.fixture() def mess_kernels(): @@ -45,6 +45,18 @@ def test_mess_load(class_truth, return_val, mess_kernels): assert str(load_failure) == "No Such Driver for Label" assert return_val is False +def test_mess_load_gtiff(mess_kernels): + label_file = "tests/pytests/data/EN1072174528M/EN1072174528M.tiff" + + usgscsm_isd_str = ale.loads(label_file, {'kernels': mess_kernels, "attach_kernels": False}) + usgscsm_isd_obj = json.loads(usgscsm_isd_str) + print(usgscsm_isd_obj) + + compare_isd = get_isd("messmdis") + + comparison = compare_dicts(usgscsm_isd_obj, compare_isd) + assert comparison == [] + def test_load_invalid_label(): with pytest.raises(Exception): ale.load('Not a label path') diff --git a/tests/pytests/test_util.py b/tests/pytests/test_util.py index c8f075e41..cd725372f 100644 --- a/tests/pytests/test_util.py +++ b/tests/pytests/test_util.py @@ -97,133 +97,6 @@ def test_pvl_parser(pvl_three_group): assert obj["Test"]["t"] == "t3" assert obj["Settings"]["delsystem32"] == "yes" - -def test_find_kernels(cube_kernels, tmpdir): - ck_db = """ - Object = Pointing - Group = Selection - Time = ( "2016 JAN 01 00:00:00.000000 TDB", "2016 DEC 31 00:00:00.000000 TDB" ) - Type = Reconstructed - File = $MRO/fake - End_Group - End_Object - """ - - ik_db = """ - Object = instrument - Group = Selection - Match = ("Instrument", "InstrumentId", "fake") - File = ("fake", "not/a/real/file") - End_Group - End_Object - """ - translation = """ - Group = MissionName - InputKey = SpacecraftName - InputGroup = "IsisCube,Instrument" - InputPosition = (IsisCube, Instrument) - Translation = (fake, "fake") - End_Group - """ - - tmpdir.mkdir("fake").mkdir("kernels").mkdir("ik") - tmpdir.mkdir("base").mkdir("kernels").mkdir("ck") - tmpdir.mkdir("base", "translations") - - ck_db_file = tmpdir.join("base", "kernels", "ck", "kernel.01.db") - ik_db_file = tmpdir.join("fake", "kernels", "ik", "kernel.01.db") - 
translation_file = tmpdir.join("base", "translations", "MissionName2DataDir.trn") - cube_file = tmpdir.join("test.cub") - - with open(translation_file, "w") as f: - f.write(translation) - - with open(ck_db_file, "w") as f: - f.write(ck_db) - - with open(ik_db_file, "w") as f: - f.write(ik_db) - - with open(cube_file, "w") as cube: - cube.write(cube_kernels) - - print(pvl.load(str(cube_file))) - kernels = util.find_kernels(str(cube_file), str(tmpdir)) - assert kernels == {'Pointing': {'kernels': [str(tmpdir / 'MRO/fake')], 'types': ['Reconstructed']}, 'instrument': {'kernels': [str(tmpdir / 'fake/not/a/real/file')]}} - - -def test_kernel_from_cube_list(cube_kernels): - with tempfile.NamedTemporaryFile('r+') as cube: - cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name) - assert kernels == ['$messenger/targetposition0', '$messenger/targetposition1','$messenger/instrumentposition', '$messenger/instrumentpointing0', '$messenger/instrumentpointing1', '$base/attitudeshape', '$messenger/instrument', '$base/clock'] - -def test_kernel_from_cube_list_expanded(monkeypatch, tmpdir, pvl_four_group, cube_kernels): - monkeypatch.setenv('ISISROOT', str(tmpdir)) - monkeypatch.setenv('ISISDATA', '/test/path') - - with open(tmpdir.join('IsisPreferences'), 'w+') as pvl_isisroot_file: - pvl_isisroot_file.write(pvl_four_group) - pvl_isisroot_file.flush() - - with tempfile.NamedTemporaryFile('r+') as cube: - cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name, expand=True) - assert kernels == ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1', '/test/path/messenger/instrumentposition', '/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1', '/test/path/base/attitudeshape', '/test/path/messenger/instrument', '/test/path/base/clock'] - -def test_kernel_from_cube_dict(cube_kernels): - with tempfile.NamedTemporaryFile('r+') as cube: - 
cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name, format_as='dict') - assert kernels == OrderedDict([('TargetPosition', ['$messenger/targetposition0', '$messenger/targetposition1']), - ('InstrumentPosition', ['$messenger/instrumentposition']), - ('InstrumentPointing', ['$messenger/instrumentpointing0', '$messenger/instrumentpointing1']), - ('Frame', [None]), - ('TargetAttitudeShape', ['$base/attitudeshape']), - ('Instrument', ['$messenger/instrument']), - ('InstrumentAddendum', [None]), - ('LeapSecond', [None]), - ('SpacecraftClock', ['$base/clock']), - ('Extra', [None]), - ('ShapeModel', [None]), - ('Clock', [None])]) - -def test_kernel_from_cube_dict_expanded(monkeypatch, tmpdir, pvl_four_group, cube_kernels): - monkeypatch.setenv('ISISROOT', str(tmpdir)) - monkeypatch.setenv('ISISDATA', '/test/path') - - with open(tmpdir.join('IsisPreferences'), 'w+') as pvl_isisroot_file: - pvl_isisroot_file.write(pvl_four_group) - pvl_isisroot_file.flush() - - with tempfile.NamedTemporaryFile('r+') as cube: - cube.write(cube_kernels) - cube.flush() - kernels = util.generate_kernels_from_cube(cube.name, expand=True, format_as='dict') - print(kernels.keys()) - assert kernels == OrderedDict([('TargetPosition', ['/test/path/messenger/targetposition0', '/test/path/messenger/targetposition1']), - ('InstrumentPosition', ['/test/path/messenger/instrumentposition']), - ('InstrumentPointing', ['/test/path/messenger/instrumentpointing0', '/test/path/messenger/instrumentpointing1']), - ('Frame', [None]), - ('TargetAttitudeShape', ['/test/path/base/attitudeshape']), - ('Instrument', ['/test/path/messenger/instrument']), - ('InstrumentAddendum', [None]), - ('LeapSecond', [None]), - ('SpacecraftClock', ['/test/path/base/clock']), - ('Extra', [None]), - ('ShapeModel', [None]), - ('Clock', [None])]) - -def test_kernel_from_cube_no_kernel_group(): - with pytest.raises(KeyError): - with tempfile.NamedTemporaryFile('w+') as cube: - cube.write('') - 
cube.flush() - util.generate_kernels_from_cube(cube.name) - def test_get_preferences_arg(tmpdir, pvl_one_group): with open(tmpdir.join('IsisPrefrences'), 'w+') as pvl_file: pvl_file.write(pvl_one_group)