Changes from all commits (20 commits)
7 changes: 3 additions & 4 deletions .ci/build/apt.py
@@ -1,6 +1,5 @@
import os
import subprocess
import sys
import tempfile
import urllib.request

@@ -14,16 +13,16 @@
repository = "https://iris-packages.s3.unistra.fr"

try:
urllib.request.urlopen("{}/apt/dists/{}/Release".format(repository, codename))
urllib.request.urlopen(f"{repository}/apt/dists/{codename}/Release")
except urllib.request.HTTPError:
pass
else:
with tempfile.NamedTemporaryFile() as fd:
fd.write(urllib.request.urlopen("{}/gpg.key".format(repository)).read())
fd.write(urllib.request.urlopen(f"{repository}/gpg.key").read())
fd.flush()
subprocess.check_call(["apt-key", "add", fd.name])
subprocess.check_call([
"add-apt-repository", "deb {}/apt {} main".format(repository, codename)])
"add-apt-repository", f"deb {repository}/apt {codename} main"])
subprocess.check_call(["apt-get", "update"])

subprocess.check_call([
6 changes: 2 additions & 4 deletions .ci/build/build.py
@@ -1,6 +1,4 @@
import multiprocessing
import os
import re
import subprocess
import sys

@@ -18,8 +16,8 @@
[
"cmake",
"-G", "Ninja",
"-DPython_EXECUTABLE={}".format(sys.executable),
"-DCMAKE_INSTALL_PREFIX={}".format(install_dir),
f"-DPython_EXECUTABLE={sys.executable}",
f"-DCMAKE_INSTALL_PREFIX={install_dir}",
workspace],
cwd=build_dir)

@@ -26,7 +26,8 @@ def __call__(self, data_set):
os.makedirs(self.root)

if self.iso_9660:
filename = "{:08d}.dcm".format(1+len(self.files))
count = 1 + len(self.files)
filename = f"{count:08}.dcm"
else:
filename = data_set[odil.registry.SOPInstanceUID][0].decode()

@@ -30,7 +30,8 @@ def __call__(self, data_set):

self._counts.setdefault(directory, 0)
if self.iso_9660:
filename = "IM{:06d}".format(1+self._counts[directory])
count = 1 + self._counts[directory]
filename = f"IM{count:06}"
else:
filename = data_set[odil.registry.SOPInstanceUID][0].decode()

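
Both filename hunks above switch from str.format to an f-string and drop the explicit "d" presentation type. A short, self-contained check that the two spellings of the zero-padded spec are equivalent for integers:

# For integers, "d" is the default presentation type, so "{:08d}" and
# f"{count:08}" produce the same zero-padded string.
count = 1
print("{:08d}".format(count))   # 00000001
print(f"{count:08}.dcm")        # 00000001.dcm
print(f"IM{count:06}")          # IM000001
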
2 changes: 1 addition & 1 deletion src/python/dicomifier/bruker_to_dicom/modules/image.py
@@ -108,7 +108,7 @@ def get_frame_index(generator, frame_index):
contribution = [
[fg[1], frame_index[i]]
for i, fg in enumerate(generator.frame_groups)
if fg[1] != 'FG_SLICE']
if fg[1] != "FG_SLICE"]

return odil.DataSet(
PurposeOfReferenceCodeSequence=[odil.DataSet(
4 changes: 2 additions & 2 deletions src/python/dicomifier/commands/search.py
@@ -44,9 +44,9 @@ def __init__(self, argument):
else:
try:
tag = int(items[0], 16)
except ValueError:
except ValueError as e:
raise argparse.ArgumentTypeError(
f"Invalid DICOM tag '{items[0]}'")
f"Invalid DICOM tag '{items[0]}'") from e
else:
self.tag = odil.Tag(tag)

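
A note on the "raise ... from e" change above: chaining keeps the original ValueError attached as __cause__, so the traceback shows both the argparse error and the parse failure that triggered it. A minimal sketch of the pattern; the parse_tag helper below is illustrative only, not dicomifier's code:

import argparse

def parse_tag(text):
    """Parse a hexadecimal DICOM tag written as 'gggg,eeee'."""
    group, _, element = text.partition(",")
    try:
        return (int(group, 16), int(element, 16))
    except ValueError as e:
        # "from e" chains the ValueError instead of discarding it.
        raise argparse.ArgumentTypeError(f"Invalid DICOM tag '{text}'") from e

parser = argparse.ArgumentParser()
parser.add_argument("tag", type=parse_tag)
print(parser.parse_args(["0028,0010"]).tag)   # (40, 16)
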
8 changes: 2 additions & 6 deletions src/python/dicomifier/dicom_to_nifti/convert.py
@@ -126,7 +126,7 @@ def __init__(self, data_set):

self.prefix = "{}: ".format(" / ".join(elements))
except Exception as e:
logger.debug("Series context configuration error: \"%s\"", e)
logger.debug('Series context configuration error: "%s"', e)
self.prefix = ""

def filter(self, record):
@@ -215,11 +215,7 @@ def convert_series_data_sets(data_sets, dtype=None, extra_splitters=None):

# Update progress information
if len(stacks) > 1:
stack_info = "{}/{}".format(1 + stack_index, len(stacks))
else:
stack_info = ""
if stack_info:
logger.debug("Converting stack %s", stack_info)
logger.debug(f"Converting stack {1 + stack_index}/{len(stacks)}")

sort(key, stack)
nifti_meta_data = meta_data.get_meta_data(stack, meta_data_cache)
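
The class touched at the top of this file attaches a per-series prefix to log records through its filter() method. A simplified, stand-alone sketch of that prefixing-filter pattern (class name, prefix text, and messages are invented for the example):

import logging

class PrefixFilter(logging.Filter):
    """Prepend a fixed prefix to every record that passes through."""
    def __init__(self, prefix=""):
        super().__init__()
        self.prefix = prefix

    def filter(self, record):
        record.msg = f"{self.prefix}{record.msg}"
        return True

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("demo")
logger.addFilter(PrefixFilter("Doe^John / Study 1: "))
logger.debug("Converting stack %s", "1/3")
# DEBUG:demo:Doe^John / Study 1: Converting stack 1/3
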
7 changes: 3 additions & 4 deletions src/python/dicomifier/dicom_to_nifti/image.py
@@ -9,7 +9,6 @@
import math
import re

import nibabel
import numpy
import odil

@@ -40,7 +39,7 @@ def get_slice_image(data_set, shape, cache=None):
siemens_data = siemens.parse_csa(item.get_memory_view().tobytes())

number_of_tiles = siemens_data["NumberOfImagesInMosaic"][0]
tiles_per_line = int(math.ceil(math.sqrt(number_of_tiles)))
tiles_per_line = math.ceil(math.sqrt(number_of_tiles))

# Re-arrange array so that tiles are contiguous
array = array.reshape(
@@ -107,7 +106,7 @@ def get_shape(stack):
siemens_data = siemens.parse_csa(item.get_memory_view().tobytes())

number_of_tiles = siemens_data["NumberOfImagesInMosaic"][0]
tiles_per_line = int(math.ceil(math.sqrt(number_of_tiles)))
tiles_per_line = math.ceil(math.sqrt(number_of_tiles))

rows = data_set[odil.registry.Rows][0] // tiles_per_line
cols = data_set[odil.registry.Columns][0] // tiles_per_line
@@ -311,7 +310,7 @@ def get_geometry(stack):
siemens_data = siemens.parse_csa(item.get_memory_view().tobytes())

number_of_tiles = siemens_data["NumberOfImagesInMosaic"][0]
tiles_per_line = int(math.ceil(math.sqrt(number_of_tiles)))
tiles_per_line = math.ceil(math.sqrt(number_of_tiles))

# Get the origin of the tiles (i.e. origin of the first tile), cf.
# http://nipy.org/nibabel/dicom/dicom_mosaic.html
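
The three identical changes above rely on a Python 3 behavior: math.ceil returns an int (in Python 2 it returned a float), so the int() wrapper is redundant. A quick check:

import math

number_of_tiles = 50                          # e.g. a Siemens mosaic of 50 tiles
tiles_per_line = math.ceil(math.sqrt(number_of_tiles))
print(tiles_per_line, type(tiles_per_line))   # 8 <class 'int'>
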
36 changes: 17 additions & 19 deletions src/python/dicomifier/dicom_to_nifti/io.py
@@ -62,16 +62,14 @@ def get_dicomdir_files(path):
""" Return the list of files indexed in a DICOMDIR file.
"""

dicom_files = []
dicomdir = odil.Reader.read_file(path)[1]
for record in dicomdir[odil.registry.DirectoryRecordSequence]:
if record[odil.registry.DirectoryRecordType][0] == b"IMAGE":
dicom_files.append(
os.path.join(
os.path.dirname(path),
*[x.decode() for x in record[odil.registry.ReferencedFileID]]))

return dicom_files
return [
os.path.join(
os.path.dirname(path),
*[x.decode() for x in record[odil.registry.ReferencedFileID]])
for record in dicomdir[odil.registry.DirectoryRecordSequence]
if record[odil.registry.DirectoryRecordType][0] == b"IMAGE"
]

def write_nifti(nifti_data, destination, zip, series_directory=None):
""" Write the NIfTI image and meta-data in the given destination.
@@ -146,28 +144,28 @@ def get_first_item(item):

# Patient directory: <PatientName> or <PatientID> or <StudyInstanceUID>.
patient_directory = None
if "PatientName" in meta_data and meta_data["PatientName"]:
if meta_data.get("PatientName"):
patient_directory = get_first_item(
meta_data["PatientName"])["Alphabetic"]
elif "PatientID" in meta_data and meta_data["PatientID"]:
elif meta_data.get("PatientID"):
patient_directory = meta_data["PatientID"][0]
elif "StudyInstanceUID" in meta_data and meta_data["StudyInstanceUID"]:
elif meta_data.get("StudyInstanceUID"):
patient_directory = meta_data["StudyInstanceUID"][0]
else:
raise Exception("Cannot determine patient directory")

# Study directory: <StudyID>_<StudyDescription>, both parts are
# optional. If both tags are missing or empty, raise an exception
study_directory = []
if "StudyID" in meta_data and meta_data["StudyID"]:
if meta_data.get("StudyID"):
study_directory.append(
numpy.ravel([x for x in meta_data["StudyID"] if x])[0])
if "StudyDescription" in meta_data and meta_data["StudyDescription"]:
if meta_data.get("StudyDescription"):
study_directory.append(
numpy.ravel([x for x in meta_data["StudyDescription"] if x])[0])

if not study_directory:
if "StudyInstanceUID" in meta_data and meta_data["StudyInstanceUID"]:
if meta_data.get("StudyInstanceUID"):
study_directory = [meta_data["StudyInstanceUID"][0]]
else:
raise Exception("Cannot determine study directory")
@@ -177,18 +175,18 @@ def get_first_item(item):
# Study directory: <SeriesNumber>_<SeriesDescription>, both parts are
# optional. If both tags are missing or empty, raise an exception
series_directory = []
if "SeriesNumber" in meta_data and meta_data["SeriesNumber"]:
if meta_data.get("SeriesNumber"):
series_directory.append(str(get_series_number(meta_data)))
if "SeriesDescription" in meta_data and meta_data["SeriesDescription"]:
if meta_data.get("SeriesDescription"):
series_directory.append(
numpy.ravel(
[x for x in meta_data["SeriesDescription"] if x])[0])
elif "ProtocolName" in meta_data and meta_data["ProtocolName"]:
elif meta_data.get("ProtocolName"):
series_directory.append(
numpy.ravel([x for x in meta_data["ProtocolName"] if x])[0])

if not series_directory:
if "SeriesInstanceUID" in meta_data and meta_data["SeriesInstanceUID"]:
if meta_data.get("SeriesInstanceUID"):
series_directory = [meta_data["SeriesInstanceUID"][0]]
else:
raise Exception("Cannot determine series directory")
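
The repeated '"X" in meta_data and meta_data["X"]' tests above collapse to meta_data.get("X"), which works because both a missing key and an empty value are falsy. A runnable sketch with a plain dict standing in for the meta-data mapping:

# .get() returns None for a missing key and the stored value otherwise, so its
# truthiness matches the longer "key in d and d[key]" test in both cases.
meta_data = {"PatientID": ["1234"], "StudyDescription": []}

for key in ("PatientID", "StudyDescription", "SeriesDescription"):
    old_style = key in meta_data and meta_data[key]
    new_style = meta_data.get(key)
    print(key, bool(old_style), bool(new_style))
# PatientID True True
# StudyDescription False False
# SeriesDescription False False
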
8 changes: 3 additions & 5 deletions src/python/dicomifier/dicom_to_nifti/meta_data.py
@@ -84,7 +84,7 @@ def get_meta_data(stack, cache=None):
for i, (data_set, frame) in enumerate(stack):
try:
sop_instance_uid = data_set[odil.registry.SOPInstanceUID][0]
except Exception as e:
except Exception:
raise
# Fetch non-frame-specific elements from cache
for tag, element in cache[sop_instance_uid].items():
@@ -170,7 +170,7 @@ def get_tag_name(tag):

try:
tag_name = tag.get_name()
except odil.Exception as e:
except odil.Exception:
tag_name = str(tag)
return tag_name

@@ -182,9 +182,7 @@ def convert_element(element, specific_character_set):

result = None
# element can be None because a get is called above
if element is None:
result = None
elif element.empty():
if element is None or element.empty():
result = None
elif element.is_int() or element.is_real():
result = list(element)
12 changes: 6 additions & 6 deletions src/python/dicomifier/dicom_to_nifti/series/__init__.py
@@ -21,8 +21,8 @@ def split_series(files):
"""

logger.info(
"Splitting %d DICOM file%s in series",
len(files), "s" if len(files) > 1 else "")
"Splitting {} DICOM file{} in series".format(
len(files), "s" if len(files) > 1 else ""))

series = {}
for file_ in files:
@@ -31,7 +31,7 @@
file_,
halt_condition=lambda x: x>odil.registry.SeriesInstanceUID)
except odil.Exception as e:
logger.warning("Could not read %s: %s" % (file_, e))
logger.warning(f"Could not read {file_}: {e}")
continue

uncompressed_ts = [
@@ -43,7 +43,7 @@
]]
if header[odil.registry.TransferSyntaxUID][0] not in uncompressed_ts:
logger.warning(
"Could not read %s: compressed transfer syntax", file_)
f"Could not read {file_}: compressed transfer syntax")
continue

series_instance_uid = None
@@ -55,13 +55,13 @@
series_instance_uid = finder(data_set)
except Exception as e:
logger.warning(
"Could not run %s: %s" % (finder_class.__name__, e))
f"Could not run {finder_class.__name__}: {e}")
continue
if series_instance_uid is not None:
break

if series_instance_uid is None:
logger.warning("Could not find a series for %s", file_)
logger.warning(f"Could not find a series for {file_}")
continue

series.setdefault(finder, []).append(file_)
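
For reference, the two logging call styles mixed in the hunks above produce the same message text; %-style defers interpolation to the logging module, while the f-string is formatted before the call. Side by side, with placeholder values:

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("demo")

file_, error = "a.dcm", "unexpected end of file"
logger.warning("Could not read %s: %s", file_, error)   # lazy interpolation
logger.warning(f"Could not read {file_}: {error}")      # eager f-string
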
10 changes: 5 additions & 5 deletions src/python/dicomifier/dicom_to_nifti/siemens.py
@@ -48,7 +48,7 @@ def parse_value(value, name):

if value.startswith(b"0x"):
return int(value.split()[0], 16)
elif value.startswith(b"\""):
elif value.startswith(b'"'):
return re.findall(br'""(.*)""', value)[0].decode()
else:
int_match = re.match(br"^[+-]?\d+$", value)
@@ -138,9 +138,9 @@ def parse_item(csa, start):
length = struct.unpack(format, csa[start:start+header_size])

format = ("<" # Little endian
"{0}s" # Content
"{1}s" # Padding (?)
).format(length[1], (4-length[1]%4)%4)
f"{length[1]}s" # Content
f"{(4-length[1]%4)%4}s" # Padding (?)
)
content_size = struct.calcsize(format)
content, padding = struct.unpack(
format, csa[start+header_size:start+header_size+content_size])
@@ -206,7 +206,7 @@ def value_parser(type_, value):
element)
is_array, type_, name, index = match.groups()

full_name = "{}{}{}".format(is_array.decode(), type_.decode(), name.decode())
full_name = f"{is_array.decode()}{type_.decode()}{name.decode()}"

is_array = (is_array == b"a")
if index:
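
The f-string rewrite in parse_item builds a struct format whose second field absorbs the bytes that pad each CSA item to a 4-byte boundary. A stand-alone sketch of that read-with-padding pattern; the buffer contents are invented for the example:

import struct

def read_padded(buffer, start, length):
    padding = (4 - length % 4) % 4
    fmt = f"<{length}s{padding}s"      # little endian: content, then padding
    content, _ = struct.unpack_from(fmt, buffer, start)
    return content, start + struct.calcsize(fmt)

data = b"abcde\x00\x00\x00rest"
content, next_offset = read_padded(data, 0, 5)
print(content, next_offset)            # b'abcde' 8
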
6 changes: 3 additions & 3 deletions src/python/dicomifier/dicom_to_nifti/stacks.py
@@ -91,10 +91,10 @@ def build_selector(

# Normalize the keys so that all stacks have the same key fields
key_items = set()
for key in stacks.keys():
for key in stacks:
key_items.update(x[0] for x in key)
normalized_keys = {}
for key in stacks.keys():
for key in stacks:
normalized_keys[key] = list(key)
for key_item in key_items:
if key_item not in [x[0] for x in key]:
@@ -559,7 +559,7 @@ def _find_private_creator(data_set, private_creator, group):

tag = odil.Tag(group, 0x0000)
private_group = None
for element in range(0, 256):
for element in range(256):
tag.element = element

# Get the content of the potential private creator. It may be stored as
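
The loop changed at the bottom of this file scans one DICOM group for a private creator. A rough, plain-dict illustration of what such a scan does (odil data sets are replaced by a dict keyed on (group, element) pairs, and the element range is simplified):

# Private creator elements live at (gggg,0010)-(gggg,00FF); the element number
# of the matching creator identifies the block holding that vendor's tags.
def find_private_block(data_set, private_creator, group):
    for element in range(0x10, 0x100):
        if data_set.get((group, element)) == private_creator:
            return element
    return None

data_set = {(0x0029, 0x0010): "SIEMENS CSA HEADER"}
print(hex(find_private_block(data_set, "SIEMENS CSA HEADER", 0x0029)))   # 0x10
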
6 changes: 3 additions & 3 deletions tests/diff/to_dicom.py
@@ -45,7 +45,7 @@ def main():
"to-dicom", "--dicomdir"]
+arguments
+[case_input, case_output])
except subprocess.CalledProcessError as e:
except subprocess.CalledProcessError:
return 1

different |= diff_directories(case_baseline, case_output)
@@ -92,8 +92,8 @@ def diff_directories(baseline, test):
for filename in filenames:
if not os.path.isfile(os.path.join(baseline_pathname, filename)):
different = True
print("{} missing in baseline".format(
os.path.join(relative_pathname, filename)))
relative_filename = os.path.join(relative_pathname, filename)
print(f"{relative_filename} missing in baseline")

return different
