
Commit 204e955

Merge pull request #27 from jond01/master
Maintenance - python 3.6+, pydicom 1.0+
2 parents fe23dc5 + 5e3016d commit 204e955

File tree

12 files changed, +63 -69 lines changed

.travis.yml

Lines changed: 2 additions & 2 deletions
@@ -1,10 +1,10 @@
 sudo: false
 language: python
 python:
-- "2.7"
-- "3.5"
 - "3.6"
 - "3.7"
+- "3.8"
+- "3.9"
 install: pip install tox-travis
 script: tox
 notifications:

LICENSE.txt

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-Copyright (c) 2017 Innolitics, LLC.
+Copyright (c) 2017-2021 Innolitics, LLC.
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in

dicom_numpy/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -1,8 +1,11 @@
 from .combine_slices import combine_slices, sort_by_slice_position
 from .exceptions import DicomImportException
 
+__version__ = '0.5.0'
+
 __all__ = [
     'combine_slices',
     'sort_by_slice_position',
     'DicomImportException',
+    '__version__'
 ]
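
With __version__ now exported from the package, the version string is single-sourced: setup.py imports it (see the setup.py hunk below) and callers can query it at runtime. A minimal interactive sketch:

    >>> import dicom_numpy
    >>> dicom_numpy.__version__
    '0.5.0'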

dicom_numpy/combine_slices.py

Lines changed: 14 additions & 13 deletions
@@ -1,8 +1,8 @@
 import logging
+from math import isclose
 
 import numpy as np
 
-from .utils import isclose
 from .exceptions import DicomImportException
 
 
@@ -180,19 +180,19 @@ def _validate_image_orientation(image_orientation):
     row_cosine, column_cosine, slice_cosine = _extract_cosines(image_orientation)
 
     if not _almost_zero(np.dot(row_cosine, column_cosine), 1e-4):
-        raise DicomImportException("Non-orthogonal direction cosines: {}, {}".format(row_cosine, column_cosine))
+        raise DicomImportException(f"Non-orthogonal direction cosines: {row_cosine}, {column_cosine}")
     elif not _almost_zero(np.dot(row_cosine, column_cosine), 1e-8):
-        logger.warning("Direction cosines aren't quite orthogonal: {}, {}".format(row_cosine, column_cosine))
+        logger.warning(f"Direction cosines aren't quite orthogonal: {row_cosine}, {column_cosine}")
 
     if not _almost_one(np.linalg.norm(row_cosine), 1e-4):
-        raise DicomImportException("The row direction cosine's magnitude is not 1: {}".format(row_cosine))
+        raise DicomImportException(f"The row direction cosine's magnitude is not 1: {row_cosine}")
     elif not _almost_one(np.linalg.norm(row_cosine), 1e-8):
-        logger.warning("The row direction cosine's magnitude is not quite 1: {}".format(row_cosine))
+        logger.warning(f"The row direction cosine's magnitude is not quite 1: {row_cosine}")
 
     if not _almost_one(np.linalg.norm(column_cosine), 1e-4):
-        raise DicomImportException("The column direction cosine's magnitude is not 1: {}".format(column_cosine))
+        raise DicomImportException(f"The column direction cosine's magnitude is not 1: {column_cosine}")
     elif not _almost_one(np.linalg.norm(column_cosine), 1e-8):
-        logger.warning("The column direction cosine's magnitude is not quite 1: {}".format(column_cosine))
+        logger.warning(f"The column direction cosine's magnitude is not quite 1: {column_cosine}")
 
 
 def _almost_zero(value, abs_tol):
@@ -215,17 +215,18 @@ def _slice_attribute_equal(slice_datasets, property_name):
     for dataset in slice_datasets[1:]:
         value = getattr(dataset, property_name, None)
         if value != initial_value:
-            msg = 'All slices must have the same value for "{}": {} != {}'
-            raise DicomImportException(msg.format(property_name, value, initial_value))
+            msg = f'All slices must have the same value for "{property_name}": {value} != {initial_value}'
+            raise DicomImportException(msg)
 
 
 def _slice_ndarray_attribute_almost_equal(slice_datasets, property_name, abs_tol):
     initial_value = getattr(slice_datasets[0], property_name, None)
     for dataset in slice_datasets[1:]:
         value = getattr(dataset, property_name, None)
         if not np.allclose(value, initial_value, atol=abs_tol):
-            msg = 'All slices must have the same value for "{}" within "{}": {} != {}'
-            raise DicomImportException(msg.format(property_name, abs_tol, value, initial_value))
+            msg = (f'All slices must have the same value for "{property_name}" within "{abs_tol}": {value} != '
+                   f'{initial_value}')
+            raise DicomImportException(msg)
 
 
 def _slice_positions(slice_datasets):
@@ -239,8 +240,8 @@ def _check_for_missing_slices(slice_positions):
     slice_positions_diffs = np.diff(sorted(slice_positions))
     if not np.allclose(slice_positions_diffs, slice_positions_diffs[0], atol=0, rtol=1e-5):
         # TODO: figure out how we should handle non-even slice spacing
-        msg = "The slice spacing is non-uniform. Slice spacings:\n{}"
-        logger.warning(msg.format(slice_positions_diffs))
+        msg = f"The slice spacing is non-uniform. Slice spacings:\n{slice_positions_diffs}"
+        logger.warning(msg)
 
     if not np.allclose(slice_positions_diffs, slice_positions_diffs[0], atol=0, rtol=1e-1):
         raise DicomImportException('It appears there are missing slices')

dicom_numpy/utils.py

Lines changed: 0 additions & 7 deletions
This file was deleted.
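
The deleted utils module presumably held a Python 2 compatible isclose shim; with the project now requiring Python 3.6+, the standard library's math.isclose covers it, which is why combine_slices.py switches its import above. A minimal sketch of how the tolerance helpers can wrap it (the helper bodies below are an assumption; only their names appear in the diff):

    from math import isclose

    def _almost_zero(value, abs_tol):
        # close to 0.0 within abs_tol (the default rel_tol also applies)
        return isclose(value, 0.0, abs_tol=abs_tol)

    def _almost_one(value, abs_tol):
        # close to 1.0 within abs_tol
        return isclose(value, 1.0, abs_tol=abs_tol)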

dicom_numpy/zip_archive.py

Lines changed: 9 additions & 12 deletions
@@ -2,10 +2,7 @@
 import logging
 import tempfile
 
-try:
-    import pydicom as dicom
-except ImportError:
-    import dicom
+import pydicom
 
 from .exceptions import DicomImportException
 from .combine_slices import combine_slices
@@ -15,10 +12,10 @@
 
 
 def combined_series_from_zip(zip_filename):
-    logger.info('Extracting voxel data from "{}"'.format(zip_filename))
+    logger.info(f'Extracting voxel data from "{zip_filename}"')
 
     if not zipfile.is_zipfile(zip_filename):
-        raise DicomImportException('Invalid zipfile {}'.format(zip_filename))
+        raise DicomImportException(f'Invalid zipfile {zip_filename}')
 
     with zipfile.ZipFile(zip_filename, 'r') as zip_file:
         datasets = dicom_datasets_from_zip(zip_file)
@@ -36,21 +33,21 @@ def dicom_datasets_from_zip(zip_file):
         entry_pseudo_file = zip_file.open(entry)
 
         # the pseudo file does not support `seek`, which is required by
-        # dicom's lazy loading mechanism; use temporary files to get around this;
+        # pydicom's lazy loading mechanism; use temporary files to get around this;
         # relies on the temporary files not being removed until the temp
         # file is garbage collected, which should be the case because the
-        # dicom datasets should retain a reference to the temp file
+        # pydicom datasets should retain a reference to the temp file
         temp_file = tempfile.TemporaryFile()
         temp_file.write(entry_pseudo_file.read())
         temp_file.flush()
         temp_file.seek(0)
 
         try:
-            dataset = dicom.read_file(temp_file)
+            dataset = pydicom.read_file(temp_file)
             datasets.append(dataset)
-        except dicom.errors.InvalidDicomError as e:
-            msg = 'Skipping invalid DICOM file "{}": {}'
-            logger.info(msg.format(entry, e))
+        except pydicom.errors.InvalidDicomError as e:
+            msg = f'Skipping invalid DICOM file "{entry}": {e}'
+            logger.info(msg)
 
     if len(datasets) == 0:
         raise DicomImportException('Zipfile does not contain any valid DICOM files')
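
In pydicom 1.0+, read_file still works but is an alias of dcmread, so either spelling is valid here. The temp-file indirection exists because, per the comment in the hunk, the stream returned by zip_file.open does not reliably support seek, which pydicom's deferred-read mechanism requires. A standalone sketch of the same pattern outside the library (archive and entry names are hypothetical):

    import tempfile
    import zipfile

    import pydicom

    with zipfile.ZipFile('series.zip') as zip_file:              # hypothetical archive
        with zip_file.open('slice_001.dcm') as pseudo_file:      # stream without reliable seek()
            temp_file = tempfile.TemporaryFile()
            temp_file.write(pseudo_file.read())
            temp_file.seek(0)
            dataset = pydicom.dcmread(temp_file)                  # seekable, so deferred reads work

    print(dataset.SOPInstanceUID)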

docs/requirements.txt

Lines changed: 0 additions & 1 deletion
This file was deleted.

docs/source/conf.py

Lines changed: 2 additions & 5 deletions
@@ -20,10 +20,7 @@
 import os
 import sys
 
-try:
-    from unittest.mock import MagicMock
-except ImportError:
-    from mock import Mock as MagicMock
+from unittest.mock import MagicMock
 
 sys.path.insert(0, os.path.abspath('../..'))
 
@@ -34,7 +31,7 @@ def __getattr__(cls, name):
         return MagicMock()
 
 
-MOCK_MODULES = ['numpy', 'dicom']
+MOCK_MODULES = ['numpy', 'pydicom']
 sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
 
 
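The Mock class referenced in the hunk above is the usual Sphinx trick for stubbing heavy imports so autodoc can import the package in a docs-only environment. A minimal sketch of that pattern, assuming the surrounding conf.py follows the common recipe:

    import sys
    from unittest.mock import MagicMock

    class Mock(MagicMock):
        @classmethod
        def __getattr__(cls, name):
            # every attribute access on a stubbed module yields another stub
            return MagicMock()

    MOCK_MODULES = ['numpy', 'pydicom']
    sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)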
docs/source/index.rst

Lines changed: 6 additions & 6 deletions
@@ -4,7 +4,7 @@ DICOM-Numpy
 
 This python module provides a set of utilities for extracting data contained in
 DICOM files into Numpy ndarrays. It is a higher-level library that builds on the excellent lower-level `pydicom
-<http://pydicom.readthedocs.io/en/stable/>`_ library.
+<https://pydicom.github.io/pydicom/>`_ library.
 
 The library is quite small at the moment, however, if you have a DICOM-related
 utility function that you think would be appropriate to include, create a
@@ -13,9 +13,9 @@ Github Issue!
 Dependencies
 ============
 
-- Python 2.7 or Python 3.5+
+- Python 3.6+
 - Numpy
-- PyDicom
+- PyDicom 1.0+
 
 
 Installation
@@ -41,18 +41,18 @@ single 3D image into a single scan.
 
 The function that performs this task is `combine_slices`. Since this library
 builds on pydicom, `combine_slices` takes an list of `pydicom
-datasets <http://pydicom.readthedocs.io/en/stable/pydicom_user_guide.html#dataset>`_.
+datasets <https://pydicom.github.io/pydicom/stable/old/base_element.html#dataset>`_.
 
 Example
 -------
 
 .. code:: python
 
-    import dicom
+    import pydicom
     import dicom_numpy
 
     def extract_voxel_data(list_of_dicom_files):
-        datasets = [dicom.read_file(f) for f in list_of_dicom_files]
+        datasets = [pydicom.dcmread(f) for f in list_of_dicom_files]
         try:
             voxel_ndarray, ijk_to_xyz = dicom_numpy.combine_slices(datasets)
         except dicom_numpy.DicomImportException as e:
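
The documented example is cut off at the except clause in this hunk. A hedged completion of the same pattern, with file discovery and error handling filled in purely as an illustration (the glob pattern and the re-raise are assumptions, not part of the diff):

    from glob import glob

    import pydicom
    import dicom_numpy

    def extract_voxel_data(list_of_dicom_files):
        datasets = [pydicom.dcmread(f) for f in list_of_dicom_files]
        try:
            voxel_ndarray, ijk_to_xyz = dicom_numpy.combine_slices(datasets)
        except dicom_numpy.DicomImportException:
            # the slices do not form a valid volume; let the caller decide how to recover
            raise
        return voxel_ndarray, ijk_to_xyz

    voxels, ijk_to_xyz = extract_voxel_data(sorted(glob('series/*.dcm')))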

setup.py

Lines changed: 11 additions & 13 deletions
@@ -3,17 +3,18 @@
 """
 
 from setuptools import setup, find_packages
-from codecs import open
 from os import path
 
+from dicom_numpy import __version__
+
 here = path.abspath(path.dirname(__file__))
 
 with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
     long_description = f.read()
 
 setup(
     name='dicom_numpy',
-    version='0.5.0',
+    version=__version__,
     description='Extract image data into a 3D numpy array from a set of DICOM files.',
     long_description=long_description,
     url='https://github.com/innolitics/dicom-numpy',
@@ -30,29 +31,26 @@
 
         'License :: OSI Approved :: MIT License',
 
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9'
     ],
 
     keywords='dicom numpy',
 
     packages=find_packages(exclude=['contrib', 'docs', 'tests']),
 
     install_requires=[
-        'pydicom',
+        'pydicom >= 1.0',
         'numpy',
     ],
 
-    extras_require={
-        'dev': ['check-manifest', 'sphinx', 'sphinx-autobuild', 'mock'],
-        'test': ['coverage', 'pytest'],
-    },
+    python_requires='>= 3.6',
 
-    package_data={},
-    data_files=[],
-    entry_points={},
+    extras_require={
+        'dev': ['check-manifest', 'sphinx', 'sphinx-autobuild'],
+        'test': ['coverage', 'pytest']
+    }
 )
