diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 2df9a1c534..779f6e8587 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -61,7 +61,7 @@ from .orientations import (io_orientation, orientation_affine, flip_axis, OrientationError, apply_orientation, aff2axcodes) -from .imageclasses import class_map, ext_map +from .imageclasses import class_map, ext_map, all_image_classes from . import trackvis from . import mriutils diff --git a/nibabel/analyze.py b/nibabel/analyze.py index 982da58be9..7aadcd423c 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -900,6 +900,9 @@ class AnalyzeImage(SpatialImage): files_types = (('image', '.img'), ('header', '.hdr')) _compressed_exts = ('.gz', '.bz2') + makeable = True + rw = True + ImageArrayProxy = ArrayProxy def __init__(self, dataobj, affine, header=None, diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index bc21cbc872..8965ed53e3 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -131,6 +131,8 @@ def types_filenames(template_fname, types_exts, elif found_ext == found_ext.lower(): proc_ext = lambda s: s.lower() for name, ext in types_exts: + if name in tfns: # Allow multipe definitions of image, header, etc, + continue # giving priority to those found first. if name == direct_set_name: tfns[name] = template_fname continue diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index e84f8e2319..9a67f1c7fe 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -454,14 +454,19 @@ def writeftr_to(self, fileobj): fileobj.write(ftr_nd.tostring()) +@valid_exts('.mgh', '.mgz') @ImageOpener.register_ext_from_image('.mgz', ImageOpener.gz_def) class MGHImage(SpatialImage): """ Class for MGH format image """ header_class = MGHHeader - files_types = (('image', '.mgh'),) + files_types = (('image', '.mgh'), + ('image', '.mgz')) _compressed_exts = (('.gz',)) + makeable = True + rw = True + ImageArrayProxy = ArrayProxy @classmethod diff --git a/nibabel/imageclasses.py b/nibabel/imageclasses.py index 31a219482c..189fdd95b4 100644 --- a/nibabel/imageclasses.py +++ b/nibabel/imageclasses.py @@ -7,11 +7,15 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Define supported image classes and names ''' +import warnings + from .analyze import AnalyzeImage from .spm99analyze import Spm99AnalyzeImage from .spm2analyze import Spm2AnalyzeImage from .nifti1 import Nifti1Pair, Nifti1Image +from .nifti2 import Nifti2Pair, Nifti2Image from .minc1 import Minc1Image +from .minc2 import Minc2Image from .freesurfer import MGHImage from .parrec import PARRECImage from .volumeutils import Recoder @@ -19,57 +23,73 @@ _, have_scipy, _ = optional_package('scipy') -# mapping of names to classes and class functionality +# Ordered by the load/save priority. 
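+# load() tries each class in this order and uses the first whose is_image() check passes.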
+all_image_classes = [Nifti1Pair, Nifti1Image, Nifti2Pair, Nifti2Image, + Spm2AnalyzeImage, Spm99AnalyzeImage, AnalyzeImage, + Minc1Image, Minc2Image, MGHImage, + PARRECImage] + + +# DEPRECATED: mapping of names to classes and class functionality +class ClassMapDict(dict): + def __getitem__(self, *args, **kwargs): + warnings.warn("class_map is deprecated.", DeprecationWarning) + return super(ClassMapDict, self).__getitem__(*args, **kwargs) + +class_map = ClassMapDict( + analyze={'class': AnalyzeImage, # Image class + 'ext': '.img', # characteristic image extension + 'has_affine': False, # class can store an affine + 'makeable': True, # empty image can be easily made in memory + 'rw': True}, # image can be written + spm99analyze={'class': Spm99AnalyzeImage, + 'ext': '.img', + 'has_affine': True, + 'makeable': True, + 'rw': have_scipy}, + spm2analyze={'class': Spm2AnalyzeImage, + 'ext': '.img', + 'has_affine': True, + 'makeable': True, + 'rw': have_scipy}, + nifti_pair={'class': Nifti1Pair, + 'ext': '.img', + 'has_affine': True, + 'makeable': True, + 'rw': True}, + nifti_single={'class': Nifti1Image, + 'ext': '.nii', + 'has_affine': True, + 'makeable': True, + 'rw': True}, + minc={'class': Minc1Image, + 'ext': '.mnc', + 'has_affine': True, + 'makeable': True, + 'rw': False}, + mgh={'class': MGHImage, + 'ext': '.mgh', + 'has_affine': True, + 'makeable': True, + 'rw': True}, + mgz={'class': MGHImage, + 'ext': '.mgz', + 'has_affine': True, + 'makeable': True, + 'rw': True}, + par={'class': PARRECImage, + 'ext': '.par', + 'has_affine': True, + 'makeable': False, + 'rw': False}) -class_map = { - 'analyze': {'class': AnalyzeImage, # Image class - 'ext': '.img', # characteristic image extension - 'has_affine': False, # class can store an affine - 'makeable': True, # empty image can be easily made in memory - 'rw': True}, # image can be written - 'spm99analyze': {'class': Spm99AnalyzeImage, - 'ext': '.img', - 'has_affine': True, - 'makeable': True, - 'rw': have_scipy}, - 'spm2analyze': {'class': Spm2AnalyzeImage, - 'ext': '.img', - 'has_affine': True, - 'makeable': True, - 'rw': have_scipy}, - 'nifti_pair': {'class': Nifti1Pair, - 'ext': '.img', - 'has_affine': True, - 'makeable': True, - 'rw': True}, - 'nifti_single': {'class': Nifti1Image, - 'ext': '.nii', - 'has_affine': True, - 'makeable': True, - 'rw': True}, - 'minc': {'class': Minc1Image, - 'ext': '.mnc', - 'has_affine': True, - 'makeable': True, - 'rw': False}, - 'mgh': {'class': MGHImage, - 'ext': '.mgh', - 'has_affine': True, - 'makeable': True, - 'rw': True}, - 'mgz': {'class': MGHImage, - 'ext': '.mgz', - 'has_affine': True, - 'makeable': True, - 'rw': True}, - 'par': {'class': PARRECImage, - 'ext': '.par', - 'has_affine': True, - 'makeable': False, - 'rw': False}} +class ExtMapRecoder(Recoder): + def __getitem__(self, *args, **kwargs): + warnings.warn("ext_map is deprecated.", DeprecationWarning) + return super(ExtMapRecoder, self).__getitem__(*args, **kwargs) # mapping of extensions to default image class names -ext_map = Recoder(( +ext_map = ExtMapRecoder(( ('nifti_single', '.nii'), ('nifti_pair', '.img', '.hdr'), ('minc', '.mnc'), diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 018907d7bb..20e9815122 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -11,17 +11,10 @@ import numpy as np -from .filename_parser import types_filenames, splitext_addext +from .filename_parser import splitext_addext from .openers import ImageOpener -from .analyze import AnalyzeImage -from .spm2analyze import Spm2AnalyzeImage -from 
.nifti1 import Nifti1Image, Nifti1Pair, header_dtype as ni1_hdr_dtype -from .nifti2 import Nifti2Image, Nifti2Pair -from .minc1 import Minc1Image -from .minc2 import Minc2Image -from .freesurfer import MGHImage from .spatialimages import ImageFileError -from .imageclasses import class_map, ext_map +from .imageclasses import all_image_classes from .arrayproxy import is_proxy @@ -40,55 +33,15 @@ def load(filename, **kwargs): img : ``SpatialImage`` Image of guessed type ''' - return guessed_image_type(filename).from_filename(filename, **kwargs) + sniff = None + for image_klass in all_image_classes: + is_valid, sniff = image_klass.is_image(filename, sniff) + if is_valid: + return image_klass.from_filename(filename, **kwargs) -def guessed_image_type(filename): - """ Guess image type from file `filename` - - Parameters - ---------- - filename : str - File name containing an image - - Returns - ------- - image_class : class - Class corresponding to guessed image type - """ - froot, ext, trailing = splitext_addext(filename, ('.gz', '.bz2')) - lext = ext.lower() - try: - img_type = ext_map[lext] - except KeyError: - raise ImageFileError('Cannot work out file type of "%s"' % - filename) - if lext in ('.mgh', '.mgz', '.par'): - klass = class_map[img_type]['class'] - elif lext == '.mnc': - # Look for HDF5 signature for MINC2 - # https://www.hdfgroup.org/HDF5/doc/H5.format.html - with ImageOpener(filename) as fobj: - signature = fobj.read(4) - klass = Minc2Image if signature == b'\211HDF' else Minc1Image - elif lext == '.nii': - with ImageOpener(filename) as fobj: - binaryblock = fobj.read(348) - ft = which_analyze_type(binaryblock) - klass = Nifti2Image if ft == 'nifti2' else Nifti1Image - else: # might be nifti 1 or 2 pair or analyze of some sort - files_types = (('image', '.img'), ('header', '.hdr')) - filenames = types_filenames(filename, files_types) - with ImageOpener(filenames['header']) as fobj: - binaryblock = fobj.read(348) - ft = which_analyze_type(binaryblock) - if ft == 'nifti2': - klass = Nifti2Pair - elif ft == 'nifti1': - klass = Nifti1Pair - else: - klass = Spm2AnalyzeImage - return klass + raise ImageFileError('Cannot work out file type of "%s"' % + filename) def save(img, filename): @@ -105,25 +58,38 @@ def save(img, filename): ------- None ''' + + # Save the type as expected try: img.to_filename(filename) except ImageFileError: pass else: return - froot, ext, trailing = splitext_addext(filename, ('.gz', '.bz2')) + + # Be nice to users by making common implicit conversions + froot, ext, trailing = splitext_addext(filename, img._compressed_exts) + lext = ext.lower() + # Special-case Nifti singles and Pairs - if type(img) == Nifti1Image and ext in ('.img', '.hdr'): + from .nifti1 import Nifti1Image, Nifti1Pair # Inline imports, as this module + from .nifti2 import Nifti2Image, Nifti2Pair # really shouldn't reference any image type + if type(img) == Nifti1Image and lext in ('.img', '.hdr'): klass = Nifti1Pair - elif type(img) == Nifti2Image and ext in ('.img', '.hdr'): + elif type(img) == Nifti2Image and lext in ('.img', '.hdr'): klass = Nifti2Pair - elif type(img) == Nifti1Pair and ext == '.nii': + elif type(img) == Nifti1Pair and lext == '.nii': klass = Nifti1Image - elif type(img) == Nifti2Pair and ext == '.nii': + elif type(img) == Nifti2Pair and lext == '.nii': klass = Nifti2Image - else: - img_type = ext_map[ext] - klass = class_map[img_type]['class'] + else: # arbitrary conversion + valid_klasses = [klass for klass in all_image_classes + if klass.is_valid_extension(ext)] + try: + 
klass = valid_klasses[0] + except IndexError: # if list is empty + raise ImageFileError('Cannot work out file type of "%s"' % + filename) converted = klass.from_image(img) converted.to_filename(filename) @@ -212,43 +178,3 @@ def read_img_data(img, prefer='scaled'): if prefer == 'scaled': return hdr.data_from_fileobj(fileobj) return hdr.raw_data_from_fileobj(fileobj) - - -def which_analyze_type(binaryblock): - """ Is `binaryblock` from NIfTI1, NIfTI2 or Analyze header? - - Parameters - ---------- - binaryblock : bytes - The `binaryblock` is 348 bytes that might be NIfTI1, NIfTI2, Analyze, - or None of the the above. - - Returns - ------- - hdr_type : str - * a nifti1 header (pair or single) -> return 'nifti1' - * a nifti2 header (pair or single) -> return 'nifti2' - * an Analyze header -> return 'analyze' - * None of the above -> return None - - Notes - ----- - Algorithm: - - * read in the first 4 bytes from the file as 32-bit int ``sizeof_hdr`` - * if ``sizeof_hdr`` is 540 or byteswapped 540 -> assume nifti2 - * Check for 'ni1', 'n+1' magic -> assume nifti1 - * if ``sizeof_hdr`` is 348 or byteswapped 348 assume Analyze - * Return None - """ - hdr = np.ndarray(shape=(), dtype=ni1_hdr_dtype, buffer=binaryblock) - bs_hdr = hdr.byteswap() - sizeof_hdr = hdr['sizeof_hdr'] - bs_sizeof_hdr = bs_hdr['sizeof_hdr'] - if 540 in (sizeof_hdr, bs_sizeof_hdr): - return 'nifti2' - if hdr['magic'] in (b'ni1', b'n+1'): - return 'nifti1' - if 348 in (sizeof_hdr, bs_sizeof_hdr): - return 'analyze' - return None diff --git a/nibabel/minc1.py b/nibabel/minc1.py index d646397ee5..89da3359ee 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -14,6 +14,7 @@ from .externals.netcdf import netcdf_file +from .filename_parser import splitext_addext from .spatialimages import Header, SpatialImage from .fileslice import canonical_slicers @@ -270,6 +271,9 @@ class MincHeader(Header): # We don't use the data layout - this just in case we do later data_layout = 'C' + # Number of bytes needed to distinguish Minc1 and Minc2 headers + sniff_size = 4 + def data_to_fileobj(self, data, fileobj, rescale=True): """ See Header class for an implementation we can't use """ raise NotImplementedError @@ -279,6 +283,12 @@ def data_from_fileobj(self, fileobj): raise NotImplementedError +class Minc1Header(MincHeader): + @classmethod + def is_header(klass, binaryblock): + return binaryblock[:4] != b'\211HDF' + + class Minc1Image(SpatialImage): ''' Class for MINC1 format images @@ -286,10 +296,13 @@ class Minc1Image(SpatialImage): MINC header type - and reads the relevant information from the MINC file on load. 
''' - header_class = MincHeader + header_class = Minc1Header files_types = (('image', '.mnc'),) _compressed_exts = ('.gz', '.bz2') + makeable = True + rw = False + ImageArrayProxy = MincImageArrayProxy @classmethod diff --git a/nibabel/minc2.py b/nibabel/minc2.py index a8a69ebd23..635abc3a99 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -30,7 +30,7 @@ from .optpkg import optional_package h5py, have_h5py, setup_module = optional_package('h5py') -from .minc1 import Minc1File, Minc1Image, MincError +from .minc1 import Minc1File, MincHeader, Minc1Image, MincError class Hdf5Bunch(object): @@ -134,6 +134,12 @@ def get_scaled_data(self, sliceobj=()): return self._normalize(raw_data, sliceobj) +class Minc2Header(MincHeader): + @classmethod + def is_header(klass, binaryblock): + return binaryblock[:4] == b'\211HDF' + + class Minc2Image(Minc1Image): ''' Class for MINC2 images @@ -143,6 +149,7 @@ class Minc2Image(Minc1Image): ''' # MINC2 does not do compressed whole files _compressed_exts = () + header_class = Minc2Header @classmethod def from_file_map(klass, file_map): diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index e2666609e2..b8a0fc7555 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -558,6 +558,9 @@ class Nifti1Header(SpmAnalyzeHeader): pair_magic = b'ni1' single_magic = b'n+1' + # for sniffing type + sniff_size = 348 + # Quaternion threshold near 0, based on float32 precision quaternion_threshold = -np.finfo(np.float32).eps * 3 @@ -1611,6 +1614,15 @@ def _chk_xform_code(klass, code_type, hdr, fix): rep.fix_msg = 'setting to 0' return hdr, rep + @classmethod + def is_header(klass, binaryblock): + if len(binaryblock) < klass.sniff_size: + raise ValueError('Must pass a binary block >= %d bytes' % klass.sniff_size) + + hdr = np.ndarray(shape=(), dtype=header_dtype, + buffer=binaryblock[:klass.sniff_size]) + return hdr['magic'] in (b'ni1', b'n+1') + class Nifti1PairHeader(Nifti1Header): ''' Class for NIfTI1 pair header ''' @@ -1622,6 +1634,7 @@ class Nifti1Pair(analyze.AnalyzeImage): """ Class for NIfTI1 format image, header pair """ header_class = Nifti1PairHeader + rw = True def __init__(self, dataobj, affine, header=None, extra=None, file_map=None): diff --git a/nibabel/nifti2.py b/nibabel/nifti2.py index 89fe3345e3..54b760c02e 100644 --- a/nibabel/nifti2.py +++ b/nibabel/nifti2.py @@ -141,6 +141,9 @@ class Nifti2Header(Nifti1Header): # Size of header in sizeof_hdr field sizeof_hdr = 540 + # sniff size to determine type + sniff_size = 540 + # Quaternion threshold near 0, based on float64 preicision quaternion_threshold = -np.finfo(np.float64).eps * 3 @@ -221,6 +224,16 @@ def _chk_eol_check(hdr, fix=False): rep.fix_msg = 'setting EOL check to 13, 10, 26, 10' return hdr, rep + @classmethod + def is_header(klass, binaryblock): + if len(binaryblock) < klass.sniff_size: + raise ValueError('Must pass a binary block >= %d bytes' % klass.sniff_size) + + hdr = np.ndarray(shape=(), dtype=header_dtype, + buffer=binaryblock[:klass.sniff_size]) + bs_hdr = hdr.byteswap() + return 540 in (hdr['sizeof_hdr'], bs_hdr['sizeof_hdr']) + class Nifti2PairHeader(Nifti2Header): ''' Class for NIfTI2 pair header ''' diff --git a/nibabel/parrec.py b/nibabel/parrec.py index ef4c11c698..85fc30aa4e 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -1022,6 +1022,9 @@ class PARRECImage(SpatialImage): header_class = PARRECHeader files_types = (('image', '.rec'), ('header', '.par')) + makeable = False + rw = False + ImageArrayProxy = PARRECArrayProxy @classmethod diff --git 
a/nibabel/spatialimages.py b/nibabel/spatialimages.py index eb4befa077..12cc3ca016 100644 --- a/nibabel/spatialimages.py +++ b/nibabel/spatialimages.py @@ -137,12 +137,15 @@ except NameError: # python 3 basestring = str +import os.path import warnings import numpy as np -from .filename_parser import types_filenames, TypesFilenamesError +from .filename_parser import types_filenames, TypesFilenamesError, \ + splitext_addext from .fileholders import FileHolder +from .openers import ImageOpener from .volumeutils import shape_zoom_affine @@ -319,11 +322,14 @@ class ImageFileError(Exception): class SpatialImage(object): + ''' Template class for images ''' header_class = Header files_types = (('image', None),) _compressed_exts = () - ''' Template class for images ''' + makeable = True # Used in test code + rw = True # Used in test code + def __init__(self, dataobj, affine, header=None, extra=None, file_map=None): ''' Initialize image @@ -866,6 +872,51 @@ def from_image(klass, img): klass.header_class.from_header(img.header), extra=img.extra.copy()) + @classmethod + def is_valid_extension(klass, ext): + return np.any([ft[1] == ext.lower() for ft in klass.files_types]) + + @classmethod + def is_valid_filename(klass, filename): + froot, ext, trailing = splitext_addext(filename, klass._compressed_exts) + return klass.is_valid_extension(ext) + + @classmethod + def is_image(klass, filename, sniff=None): + froot, ext, trailing = splitext_addext(filename, klass._compressed_exts) + + if not klass.is_valid_extension(ext): + return False, sniff + elif (getattr(klass.header_class, 'sniff_size', None) is None or + getattr(klass.header_class, 'is_header', None) is None): + return True, sniff + + # Determine the metadata location, then sniff it + header_exts = [ft[1] for ft in klass.files_types if ft[0] == 'header'] + if len(header_exts) == 0: + metadata_filename = filename + else: + # Search for an acceptable existing header; + # could be compressed or not... + for ext in header_exts: + for tr_ext in np.unique([trailing, ''] + list(klass._compressed_exts)): + metadata_filename = froot + ext + tr_ext + if os.path.exists(metadata_filename): + break + + try: + if not sniff or len(sniff) < klass.header_class.sniff_size: + # 1024 == large size, for efficiency (could iterate over imageclasses). + sniff_size = np.max([1024, klass.header_class.sniff_size]) + with ImageOpener(metadata_filename, 'rb') as fobj: + sniff = fobj.read(sniff_size) + return klass.header_class.is_header(sniff), sniff + except Exception as e: + # Can happen if: file doesn't exist, + # filesize < necessary sniff size (this happens!) + # other unexpected errors. 
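+            # Whatever the cause, treat the failure as "this is not an image of this class".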
+ return False, sniff + def __getitem__(self): ''' No slicing or dictionary interface for images ''' diff --git a/nibabel/spm2analyze.py b/nibabel/spm2analyze.py index 28d04bc7a1..c3decf2af8 100644 --- a/nibabel/spm2analyze.py +++ b/nibabel/spm2analyze.py @@ -35,6 +35,9 @@ class Spm2AnalyzeHeader(spm99.Spm99AnalyzeHeader): # Copies of module level definitions template_dtype = header_dtype + # binary read size to determine type + sniff_size = 348 + def get_slope_inter(self): ''' Get data scaling (slope) and intercept from header data @@ -113,12 +116,22 @@ def get_slope_inter(self): return slope, inter return None, None + @classmethod + def is_header(klass, binaryblock): + if len(binaryblock) < klass.sniff_size: + raise ValueError('Must pass a binary block >= %d bytes' % klass.sniff_size) + + hdr = np.ndarray(shape=(), dtype=header_dtype, + buffer=binaryblock[:klass.sniff_size]) + bs_hdr = hdr.byteswap() + return (binaryblock[344:348] not in (b'ni1\x00', b'n+1\x00') and + 348 in (hdr['sizeof_hdr'], bs_hdr['sizeof_hdr'])) + class Spm2AnalyzeImage(spm99.Spm99AnalyzeImage): """ Class for SPM2 variant of basic Analyze image """ header_class = Spm2AnalyzeHeader - load = Spm2AnalyzeImage.load save = Spm2AnalyzeImage.instance_to_filename diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index f3d565e41d..fdf6c2d31a 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -17,6 +17,8 @@ from .batteryrunners import Report from . import analyze # module import from .keywordonly import kw_only_meth +from .optpkg import optional_package +have_scipy = optional_package('scipy')[1] ''' Support subtle variations of SPM version of Analyze ''' header_key_dtd = analyze.header_key_dtd @@ -237,6 +239,9 @@ class Spm99AnalyzeImage(analyze.AnalyzeImage): files_types = (('image', '.img'), ('header', '.hdr'), ('mat', '.mat')) + has_affine = True + makeable = True + rw = have_scipy @classmethod @kw_only_meth(1) diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index b567bc3c21..55e7e39532 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -159,6 +159,7 @@ def test_log_checks(self): fhdr, message, raiser = self.log_chk(hdr, 40) assert_equal(message, 'data code -1 not recognized; ' 'not attempting fix') + assert_raises(*raiser) # datatype not supported hdr['datatype'] = 255 # severity 40 diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py index 2c0bdfff0f..59839b3b96 100644 --- a/nibabel/tests/test_files_interface.py +++ b/nibabel/tests/test_files_interface.py @@ -12,7 +12,7 @@ import numpy as np -from .. import class_map, Nifti1Image, Nifti1Pair, MGHImage +from .. 
import Nifti1Image, Nifti1Pair, MGHImage, all_image_classes from ..externals.six import BytesIO from ..fileholders import FileHolderError @@ -25,15 +25,14 @@ def test_files_images(): # test files creation in image classes arr = np.zeros((2,3,4)) aff = np.eye(4) - for img_def in class_map.values(): - klass = img_def['class'] + for klass in all_image_classes: file_map = klass.make_file_map() for key, value in file_map.items(): assert_equal(value.filename, None) assert_equal(value.fileobj, None) assert_equal(value.pos, 0) # If we can't create new images in memory without loading, bail here - if not img_def['makeable']: + if not klass.makeable: continue # MGHImage accepts only a few datatypes # so we force a type change to float32 @@ -83,22 +82,21 @@ def test_files_interface(): def test_round_trip(): - # write an image to files - data = np.arange(24, dtype='i4').reshape((2,3,4)) - aff = np.eye(4) - klasses = [val['class'] for key, val in class_map.items() - if val['rw']] - for klass in klasses: - file_map = klass.make_file_map() - for key in file_map: - file_map[key].fileobj = BytesIO() - img = klass(data, aff) - img.file_map = file_map - img.to_file_map() - # read it back again from the written files - img2 = klass.from_file_map(file_map) - assert_array_equal(img2.get_data(), data) - # write, read it again - img2.to_file_map() - img3 = klass.from_file_map(file_map) - assert_array_equal(img3.get_data(), data) + # write an image to files + data = np.arange(24, dtype='i4').reshape((2,3,4)) + aff = np.eye(4) + klasses = filter(lambda klass: klass.rw, all_image_classes) + for klass in klasses: + file_map = klass.make_file_map() + for key in file_map: + file_map[key].fileobj = BytesIO() + img = klass(data, aff) + img.file_map = file_map + img.to_file_map() + # read it back again from the written files + img2 = klass.from_file_map(file_map) + assert_array_equal(img2.get_data(), data) + # write, read it again + img2.to_file_map() + img3 = klass.from_file_map(file_map) + assert_array_equal(img3.get_data(), data) diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 7ade7d09c3..f533f0515a 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -26,14 +26,14 @@ from .. import loadsave as nils from .. 
import (Nifti1Image, Nifti1Header, Nifti1Pair, Nifti2Image, Nifti2Pair, Minc1Image, Minc2Image, Spm2AnalyzeImage, Spm99AnalyzeImage, - AnalyzeImage, MGHImage, class_map) + AnalyzeImage, MGHImage, all_image_classes) from ..tmpdirs import InTemporaryDirectory from ..volumeutils import native_code, swapped_code from numpy.testing import assert_array_equal, assert_array_almost_equal -from nose.tools import assert_true, assert_equal, assert_raises +from nose.tools import assert_true, assert_equal, assert_false, assert_raises DATA_PATH = pjoin(dirname(__file__), 'data') MGH_DATA_PATH = pjoin(dirname(__file__), '..', 'freesurfer', 'tests', 'data') @@ -53,20 +53,81 @@ def test_conversion(): affine = np.diag([1, 2, 3, 1]) for npt in np.float32, np.int16: data = np.arange(np.prod(shape), dtype=npt).reshape(shape) - for r_class_def in class_map.values(): - r_class = r_class_def['class'] - if not r_class_def['makeable']: + for r_class in all_image_classes: + if not r_class.makeable: continue img = r_class(data, affine) img.set_data_dtype(npt) - for w_class_def in class_map.values(): - if not w_class_def['makeable']: + for w_class in all_image_classes: + if not w_class.makeable: continue - w_class = w_class_def['class'] img2 = w_class.from_image(img) assert_array_equal(img2.get_data(), data) assert_array_equal(img2.affine, affine) +def test_sniff_and_guessed_image_type(): + # Randomize the class order + + def test_image_class(img_path, expected_img_klass): + + def check_img(img_path, expected_img_klass, mode, sniff=None, expect_match=True, msg=''): + if mode == 'no_sniff': + is_img, _ = expected_img_klass.is_image(img_path) + else: + is_img, sniff = expected_img_klass.is_image(img_path, sniff) + + msg = '%s (%s) image is%s a %s image.' % ( + img_path, + msg, + '' if is_img else ' not', + klass.__name__) + from ..spatialimages import ImageFileError + try: + klass.from_filename(img_path) + # assert_true(is_img, msg) + print("Passed: " + msg) + except ImageFileError: + print("Failed (image load): " + msg) + except Exception as e: + print("Failed (%s): %s" % (str(e), msg)) + # if is_img: + # raise + # assert_false(is_img, msg) # , issubclass(expected_img_klass, klass) and expect_match, msg) + return sniff + + for mode in ['vanilla', 'no-sniff']: + if mode == 'random': + img_klasses = all_image_classes.copy() + np.random.shuffle(img_klasses) + else: + img_klasses = all_image_classes + + if mode == 'no_sniff': + all_sniffs = [None] + bad_sniff = None + else: + sizeof_hdr = getattr(expected_img_klass.header_class, 'sizeof_hdr', 0) + all_sniffs = [None, '', 'a' * (sizeof_hdr - 1)] + bad_sniff = 'a' * sizeof_hdr + + # Test that passing in different sniffs is OK + if bad_sniff is not None: + for klass in img_klasses: + check_img(img_path, expected_img_klass, mode=mode, + sniff=bad_sniff, expect_match=False, + msg='%s / %s / %s' % (expected_img_klass.__name__, mode, 'bad_sniff')) + + for si, sniff in enumerate(all_sniffs): + for klass in img_klasses: + sniff = check_img(img_path, expected_img_klass, mode=mode, + sniff=sniff, expect_match=True, + msg='%s / %s / %d' % (expected_img_klass.__name__, mode, si)) + + + + # Test whether we can guess the image type from example files + test_image_class(pjoin(DATA_PATH, 'analyze.hdr'), + Spm2AnalyzeImage) def test_save_load_endian(): shape = (2, 4, 6) @@ -265,62 +326,3 @@ def test_filename_save(): del rt_img finally: shutil.rmtree(pth) - - -def test_analyze_detection(): - # Test detection of Analyze, Nifti1 and Nifti2 - # Algorithm is as described in 
loadsave:which_analyze_type - def wat(hdr): - return nils.which_analyze_type(hdr.binaryblock) - n1_hdr = Nifti1Header(b'\0' * 348, check=False) - assert_equal(wat(n1_hdr), None) - n1_hdr['sizeof_hdr'] = 540 - assert_equal(wat(n1_hdr), 'nifti2') - assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti2') - n1_hdr['sizeof_hdr'] = 348 - assert_equal(wat(n1_hdr), 'analyze') - assert_equal(wat(n1_hdr.as_byteswapped()), 'analyze') - n1_hdr['magic'] = b'n+1' - assert_equal(wat(n1_hdr), 'nifti1') - assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1') - n1_hdr['magic'] = b'ni1' - assert_equal(wat(n1_hdr), 'nifti1') - assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1') - # Doesn't matter what magic is if it's not a nifti1 magic - n1_hdr['magic'] = b'ni2' - assert_equal(wat(n1_hdr), 'analyze') - n1_hdr['sizeof_hdr'] = 0 - n1_hdr['magic'] = b'' - assert_equal(wat(n1_hdr), None) - n1_hdr['magic'] = 'n+1' - assert_equal(wat(n1_hdr), 'nifti1') - n1_hdr['magic'] = 'ni1' - assert_equal(wat(n1_hdr), 'nifti1') - - -def test_guessed_image_type(): - # Test whether we can guess the image type from example files - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'example4d.nii.gz')), - Nifti1Image) - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'nifti1.hdr')), - Nifti1Pair) - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'example_nifti2.nii.gz')), - Nifti2Image) - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'nifti2.hdr')), - Nifti2Pair) - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'tiny.mnc')), - Minc1Image) - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'small.mnc')), - Minc2Image) - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'test.mgz')), - MGHImage) - assert_equal(nils.guessed_image_type( - pjoin(DATA_PATH, 'analyze.hdr')), - Spm2AnalyzeImage) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index d2d6d0a93e..29c227be73 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -371,9 +371,9 @@ def test_load_mmap(self): back_img = func(param1, **kwargs) back_data = back_img.get_data() if expected_mode is None: - assert_false(isinstance(back_data, np.memmap)) + assert_false(isinstance(back_data, np.memmap), 'Should not be a %s' % img_klass.__name__) else: - assert_true(isinstance(back_data, np.memmap)) + assert_true(isinstance(back_data, np.memmap), 'Not a %s' % img_klass.__name__) if self.check_mmap_mode: assert_equal(back_data.mode, expected_mode) del back_img, back_data
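Below is a minimal sketch, for illustration only, of how the public pieces added by this patch (`all_image_classes` exported from `nibabel/__init__.py` and the new `is_image()` classmethod) fit together. `guess_image_class` is a hypothetical helper, not part of the patch; it simply mirrors the loop in the new `loadsave.load()`.

    # Sketch only, assuming the patch above is applied.
    import nibabel as nib
    from nibabel.spatialimages import ImageFileError

    def guess_image_class(filename):
        """Return the first image class whose is_image() check accepts `filename`."""
        sniff = None  # sniffed header bytes are re-used across classes
        for klass in nib.all_image_classes:  # ordered by load/save priority
            is_valid, sniff = klass.is_image(filename, sniff)
            if is_valid:
                return klass
        raise ImageFileError('Cannot work out file type of "%s"' % filename)

    # For example, given the 'analyze.hdr' file in nibabel/tests/data (as used in
    # test_sniff_and_guessed_image_type above), this should return Spm2AnalyzeImage.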