1# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
2# vi: set ft=python sts=4 ts=4 sw=4 et:
3### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
4#
5#   See COPYING file distributed along with the NiBabel package for the
6#   copyright and license terms.
7#
8### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
9""" Read / write access to the basic Mayo Analyze format
10
11===========================
12 The Analyze header format
13===========================
14
15This is a binary header format and inherits from ``WrapStruct``
16
17Apart from the attributes and methods of WrapStruct:
18
19Class attributes are::
20
21    .default_x_flip
22
23with methods::
24
25    .get/set_data_shape
26    .get/set_data_dtype
27    .get/set_zooms
28    .get/set_data_offset
29    .get_base_affine()
30    .get_best_affine()
31    .data_to_fileobj
32    .data_from_fileobj
33
34and class methods::
35
36    .from_header(hdr)
37
38More sophisticated headers can add more methods and attributes.
39
40Notes
41-----
42
43This - basic - analyze header cannot encode full affines (only
44diagonal affines), and cannot do integer scaling.
45
46The inability to store affines means that we have to guess what orientation the
47image has.  Most Analyze images are stored on disk in (fastest-changing to
48slowest-changing) R->L, P->A and I->S order.  That is, the first voxel is the
49rightmost, most posterior and most inferior voxel location in the image, and
50the next voxel is one voxel towards the left of the image.
51
52Most people refer to this disk storage format as 'radiological', on the basis
53that, if you load up the data as an array ``img_arr`` where the first axis is
54the fastest changing, then take a slice in the I->S axis - ``img_arr[:,:,10]``
55- then the right part of the brain will be on the left of your displayed slice.
56Radiologists like looking at images where the left of the brain is on the right
57side of the image.
58
59Conversely, if the image has the voxels stored with the left voxels first -
60L->R, P->A, I->S, then this would be 'neurological' format.  Neurologists like
61looking at images where the left side of the brain is on the left of the image.
62
63When we are guessing at an affine for Analyze, this translates to the problem
64of whether the affine should consider proceeding within the data down an X line
65as being from left to right, or right to left.
66
67By default we assume that the image is stored in R->L format.  We encode this
68choice in the ``default_x_flip`` flag that can be True or False.  True means
69assume radiological.
70
71If the image is 3D, and the X, Y and Z zooms are x, y, and z, then::
72
    if default_x_flip is True:
74        affine = np.diag((-x,y,z,1))
75    else:
76        affine = np.diag((x,y,z,1))
77
78In our implementation, there is no way of saving this assumed flip into the
79header.  One way of doing this, that we have not used, is to allow negative
80zooms, in particular, negative X zooms.  We did not do this because the image
81can be loaded with and without a default flip, so the saved zoom will not
82constrain the affine.
83"""
84
85import numpy as np
86
87from .volumeutils import (native_code, swapped_code, make_dt_codes,
88                          shape_zoom_affine, array_from_file, seek_tell,
89                          apply_read_scaling)
90from .arraywriters import (make_array_writer, get_slope_inter, WriterError,
91                           ArrayWriter)
92from .wrapstruct import LabeledWrapStruct
93from .spatialimages import (HeaderDataError, HeaderTypeError,
94                            SpatialImage)
95from .fileholders import copy_file_map
96from .batteryrunners import Report
97from .arrayproxy import ArrayProxy
98
# Sub-parts of standard analyze header from
# Mayo dbh.h file
# Each list holds (field name, numpy dtype code[, shape]) tuples; the three
# lists are concatenated below into the full structured header dtype.
header_key_dtd = [
    ('sizeof_hdr', 'i4'),
    ('data_type', 'S10'),
    ('db_name', 'S18'),
    ('extents', 'i4'),
    ('session_error', 'i2'),
    ('regular', 'S1'),
    ('hkey_un0', 'S1')
]
# Image shape, voxel datatype, voxel sizes and display range
image_dimension_dtd = [
    ('dim', 'i2', (8,)),
    ('vox_units', 'S4'),
    ('cal_units', 'S8'),
    ('unused1', 'i2'),
    ('datatype', 'i2'),
    ('bitpix', 'i2'),
    ('dim_un0', 'i2'),
    ('pixdim', 'f4', (8,)),
    ('vox_offset', 'f4'),
    ('funused1', 'f4'),
    ('funused2', 'f4'),
    ('funused3', 'f4'),
    ('cal_max', 'f4'),
    ('cal_min', 'f4'),
    ('compressed', 'i4'),
    ('verified', 'i4'),
    ('glmax', 'i4'),
    ('glmin', 'i4')
]
# Free-text description and scan / patient provenance fields
data_history_dtd = [
    ('descrip', 'S80'),
    ('aux_file', 'S24'),
    ('orient', 'S1'),
    ('originator', 'S10'),
    ('generated', 'S10'),
    ('scannum', 'S10'),
    ('patient_id', 'S10'),
    ('exp_date', 'S10'),
    ('exp_time', 'S10'),
    ('hist_un0', 'S3'),
    ('views', 'i4'),
    ('vols_added', 'i4'),
    ('start_field', 'i4'),
    ('field_skip', 'i4'),
    ('omax', 'i4'),
    ('omin', 'i4'),
    ('smax', 'i4'),
    ('smin', 'i4')
]

# Full header numpy dtype combined across sub-fields
header_dtype = np.dtype(header_key_dtd + image_dimension_dtd +
                        data_history_dtd)

_dtdefs = (  # code, conversion function, equivalent dtype, aliases
    (0, 'none', np.void),
    (1, 'binary', np.void),  # 1 bit per voxel, needs thought
    (2, 'uint8', np.uint8),
    (4, 'int16', np.int16),
    (8, 'int32', np.int32),
    (16, 'float32', np.float32),
    (32, 'complex64', np.complex64),  # numpy complex format?
    (64, 'float64', np.float64),
    (128, 'RGB', np.dtype([('R', 'u1'),
                           ('G', 'u1'),
                           ('B', 'u1')])),
    (255, 'all', np.void))

# Make full code alias bank, including dtype column
data_type_codes = make_dt_codes(_dtdefs)
171
172
class AnalyzeHeader(LabeledWrapStruct):
    """ Class for basic analyze header

    Implements zoom-only setting of affine transform, and no image
    scaling
    """
    # Copies of module-level definitions
    template_dtype = header_dtype
    _data_type_codes = data_type_codes
    # fields with recoders for their values
    _field_recoders = {'datatype': data_type_codes}
    # default x flip - True means assume radiological (R->L) voxel storage
    # when building the affine (see module docstring)
    default_x_flip = True

    # data scaling capabilities - basic Analyze stores no slope / intercept
    has_data_slope = False
    has_data_intercept = False

    # Expected value of the 'sizeof_hdr' field for this header type
    sizeof_hdr = 348
192
193    def __init__(self,
194                 binaryblock=None,
195                 endianness=None,
196                 check=True):
197        """ Initialize header from binary data block
198
199        Parameters
200        ----------
201        binaryblock : {None, string} optional
202            binary block to set into header.  By default, None, in
203            which case we insert the default empty header block
204        endianness : {None, '<','>', other endian code} string, optional
205            endianness of the binaryblock.  If None, guess endianness
206            from the data.
207        check : bool, optional
208            Whether to check content of header in initialization.
209            Default is True.
210
211        Examples
212        --------
213        >>> hdr1 = AnalyzeHeader() # an empty header
214        >>> hdr1.endianness == native_code
215        True
216        >>> hdr1.get_data_shape()
217        (0,)
218        >>> hdr1.set_data_shape((1,2,3)) # now with some content
219        >>> hdr1.get_data_shape()
220        (1, 2, 3)
221
222        We can set the binary block directly via this initialization.
223        Here we get it from the header we have just made
224
225        >>> binblock2 = hdr1.binaryblock
226        >>> hdr2 = AnalyzeHeader(binblock2)
227        >>> hdr2.get_data_shape()
228        (1, 2, 3)
229
230        Empty headers are native endian by default
231
232        >>> hdr2.endianness == native_code
233        True
234
235        You can pass valid opposite endian headers with the
236        ``endianness`` parameter. Even empty headers can have
237        endianness
238
239        >>> hdr3 = AnalyzeHeader(endianness=swapped_code)
240        >>> hdr3.endianness == swapped_code
241        True
242
243        If you do not pass an endianness, and you pass some data, we
244        will try to guess from the passed data.
245
246        >>> binblock3 = hdr3.binaryblock
247        >>> hdr4 = AnalyzeHeader(binblock3)
248        >>> hdr4.endianness == swapped_code
249        True
250        """
251        super(AnalyzeHeader, self).__init__(binaryblock, endianness, check)
252
253    @classmethod
254    def guessed_endian(klass, hdr):
255        """ Guess intended endianness from mapping-like ``hdr``
256
257        Parameters
258        ----------
259        hdr : mapping-like
260           hdr for which to guess endianness
261
262        Returns
263        -------
264        endianness : {'<', '>'}
265           Guessed endianness of header
266
267        Examples
268        --------
269        Zeros header, no information, guess native
270
271        >>> hdr = AnalyzeHeader()
272        >>> hdr_data = np.zeros((), dtype=header_dtype)
273        >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code
274        True
275
276        A valid native header is guessed native
277
278        >>> hdr_data = hdr.structarr.copy()
279        >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code
280        True
281
282        And, when swapped, is guessed as swapped
283
284        >>> sw_hdr_data = hdr_data.byteswap(swapped_code)
285        >>> AnalyzeHeader.guessed_endian(sw_hdr_data) == swapped_code
286        True
287
288        The algorithm is as follows:
289
290        First, look at the first value in the ``dim`` field; this
291        should be between 0 and 7.  If it is between 1 and 7, then
292        this must be a native endian header.
293
294        >>> hdr_data = np.zeros((), dtype=header_dtype) # blank binary data
295        >>> hdr_data['dim'][0] = 1
296        >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code
297        True
298        >>> hdr_data['dim'][0] = 6
299        >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code
300        True
301        >>> hdr_data['dim'][0] = -1
302        >>> AnalyzeHeader.guessed_endian(hdr_data) == swapped_code
303        True
304
305        If the first ``dim`` value is zeros, we need a tie breaker.
306        In that case we check the ``sizeof_hdr`` field.  This should
307        be 348.  If it looks like the byteswapped value of 348,
308        assumed swapped.  Otherwise assume native.
309
310        >>> hdr_data = np.zeros((), dtype=header_dtype) # blank binary data
311        >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code
312        True
313        >>> hdr_data['sizeof_hdr'] = 1543569408
314        >>> AnalyzeHeader.guessed_endian(hdr_data) == swapped_code
315        True
316        >>> hdr_data['sizeof_hdr'] = -1
317        >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code
318        True
319
320        This is overridden by the ``dim[0]`` value though:
321
322        >>> hdr_data['sizeof_hdr'] = 1543569408
323        >>> hdr_data['dim'][0] = 1
324        >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code
325        True
326        """
327        dim0 = int(hdr['dim'][0])
328        if dim0 == 0:
329            if hdr['sizeof_hdr'].byteswap() == klass.sizeof_hdr:
330                return swapped_code
331            return native_code
332        elif 1 <= dim0 <= 7:
333            return native_code
334        return swapped_code
335
336    @classmethod
337    def default_structarr(klass, endianness=None):
338        """ Return header data for empty header with given endianness
339        """
340        hdr_data = super(AnalyzeHeader, klass).default_structarr(endianness)
341        hdr_data['sizeof_hdr'] = klass.sizeof_hdr
342        hdr_data['dim'] = 1
343        hdr_data['dim'][0] = 0
344        hdr_data['pixdim'] = 1
345        hdr_data['datatype'] = 16  # float32
346        hdr_data['bitpix'] = 32
347        return hdr_data
348
349    @classmethod
350    def from_header(klass, header=None, check=True):
351        """ Class method to create header from another header
352
353        Parameters
354        ----------
355        header : ``Header`` instance or mapping
356           a header of this class, or another class of header for
357           conversion to this type
358        check : {True, False}
359           whether to check header for integrity
360
361        Returns
362        -------
363        hdr : header instance
364           fresh header instance of our own class
365        """
366        # own type, return copy
367        if type(header) == klass:
368            obj = header.copy()
369            if check:
370                obj.check_fix()
371            return obj
372        # not own type, make fresh header instance
373        obj = klass(check=check)
374        if header is None:
375            return obj
376        if hasattr(header, 'as_analyze_map'):
377            # header is convertible from a field mapping
378            mapping = header.as_analyze_map()
379            for key in mapping:
380                try:
381                    obj[key] = mapping[key]
382                except (ValueError, KeyError):
383                    # the presence of the mapping certifies the fields as being
384                    # of the same meaning as for Analyze types, so we can
385                    # safely discard fields with names not known to this header
386                    # type on the basis they are from the wrong Analyze dialect
387                    pass
388            # set any fields etc that are specific to this format (overriden by
389            # sub-classes)
390            obj._clean_after_mapping()
391        # Fallback basic conversion always done.
392        # More specific warning for unsupported datatypes
393        orig_code = header.get_data_dtype()
394        try:
395            obj.set_data_dtype(orig_code)
396        except HeaderDataError:
397            raise HeaderDataError(f"Input header {header.__class__} has "
398                                  f"datatype {header.get_value_label('datatype')} "
399                                  f"but output header {klass} does not support it")
400        obj.set_data_dtype(header.get_data_dtype())
401        obj.set_data_shape(header.get_data_shape())
402        obj.set_zooms(header.get_zooms())
403        if check:
404            obj.check_fix()
405        return obj
406
    def _clean_after_mapping(self):
        """ Set format-specific stuff after converting header from mapping

        This routine cleans up Analyze-type headers that have had their fields
        set from an Analyze map returned by the ``as_analyze_map`` method.
        Nifti 1 / 2, SPM Analyze, Analyze are all Analyze-type headers.
        Because this map can set fields that are illegal for particular
        subtypes of the Analyze header, this routine cleans these up before the
        resulting header is checked and returned.

        For example, a Nifti1 single (``.nii``) header has magic "n+1".
        Passing the nifti single header for conversion to a Nifti1Pair header
        using the ``as_analyze_map`` method will by default set the header
        magic to "n+1", when it should be "ni1" for the pair header.  This
        method is for that kind of case - so the specific header can set fields
        like magic correctly, even though the mapping has given a wrong value.
        """
        # All current Nifti etc fields that are present in the Analyze header
        # have the same meaning as they do for Analyze.
        # Deliberate no-op here; subclasses override as needed.
        pass
427
428    def raw_data_from_fileobj(self, fileobj):
429        """ Read unscaled data array from `fileobj`
430
431        Parameters
432        ----------
433        fileobj : file-like
434           Must be open, and implement ``read`` and ``seek`` methods
435
436        Returns
437        -------
438        arr : ndarray
439           unscaled data array
440        """
441        dtype = self.get_data_dtype()
442        shape = self.get_data_shape()
443        offset = self.get_data_offset()
444        return array_from_file(shape, dtype, fileobj, offset)
445
446    def data_from_fileobj(self, fileobj):
447        """ Read scaled data array from `fileobj`
448
449        Use this routine to get the scaled image data from an image file
450        `fileobj`, given a header `self`.  "Scaled" means, with any header
451        scaling factors applied to the raw data in the file.  Use
452        `raw_data_from_fileobj` to get the raw data.
453
454        Parameters
455        ----------
456        fileobj : file-like
457           Must be open, and implement ``read`` and ``seek`` methods
458
459        Returns
460        -------
461        arr : ndarray
462           scaled data array
463
464        Notes
465        -----
466        We use the header to get any scale or intercept values to apply to the
467        data.  Raw Analyze files don't have scale factors or intercepts, but
468        this routine also works with formats based on Analyze, that do have
469        scaling, such as SPM analyze formats and NIfTI.
470        """
471        # read unscaled data
472        data = self.raw_data_from_fileobj(fileobj)
473        # get scalings from header.  Value of None means not present in header
474        slope, inter = self.get_slope_inter()
475        slope = 1.0 if slope is None else slope
476        inter = 0.0 if inter is None else inter
477        # Upcast as necessary for big slopes, intercepts
478        return apply_read_scaling(data, slope, inter)
479
480    def data_to_fileobj(self, data, fileobj, rescale=True):
481        """ Write `data` to `fileobj`, maybe rescaling data, modifying `self`
482
483        In writing the data, we match the header to the written data, by
484        setting the header scaling factors, iff `rescale` is True.  Thus we
485        modify `self` in the process of writing the data.
486
487        Parameters
488        ----------
489        data : array-like
490           data to write; should match header defined shape
491        fileobj : file-like object
492           Object with file interface, implementing ``write`` and
493           ``seek``
494        rescale : {True, False}, optional
495            Whether to try and rescale data to match output dtype specified by
496            header. If True and scaling needed and header cannot scale, then
497            raise ``HeaderTypeError``.
498
499        Examples
500        --------
501        >>> from nibabel.analyze import AnalyzeHeader
502        >>> hdr = AnalyzeHeader()
503        >>> hdr.set_data_shape((1, 2, 3))
504        >>> hdr.set_data_dtype(np.float64)
505        >>> from io import BytesIO
506        >>> str_io = BytesIO()
507        >>> data = np.arange(6).reshape(1,2,3)
508        >>> hdr.data_to_fileobj(data, str_io)
509        >>> data.astype(np.float64).tobytes('F') == str_io.getvalue()
510        True
511        """
512        data = np.asanyarray(data)
513        shape = self.get_data_shape()
514        if data.shape != shape:
515            raise HeaderDataError('Data should be shape (%s)' %
516                                  ', '.join(str(s) for s in shape))
517        out_dtype = self.get_data_dtype()
518        if rescale:
519            try:
520                arr_writer = make_array_writer(data,
521                                               out_dtype,
522                                               self.has_data_slope,
523                                               self.has_data_intercept)
524            except WriterError as e:
525                raise HeaderTypeError(str(e))
526        else:
527            arr_writer = ArrayWriter(data, out_dtype, check_scaling=False)
528        seek_tell(fileobj, self.get_data_offset())
529        arr_writer.to_fileobj(fileobj)
530        self.set_slope_inter(*get_slope_inter(arr_writer))
531
532    def get_data_dtype(self):
533        """ Get numpy dtype for data
534
535        For examples see ``set_data_dtype``
536        """
537        code = int(self._structarr['datatype'])
538        dtype = self._data_type_codes.dtype[code]
539        return dtype.newbyteorder(self.endianness)
540
541    def set_data_dtype(self, datatype):
542        """ Set numpy dtype for data from code or dtype or type
543
544        Examples
545        --------
546        >>> hdr = AnalyzeHeader()
547        >>> hdr.set_data_dtype(np.uint8)
548        >>> hdr.get_data_dtype()
549        dtype('uint8')
550        >>> hdr.set_data_dtype(np.dtype(np.uint8))
551        >>> hdr.get_data_dtype()
552        dtype('uint8')
553        >>> hdr.set_data_dtype('implausible') #doctest: +IGNORE_EXCEPTION_DETAIL
554        Traceback (most recent call last):
555           ...
556        HeaderDataError: data dtype "implausible" not recognized
557        >>> hdr.set_data_dtype('none') #doctest: +IGNORE_EXCEPTION_DETAIL
558        Traceback (most recent call last):
559           ...
560        HeaderDataError: data dtype "none" known but not supported
561        >>> hdr.set_data_dtype(np.void) #doctest: +IGNORE_EXCEPTION_DETAIL
562        Traceback (most recent call last):
563           ...
564        HeaderDataError: data dtype "<type 'numpy.void'>" known but not supported
565        """
566        dt = datatype
567        if dt not in self._data_type_codes:
568            try:
569                dt = np.dtype(dt)
570            except TypeError:
571                raise HeaderDataError(
572                    f'data dtype "{datatype}" not recognized')
573            if dt not in self._data_type_codes:
574                raise HeaderDataError(
575                    f'data dtype "{datatype}" not supported')
576        code = self._data_type_codes[dt]
577        dtype = self._data_type_codes.dtype[code]
578        # test for void, being careful of user-defined types
579        if dtype.type is np.void and not dtype.fields:
580            raise HeaderDataError(
581                f'data dtype "{datatype}" known but not supported')
582        self._structarr['datatype'] = code
583        self._structarr['bitpix'] = dtype.itemsize * 8
584
585    def get_data_shape(self):
586        """ Get shape of data
587
588        Examples
589        --------
590        >>> hdr = AnalyzeHeader()
591        >>> hdr.get_data_shape()
592        (0,)
593        >>> hdr.set_data_shape((1,2,3))
594        >>> hdr.get_data_shape()
595        (1, 2, 3)
596
597        Expanding number of dimensions gets default zooms
598
599        >>> hdr.get_zooms()
600        (1.0, 1.0, 1.0)
601        """
602        dims = self._structarr['dim']
603        ndims = dims[0]
604        if ndims == 0:
605            return 0,
606        return tuple(int(d) for d in dims[1:ndims + 1])
607
608    def set_data_shape(self, shape):
609        """ Set shape of data
610
611        If ``ndims == len(shape)`` then we set zooms for dimensions higher than
612        ``ndims`` to 1.0
613
614        Parameters
615        ----------
616        shape : sequence
617           sequence of integers specifying data array shape
618        """
619        dims = self._structarr['dim']
620        ndims = len(shape)
621        dims[:] = 1
622        dims[0] = ndims
623        try:
624            dims[1:ndims + 1] = shape
625        except (ValueError, OverflowError):
626            # numpy 1.4.1 at least generates a ValueError from trying to set a
627            # python long into an int64 array (dims are int64 for nifti2)
628            values_fit = False
629        else:
630            values_fit = np.all(dims[1:ndims + 1] == shape)
631        # Error if we did not succeed setting dimensions
632        if not values_fit:
633            raise HeaderDataError(f'shape {shape} does not fit in dim datatype')
634        self._structarr['pixdim'][ndims + 1:] = 1.0
635
636    def get_base_affine(self):
637        """ Get affine from basic (shared) header fields
638
639        Note that we get the translations from the center of the
640        image.
641
642        Examples
643        --------
644        >>> hdr = AnalyzeHeader()
645        >>> hdr.set_data_shape((3, 5, 7))
646        >>> hdr.set_zooms((3, 2, 1))
647        >>> hdr.default_x_flip
648        True
649        >>> hdr.get_base_affine() # from center of image
650        array([[-3.,  0.,  0.,  3.],
651               [ 0.,  2.,  0., -4.],
652               [ 0.,  0.,  1., -3.],
653               [ 0.,  0.,  0.,  1.]])
654        """
655        hdr = self._structarr
656        dims = hdr['dim']
657        ndim = dims[0]
658        return shape_zoom_affine(hdr['dim'][1:ndim + 1],
659                                 hdr['pixdim'][1:ndim + 1],
660                                 self.default_x_flip)
661
    # Basic Analyze stores no affine, so the base (guessed) affine is also
    # the best available
    get_best_affine = get_base_affine
663
664    def get_zooms(self):
665        """ Get zooms from header
666
667        Returns
668        -------
669        z : tuple
670           tuple of header zoom values
671
672        Examples
673        --------
674        >>> hdr = AnalyzeHeader()
675        >>> hdr.get_zooms()
676        (1.0,)
677        >>> hdr.set_data_shape((1,2))
678        >>> hdr.get_zooms()
679        (1.0, 1.0)
680        >>> hdr.set_zooms((3, 4))
681        >>> hdr.get_zooms()
682        (3.0, 4.0)
683        """
684        hdr = self._structarr
685        dims = hdr['dim']
686        ndim = dims[0]
687        if ndim == 0:
688            return (1.0,)
689        pixdims = hdr['pixdim']
690        return tuple(pixdims[1:ndim + 1])
691
692    def set_zooms(self, zooms):
693        """ Set zooms into header fields
694
695        See docstring for ``get_zooms`` for examples
696        """
697        hdr = self._structarr
698        dims = hdr['dim']
699        ndim = dims[0]
700        zooms = np.asarray(zooms)
701        if len(zooms) != ndim:
702            raise HeaderDataError('Expecting %d zoom values for ndim %d'
703                                  % (ndim, ndim))
704        if np.any(zooms < 0):
705            raise HeaderDataError('zooms must be positive')
706        pixdims = hdr['pixdim']
707        pixdims[1:ndim + 1] = zooms[:]
708
    def as_analyze_map(self):
        """ Return header as mapping for conversion to Analyze types

        Collect data from custom header type to fill in fields for Analyze and
        derived header types (such as Nifti1 and Nifti2).

        When Analyze types convert another header type to their own type, they
        call this method to check if there are other Analyze / Nifti
        fields that the source header would like to set.

        Returns
        -------
        analyze_map : mapping
            Object that can be used as a mapping thus::

                for key in analyze_map:
                    value = analyze_map[key]

            where ``key`` is the name of a field that can be set in an Analyze
            header type, such as Nifti1, and ``value`` is a value for the
            field.  For example, `analyze_map` might be something like
            ``dict(regular='y', slice_duration=0.3)`` where ``regular`` is a
            field present in both Analyze and Nifti1, and ``slice_duration`` is
            a field restricted to Nifti1 and Nifti2.  If a particular Analyze
            header type does not recognize the field name, it will throw away
            the value without error.  See :meth:`Analyze.from_header`.

        Notes
        -----
        You can also return a Nifti header with the relevant fields set.

        Your header still needs methods ``get_data_dtype``, ``get_data_shape``
        and ``get_zooms``, for the conversion, and these get called *after*
        using the analyze map, so the methods will override values set in the
        map.
        """
        # In the case of Analyze types, the header is already such a mapping
        return self
747
    def set_data_offset(self, offset):
        """ Set offset into data file to read data

        Parameters
        ----------
        offset : number
            byte offset at which data starts; stored in the ``vox_offset``
            header field
        """
        self._structarr['vox_offset'] = offset
752
    def get_data_offset(self):
        """ Return offset into data file to read data

        The ``vox_offset`` field is stored as a float; we return it
        converted to an int.

        Examples
        --------
        >>> hdr = AnalyzeHeader()
        >>> hdr.get_data_offset()
        0
        >>> hdr['vox_offset'] = 12
        >>> hdr.get_data_offset()
        12
        """
        return int(self._structarr['vox_offset'])
766
    def get_slope_inter(self):
        """ Get scalefactor and intercept

        These are not implemented for basic Analyze

        Returns
        -------
        slope : None
            always None - basic Analyze has no slope field
        inter : None
            always None - basic Analyze has no intercept field
        """
        return None, None
773
774    def set_slope_inter(self, slope, inter=None):
775        """ Set slope and / or intercept into header
776
777        Set slope and intercept for image data, such that, if the image
778        data is ``arr``, then the scaled image data will be ``(arr *
779        slope) + inter``
780
781        In this case, for Analyze images, we can't store the slope or the
782        intercept, so this method only checks that `slope` is None or NaN or
783        1.0, and that `inter` is None or NaN or 0.
784
785        Parameters
786        ----------
787        slope : None or float
788            If float, value must be NaN or 1.0 or we raise a ``HeaderTypeError``
789        inter : None or float, optional
790            If float, value must be 0.0 or we raise a ``HeaderTypeError``
791        """
792        if ((slope in (None, 1) or np.isnan(slope)) and
793                (inter in (None, 0) or np.isnan(inter))):
794            return
795        raise HeaderTypeError('Cannot set slope != 1 or intercept != 0 '
796                              'for Analyze headers')
797
    @classmethod
    def _get_checks(klass):
        """ Return sequence of check functions for this class """
        # Note _chk_datatype runs before _chk_bitpix, which looks up the
        # dtype for the datatype code when fixing bitpix
        return (klass._chk_sizeof_hdr,
                klass._chk_datatype,
                klass._chk_bitpix,
                klass._chk_pixdims)

    """ Check functions in format expected by BatteryRunner class """
807
808    @classmethod
809    def _chk_sizeof_hdr(klass, hdr, fix=False):
810        rep = Report(HeaderDataError)
811        if hdr['sizeof_hdr'] == klass.sizeof_hdr:
812            return hdr, rep
813        rep.problem_level = 30
814        rep.problem_msg = 'sizeof_hdr should be ' + str(klass.sizeof_hdr)
815        if fix:
816            hdr['sizeof_hdr'] = klass.sizeof_hdr
817            rep.fix_msg = 'set sizeof_hdr to ' + str(klass.sizeof_hdr)
818        return hdr, rep
819
820    @classmethod
821    def _chk_datatype(klass, hdr, fix=False):
822        rep = Report(HeaderDataError)
823        code = int(hdr['datatype'])
824        try:
825            dtype = klass._data_type_codes.dtype[code]
826        except KeyError:
827            rep.problem_level = 40
828            rep.problem_msg = 'data code %d not recognized' % code
829        else:
830            if dtype.itemsize == 0:
831                rep.problem_level = 40
832                rep.problem_msg = 'data code %d not supported' % code
833            else:
834                return hdr, rep
835        if fix:
836            rep.fix_msg = 'not attempting fix'
837        return hdr, rep
838
839    @classmethod
840    def _chk_bitpix(klass, hdr, fix=False):
841        rep = Report(HeaderDataError)
842        code = int(hdr['datatype'])
843        try:
844            dt = klass._data_type_codes.dtype[code]
845        except KeyError:
846            rep.problem_level = 10
847            rep.problem_msg = 'no valid datatype to fix bitpix'
848            if fix:
849                rep.fix_msg = 'no way to fix bitpix'
850            return hdr, rep
851        bitpix = dt.itemsize * 8
852        if bitpix == hdr['bitpix']:
853            return hdr, rep
854        rep.problem_level = 10
855        rep.problem_msg = 'bitpix does not match datatype'
856        if fix:
857            hdr['bitpix'] = bitpix  # inplace modification
858            rep.fix_msg = 'setting bitpix to match datatype'
859        return hdr, rep
860
    @staticmethod
    def _chk_pixdims(hdr, fix=False):
        """ Check spatial pixdims (pixdim[1:4]) are positive; fix if asked

        Zero pixdims (problem level 30) are set to 1 when fixing; negative
        pixdims (level 35) are replaced by their absolute values.  When both
        problems occur, the reported level is 35, because the negative-dims
        branch runs last and overwrites ``level``.
        """
        rep = Report(HeaderDataError)
        pixdims = hdr['pixdim']
        # NB: view into the header array - in-place writes below update hdr
        spat_dims = pixdims[1:4]
        if not np.any(spat_dims <= 0):
            # All spatial dims strictly positive (NaNs also compare False
            # against <= 0, so they pass through unflagged here)
            return hdr, rep
        neg_dims = spat_dims < 0
        zero_dims = spat_dims == 0
        pmsgs = []
        fmsgs = []
        if np.any(zero_dims):
            level = 30
            pmsgs.append('pixdim[1,2,3] should be non-zero')
            if fix:
                # In-place assignment through the view modifies hdr directly
                spat_dims[zero_dims] = 1
                fmsgs.append('setting 0 dims to 1')
        if np.any(neg_dims):
            # More serious than zeros; overrides level 30 when both occur
            level = 35
            pmsgs.append('pixdim[1,2,3] should be positive')
            if fix:
                # Rebinds spat_dims to a new array; written back to the
                # header below via pixdims[1:4] = spat_dims
                spat_dims = np.abs(spat_dims)
                fmsgs.append('setting to abs of pixdim values')
        # `level` is always bound here: the early return above fires unless
        # at least one of the zero / negative branches was taken
        rep.problem_level = level
        rep.problem_msg = ' and '.join(pmsgs)
        if fix:
            pixdims[1:4] = spat_dims
            rep.fix_msg = ' and '.join(fmsgs)
        return hdr, rep
890
891    @classmethod
892    def may_contain_header(klass, binaryblock):
893        if len(binaryblock) < klass.sizeof_hdr:
894            return False
895
896        hdr_struct = np.ndarray(shape=(), dtype=header_dtype,
897                                buffer=binaryblock[:klass.sizeof_hdr])
898        bs_hdr_struct = hdr_struct.byteswap()
899        return 348 in (hdr_struct['sizeof_hdr'], bs_hdr_struct['sizeof_hdr'])
900
901
class AnalyzeImage(SpatialImage):
    """ Class for basic Analyze format image
    """
    # Header class used to parse / build the .hdr part of the image
    header_class = AnalyzeHeader
    # Number of bytes to read when sniffing a file to detect this format
    _meta_sniff_len = header_class.sizeof_hdr
    # Analyze keeps image data and header in separate .img / .hdr files
    files_types = (('image', '.img'), ('header', '.hdr'))
    valid_exts = ('.img', '.hdr')
    _compressed_suffixes = ('.gz', '.bz2')

    # Images of this class can be created in memory and written back out
    makeable = True
    rw = True

    # Proxy class wrapping the on-disk data array for lazy loading
    ImageArrayProxy = ArrayProxy

    def __init__(self, dataobj, affine, header=None,
                 extra=None, file_map=None):
        super(AnalyzeImage, self).__init__(
            dataobj, affine, header, extra, file_map)
        # Reset consumable values
        self._header.set_data_offset(0)
        self._header.set_slope_inter(None, None)
    __init__.__doc__ = SpatialImage.__init__.__doc__

    def get_data_dtype(self):
        """ Return numpy dtype of on-disk data, as recorded in the header """
        return self._header.get_data_dtype()

    def set_data_dtype(self, dtype):
        """ Set numpy dtype of on-disk data, writing through to the header """
        self._header.set_data_dtype(dtype)

    @classmethod
    def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None):
        """ Class method to create image from mapping in ``file_map``

        .. deprecated:: 2.4.1
            ``keep_file_open='auto'`` is redundant with `False` and has
            been deprecated. It raises an error as of nibabel 3.0.

        Parameters
        ----------
        file_map : dict
            Mapping with (key, value) pairs of (``file_type``, FileHolder
            instance) giving file-likes for each file needed for this image
            type.
        mmap : {True, False, 'c', 'r'}, optional, keyword only
            `mmap` controls the use of numpy memory mapping for reading image
            array data.  If False, do not try numpy ``memmap`` for data array.
            If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``.  A
            `mmap` value of True gives the same behavior as ``mmap='c'``.  If
            image data file cannot be memory-mapped, ignore `mmap` value and
            read array from file.
        keep_file_open : { None, True, False }, optional, keyword only
            `keep_file_open` controls whether a new file handle is created
            every time the image is accessed, or a single file handle is
            created and used for the lifetime of this ``ArrayProxy``. If
            ``True``, a single file handle is created and used. If ``False``,
            a new file handle is created every time the image is accessed.
            If ``file_map`` refers to an open file handle, this setting has no
            effect. The default value (``None``) will result in the value of
            ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used.

        Returns
        -------
        img : AnalyzeImage instance
        """
        if mmap not in (True, False, 'c', 'r'):
            raise ValueError("mmap should be one of {True, False, 'c', 'r'}")
        hdr_fh, img_fh = klass._get_fileholders(file_map)
        with hdr_fh.get_prepare_fileobj(mode='rb') as hdrf:
            header = klass.header_class.from_fileobj(hdrf)
        # Give the array proxy its own copy of the header, so that later
        # modifications to the image's header do not affect the proxy
        hdr_copy = header.copy()
        imgf = img_fh.fileobj
        if imgf is None:
            # No open file-like for the image; fall back to the filename
            imgf = img_fh.filename
        data = klass.ImageArrayProxy(imgf, hdr_copy, mmap=mmap,
                                     keep_file_open=keep_file_open)
        # Initialize without affine to allow header to pass through unmodified
        img = klass(data, None, header, file_map=file_map)
        # set affine from header though
        img._affine = header.get_best_affine()
        img._load_cache = {'header': hdr_copy,
                           'affine': img._affine.copy(),
                           'file_map': copy_file_map(file_map)}
        return img

    @staticmethod
    def _get_fileholders(file_map):
        """ Return fileholder for header and image

        Allows single-file image types to return one fileholder for both types.
        For Analyze there are two fileholders, one for the header, one for the
        image.
        """
        return file_map['header'], file_map['image']

    def to_file_map(self, file_map=None):
        """ Write image to `file_map` or contained ``self.file_map``

        Parameters
        ----------
        file_map : None or mapping, optional
           files mapping.  If None (default) use object's ``file_map``
           attribute instead
        """
        if file_map is None:
            file_map = self.file_map
        # Realize the (possibly proxied) image array in memory
        data = np.asanyarray(self.dataobj)
        self.update_header()
        hdr = self._header
        out_dtype = self.get_data_dtype()
        # Store consumable values for later restore
        offset = hdr.get_data_offset()
        # Scalars of slope, offset to get immutable values
        slope = hdr['scl_slope'].item() if hdr.has_data_slope else np.nan
        inter = hdr['scl_inter'].item() if hdr.has_data_intercept else np.nan
        # Check whether to calculate slope / inter
        scale_me = np.all(np.isnan((slope, inter)))
        if scale_me:
            arr_writer = make_array_writer(data,
                                           out_dtype,
                                           hdr.has_data_slope,
                                           hdr.has_data_intercept)
        else:
            # Slope / intercept already set; write the data unscaled
            arr_writer = ArrayWriter(data, out_dtype, check_scaling=False)
        hdr_fh, img_fh = self._get_fileholders(file_map)
        # Check if hdr and img refer to same file; this can happen with odd
        # analyze images but most often this is because it's a single nifti
        # file
        hdr_img_same = hdr_fh.same_file_as(img_fh)
        hdrf = hdr_fh.get_prepare_fileobj(mode='wb')
        if hdr_img_same:
            imgf = hdrf
        else:
            imgf = img_fh.get_prepare_fileobj(mode='wb')
        # Rescale values if asked
        if scale_me:
            hdr.set_slope_inter(*get_slope_inter(arr_writer))
        # Write header
        hdr.write_to(hdrf)
        # Write image
        # Seek to writing position, get there by writing zeros if seek fails
        seek_tell(imgf, hdr.get_data_offset(), write0=True)
        # Write array data
        arr_writer.to_fileobj(imgf)
        hdrf.close_if_mine()
        if not hdr_img_same:
            imgf.close_if_mine()
        # Record the header / file_map reflecting what was just written
        self._header = hdr
        self.file_map = file_map
        # Restore any changed consumable values
        hdr.set_data_offset(offset)
        if hdr.has_data_slope:
            hdr['scl_slope'] = slope
        if hdr.has_data_intercept:
            hdr['scl_inter'] = inter
1056
1057
# Module-level convenience aliases for loading / saving Analyze images
load = AnalyzeImage.load
save = AnalyzeImage.instance_to_filename
1060