1# -*- coding: utf-8 -*-
2import os
3import sys
4import contextlib
5import logging
6
7from os.path import join as pjoin
8from hashlib import md5
9from shutil import copyfileobj
10from tqdm.auto import tqdm
11import numpy as np
12import nibabel as nib
13
14import tarfile
15import zipfile
16from dipy.core.gradients import (gradient_table,
17                                 gradient_table_from_gradient_strength_bvecs)
18from dipy.io.gradients import read_bvals_bvecs
19from dipy.io.image import load_nifti, load_nifti_data
20
21from urllib.request import urlopen
22
# Set a user-writeable file-system location to put files:
# DIPY_HOME environment variable overrides the default ~/.dipy location.
dipy_home = os.environ.get('DIPY_HOME',
                           pjoin(os.path.expanduser('~'), '.dipy'))

# The URL to the University of Washington Researchworks repository:
UW_RW_URL = \
    "https://digital.lib.washington.edu/researchworks/bitstream/handle/"
32
33
class FetcherError(Exception):
    """Raised when downloading or verifying a remote dataset fails."""
36
37
38def _log(msg):
39    """Helper function used as short hand for logging.
40    """
41    logger = logging.getLogger(__name__)
42    logger.info(msg)
43
44
def copyfileobj_withprogress(fsrc, fdst, total_length, length=16 * 1024):
    """Copy `fsrc` to `fdst` in chunks while showing a progress bar.

    Parameters
    ----------
    fsrc : file-like
        Source object; read in chunks of ``length`` bytes until exhausted.
    fdst : file-like
        Destination object, written to as chunks arrive.
    total_length : int or str
        Expected total number of bytes (e.g. an HTTP Content-Length header
        value); used only to size the progress bar.
    length : int, optional
        Chunk size in bytes. Default: 16 KiB.
    """
    # Report real byte counts and let tqdm scale them (KB/MB/...). The
    # previous version ticked once per 16 KiB chunk while labelling the
    # units " MB", so the displayed progress was off by ~64x.
    with tqdm(total=int(total_length), unit='B', unit_scale=True) as pbar:
        while True:
            buf = fsrc.read(length)
            if not buf:
                break
            fdst.write(buf)
            pbar.update(len(buf))
51
52
def _already_there_msg(folder):
    """Log that the dataset under `folder` is already present."""
    _log('Dataset is already in place. If you want to fetch it again '
         'please first remove the folder %s ' % folder)
60
61
62def _get_file_md5(filename):
63    """Compute the md5 checksum of a file"""
64    md5_data = md5()
65    with open(filename, 'rb') as f:
66        for chunk in iter(lambda: f.read(128 * md5_data.block_size), b''):
67            md5_data.update(chunk)
68    return md5_data.hexdigest()
69
70
def check_md5(filename, stored_md5=None):
    """
    Computes the md5 of filename and check if it matches with the supplied
    string md5

    Parameters
    -----------
    filename : string
        Path to a file.
    stored_md5 : string, optional
        Known md5 of filename to check against. If None (default), checking
        is skipped.

    Raises
    ------
    FetcherError
        If the computed md5 checksum of `filename` differs from
        `stored_md5`. The file is left in place when this is raised.
    """
    if stored_md5 is not None:
        computed_md5 = _get_file_md5(filename)
        if stored_md5 != computed_md5:
            msg = """The downloaded file, %s, does not have the expected md5
   checksum of "%s". Instead, the md5 checksum was: "%s". This could mean that
   something is wrong with the file or that the upstream file has been updated.
   You can try downloading the file again or updating to the newest version of
   dipy.""" % (filename, stored_md5,
                computed_md5)
            raise FetcherError(msg)
94
95
def _get_file_data(fname, url):
    """Download `url` to the local path `fname`.

    Shows a progress bar when the server reports a Content-Length header;
    otherwise copies silently.

    Parameters
    ----------
    fname : str
        Local path the response body is written to (binary mode).
    url : str
        URL to download.
    """
    with contextlib.closing(urlopen(url)) as opener:
        # headers is a mapping; .get() returns None when the server does
        # not advertise a length, replacing the try/except KeyError dance.
        response_size = opener.headers.get('content-length')

        with open(fname, 'wb') as data:
            if response_size is None:
                copyfileobj(opener, data)
            else:
                copyfileobj_withprogress(opener, data, response_size)
108
109
def fetch_data(files, folder, data_size=None):
    """Downloads files to folder and checks their md5 checksums

    Parameters
    ----------
    files : dictionary
        For each file in `files` the value should be (url, md5). The file
        will be downloaded from url if the file does not already exist or
        if the file exists but the md5 checksum does not match.
    folder : str
        The directory where to save the file, the directory will be created
        if it does not already exist.
    data_size : str, optional
        A string describing the size of the data (e.g. "91 MB") to be
        logged to the screen. Default does not produce any information
        about data size.

    Raises
    ------
    FetcherError
        Raises if the md5 checksum of the file does not match the expected
        value. The downloaded file is not deleted when this error is raised.
    """
    if not os.path.exists(folder):
        _log("Creating new folder %s" % folder)
        os.makedirs(folder)

    if data_size is not None:
        _log('Data size is approximately %s' % data_size)

    all_skip = True
    # Iterate items() directly, and call the checksum `stored_md5` so it no
    # longer shadows the hashlib.md5 constructor imported at module level.
    for fname, (url, stored_md5) in files.items():
        fullpath = pjoin(folder, fname)
        # Skip files already present with the expected checksum.
        if os.path.exists(fullpath) and _get_file_md5(fullpath) == stored_md5:
            continue
        all_skip = False
        _log('Downloading "%s" to %s' % (fname, folder))
        _get_file_data(fullpath, url)
        check_md5(fullpath, stored_md5)
    if all_skip:
        _already_there_msg(folder)
    else:
        _log("Files successfully downloaded to %s" % folder)
153
154
def _make_fetcher(name, folder, baseurl, remote_fnames, local_fnames,
                  md5_list=None, doc="", data_size=None, msg=None,
                  unzip=False):
    """ Create a new fetcher

    Parameters
    ----------
    name : str
        The name of the fetcher function.
    folder : str
        The full path to the folder in which the files would be placed locally.
        Typically, this is something like 'pjoin(dipy_home, 'foo')'
    baseurl : str
        The URL from which this fetcher reads files
    remote_fnames : list of strings
        The names of the files in the baseurl location
    local_fnames : list of strings
        The names of the files to be saved on the local filesystem
    md5_list : list of strings, optional
        The md5 checksums of the files. Used to verify the content of the
        files. Default: None, skipping checking md5.
    doc : str, optional.
        Documentation of the fetcher.
    data_size : str, optional.
        If provided, is sent as a message to the user before downloading
        starts.
    msg : str, optional.
        A message to print to screen when fetching takes place. Default (None)
        is to print nothing
    unzip : bool, optional
        Whether to unzip the file(s) after downloading them. Supports
        .tar.gz / .tar.bz2 archives and .zip files; any other extension
        makes the fetcher raise ValueError.

    Returns
    -------
    fetcher : function
        A function that, when called, fetches data according to the designated
        inputs. It returns ``(files, folder)`` where ``files`` maps each
        local filename to its (url, md5) pair (plus, for zip archives, the
        tuple of extracted member names).
    """
    def fetcher():
        # Map local filename -> (url, md5); md5 entries are None when no
        # checksum list was supplied.
        files = {}
        for i, (f, n), in enumerate(zip(remote_fnames, local_fnames)):
            files[n] = (baseurl + f, md5_list[i] if
                        md5_list is not None else None)
        fetch_data(files, folder, data_size)

        if msg is not None:
            _log(msg)
        if unzip:
            for f in local_fnames:
                split_ext = os.path.splitext(f)
                if split_ext[-1] == '.gz' or split_ext[-1] == '.bz2':
                    # Only tarballs are handled; a bare .gz/.bz2 file falls
                    # through to the ValueError below.
                    if os.path.splitext(split_ext[0])[-1] == '.tar':
                        ar = tarfile.open(pjoin(folder, f))
                        ar.extractall(path=folder)
                        ar.close()
                    else:
                        raise ValueError('File extension is not recognized')
                elif split_ext[-1] == '.zip':
                    z = zipfile.ZipFile(pjoin(folder, f), 'r')
                    # Record the extracted member names so callers can
                    # locate the unpacked files.
                    files[f] += (tuple(z.namelist()), )
                    z.extractall(folder)
                    z.close()
                else:
                    raise ValueError('File extension is not recognized')

        return files, folder

    fetcher.__name__ = name
    fetcher.__doc__ = doc
    return fetcher
225
226
# Module-level fetcher functions built by _make_fetcher: calling one
# downloads its dataset into a sub-folder of dipy_home.

fetch_isbi2013_2shell = _make_fetcher(
    "fetch_isbi2013_2shell",
    pjoin(dipy_home, 'isbi2013'),
    UW_RW_URL + '1773/38465/',
    ['phantom64.nii.gz',
     'phantom64.bval',
     'phantom64.bvec'],
    ['phantom64.nii.gz', 'phantom64.bval', 'phantom64.bvec'],
    ['42911a70f232321cf246315192d69c42',
     '90e8cf66e0f4d9737a3b3c0da24df5ea',
     '4b7aa2757a1ccab140667b76e8075cb1'],
    doc="Download a 2-shell software phantom dataset",
    data_size="")

fetch_stanford_labels = _make_fetcher(
    "fetch_stanford_labels",
    pjoin(dipy_home, 'stanford_hardi'),
    'https://stacks.stanford.edu/file/druid:yx282xq2090/',
    ["aparc-reduced.nii.gz", "label_info.txt"],
    ["aparc-reduced.nii.gz", "label_info.txt"],
    ['742de90090d06e687ce486f680f6d71a',
     '39db9f0f5e173d7a2c2e51b07d5d711b'],
    doc="Download reduced freesurfer aparc image from stanford web site")

fetch_sherbrooke_3shell = _make_fetcher(
    "fetch_sherbrooke_3shell",
    pjoin(dipy_home, 'sherbrooke_3shell'),
    UW_RW_URL + "1773/38475/",
    ['HARDI193.nii.gz', 'HARDI193.bval', 'HARDI193.bvec'],
    ['HARDI193.nii.gz', 'HARDI193.bval', 'HARDI193.bvec'],
    ['0b735e8f16695a37bfbd66aab136eb66',
     'e9b9bb56252503ea49d31fb30a0ac637',
     '0c83f7e8b917cd677ad58a078658ebb7'],
    doc="Download a 3shell HARDI dataset with 192 gradient direction")
261
262
# Remote 'dwi.*' files are saved locally under the 'HARDI150.*' names.
fetch_stanford_hardi = _make_fetcher(
    "fetch_stanford_hardi",
    pjoin(dipy_home, 'stanford_hardi'),
    'https://stacks.stanford.edu/file/druid:yx282xq2090/',
    ['dwi.nii.gz', 'dwi.bvals', 'dwi.bvecs'],
    ['HARDI150.nii.gz', 'HARDI150.bval', 'HARDI150.bvec'],
    ['0b18513b46132b4d1051ed3364f2acbc',
     '4e08ee9e2b1d2ec3fddb68c70ae23c36',
     '4c63a586f29afc6a48a5809524a76cb4'],
    doc="Download a HARDI dataset with 160 gradient directions")

fetch_stanford_t1 = _make_fetcher(
    "fetch_stanford_t1",
    pjoin(dipy_home, 'stanford_hardi'),
    'https://stacks.stanford.edu/file/druid:yx282xq2090/',
    ['t1.nii.gz'],
    ['t1.nii.gz'],
    ['a6a140da6a947d4131b2368752951b0a'])

fetch_stanford_pve_maps = _make_fetcher(
    "fetch_stanford_pve_maps",
    pjoin(dipy_home, 'stanford_hardi'),
    'https://stacks.stanford.edu/file/druid:yx282xq2090/',
    ['pve_csf.nii.gz', 'pve_gm.nii.gz', 'pve_wm.nii.gz'],
    ['pve_csf.nii.gz', 'pve_gm.nii.gz', 'pve_wm.nii.gz'],
    ['2c498e4fed32bca7f726e28aa86e9c18',
     '1654b20aeb35fc2734a0d7928b713874',
     '2e244983cf92aaf9f9d37bc7716b37d5'])

fetch_taiwan_ntu_dsi = _make_fetcher(
    "fetch_taiwan_ntu_dsi",
    pjoin(dipy_home, 'taiwan_ntu_dsi'),
    UW_RW_URL + "1773/38480/",
    ['DSI203.nii.gz', 'DSI203.bval', 'DSI203.bvec', 'DSI203_license.txt'],
    ['DSI203.nii.gz', 'DSI203.bval', 'DSI203.bvec', 'DSI203_license.txt'],
    ['950408c0980a7154cb188666a885a91f',
     '602e5cb5fad2e7163e8025011d8a6755',
     'a95eb1be44748c20214dc7aa654f9e6b',
     '7fa1d5e272533e832cc7453eeba23f44'],
    doc="Download a DSI dataset with 203 gradient directions",
    msg="See DSI203_license.txt for LICENSE. For the complete datasets" +
        " please visit http://dsi-studio.labsolver.org",
    data_size="91MB")
306
fetch_syn_data = _make_fetcher(
    "fetch_syn_data",
    pjoin(dipy_home, 'syn_test'),
    UW_RW_URL + "1773/38476/",
    ['t1.nii.gz', 'b0.nii.gz'],
    ['t1.nii.gz', 'b0.nii.gz'],
    ['701bda02bb769655c7d4a9b1df2b73a6',
     'e4b741f0c77b6039e67abb2885c97a78'],
    data_size="12MB",
    doc="Download t1 and b0 volumes from the same session")

fetch_mni_template = _make_fetcher(
    "fetch_mni_template",
    pjoin(dipy_home, 'mni_template'),
    'https://ndownloader.figshare.com/files/',
    ['5572676?private_link=4b8666116a0128560fb5',
     '5572673?private_link=93216e750d5a7e568bda',
     '5572670?private_link=33c92d54d1afb9aa7ed2',
     '5572661?private_link=584319b23e7343fed707'],
    ['mni_icbm152_t2_tal_nlin_asym_09a.nii',
     'mni_icbm152_t1_tal_nlin_asym_09a.nii',
     'mni_icbm152_t1_tal_nlin_asym_09c_mask.nii',
     'mni_icbm152_t1_tal_nlin_asym_09c.nii'],
    ['f41f2e1516d880547fbf7d6a83884f0d',
     '1ea8f4f1e41bc17a94602e48141fdbc8',
     'a243e249cd01a23dc30f033b9656a786',
     '3d5dd9b0cd727a17ceec610b782f66c1'],
    doc="fetch the MNI 2009a T1 and T2, and 2009c T1 and T1 mask files",
    data_size="70MB")

# NOTE(review): this fetcher extracts directly into dipy_home (no
# dedicated sub-folder), unlike the other fetchers in this module.
fetch_scil_b0 = _make_fetcher(
    "fetch_scil_b0",
    dipy_home,
    UW_RW_URL + "1773/38479/",
    ['datasets_multi-site_all_companies.zip'],
    ['datasets_multi-site_all_companies.zip'],
    ["e9810fa5bf21b99da786647994d7d5b7"],
    doc="Download b=0 datasets from multiple MR systems (GE, Philips, " +
        "Siemens) and different magnetic fields (1.5T and 3T)",
    data_size="9.2MB",
    unzip=True)
348
fetch_bundles_2_subjects = _make_fetcher(
    "fetch_bundles_2_subjects",
    pjoin(dipy_home, 'exp_bundles_and_maps'),
    UW_RW_URL + '1773/38477/',
    ['bundles_2_subjects.tar.gz'],
    ['bundles_2_subjects.tar.gz'],
    ['97756fbef11ce2df31f1bedf1fc7aac7'],
    data_size="234MB",
    doc="Download 2 subjects from the SNAIL dataset with their bundles",
    unzip=True)

fetch_ivim = _make_fetcher(
    "fetch_ivim",
    pjoin(dipy_home, 'ivim'),
    'https://ndownloader.figshare.com/files/',
    ['5305243', '5305246', '5305249'],
    ['ivim.nii.gz', 'ivim.bval', 'ivim.bvec'],
    ['cda596f89dc2676af7d9bf1cabccf600',
     'f03d89f84aa9a9397103a400e43af43a',
     'fb633a06b02807355e49ccd85cb92565'],
    doc="Download IVIM dataset")

# NOTE(review): UW_RW_URL already ends with '/', so the leading '/' below
# produces a doubled slash in the final URL. The server appears to
# tolerate it; confirm before changing, since the md5s pin the content.
fetch_cfin_multib = _make_fetcher(
    "fetch_cfin_multib",
    pjoin(dipy_home, 'cfin_multib'),
    UW_RW_URL + '/1773/38488/',
    ['T1.nii',
     '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.nii',
     '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bval',
     '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bvec'],
    ['T1.nii',
     '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.nii',
     '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bval',
     '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bvec'],
    ['889883b5e7d93a6e372bc760ea887e7c',
     '9daea1d01d68fd0055a3b34f5ffd5f6e',
     '3ee44135fde7ea5c9b8c801414bdde2c',
     '948373391de950e7cc1201ba9f696bf0'],
    doc="Download CFIN multi b-value diffusion data",
    msg=("This data was provided by Brian Hansen and Sune Jespersen" +
         " More details about the data are available in their paper: " +
         " https://www.nature.com/articles/sdata201672"))

fetch_file_formats = _make_fetcher(
    "bundle_file_formats_example",
    pjoin(dipy_home, 'bundle_file_formats_example'),
    'https://zenodo.org/record/3352379/files/',
    ['cc_m_sub.trk', 'laf_m_sub.tck', 'lpt_m_sub.fib',
     'raf_m_sub.vtk', 'rpt_m_sub.dpy', 'template0.nii.gz'],
    ['cc_m_sub.trk', 'laf_m_sub.tck', 'lpt_m_sub.fib',
     'raf_m_sub.vtk', 'rpt_m_sub.dpy', 'template0.nii.gz'],
    ['78ed7bead3e129fb4b4edd6da1d7e2d2', '20009796ccd43dc8d2d5403b25dff717',
     '8afa8419e2efe04ede75cce1f53c77d8', '9edcbea30c7a83b467c3cdae6ce963c8',
     '42bff2538a650a7ff1e57bfd9ed90ad6', '99c37a2134026d2c4bbb7add5088ddc6'],
    doc="Download 5 bundles in various file formats and their reference",
    data_size="25MB")
405
fetch_bundle_atlas_hcp842 = _make_fetcher(
    "fetch_bundle_atlas_hcp842",
    pjoin(dipy_home, 'bundle_atlas_hcp842'),
    'https://ndownloader.figshare.com/files/',
    ['13638644'],
    ['Atlas_80_Bundles.zip'],
    ['78331d527a10ec000d4f33bac472e099'],
    doc="Download atlas tractogram from the hcp842 dataset with 80 bundles",
    data_size="300MB",
    unzip=True)

fetch_target_tractogram_hcp = _make_fetcher(
    "fetch_target_tractogram_hcp",
    pjoin(dipy_home, 'target_tractogram_hcp'),
    'https://ndownloader.figshare.com/files/',
    ['12871127'],
    ['hcp_tractogram.zip'],
    ['fa25ef19c9d3748929b6423397963b6a'],
    doc="Download tractogram of one of the hcp dataset subjects",
    data_size="541MB",
    unzip=True)
427
428
fetch_bundle_fa_hcp = _make_fetcher(
    "fetch_bundle_fa_hcp",
    pjoin(dipy_home, 'bundle_fa_hcp'),
    'https://ndownloader.figshare.com/files/',
    ['14035265'],
    ['hcp_bundle_fa.nii.gz'],
    ['2d5c0036b0575597378ddf39191028ea'],
    # Space added before "of": the concatenated doc previously rendered
    # as "...within two bundles in oneof the hcp dataset subjects".
    doc=("Download map of FA within two bundles in one " +
         "of the hcp dataset subjects"),
    data_size="230kb")
439
440
fetch_qtdMRI_test_retest_2subjects = _make_fetcher(
    "fetch_qtdMRI_test_retest_2subjects",
    pjoin(dipy_home, 'qtdMRI_test_retest_2subjects'),
    'https://zenodo.org/record/996889/files/',
    ['subject1_dwis_test.nii.gz', 'subject2_dwis_test.nii.gz',
     'subject1_dwis_retest.nii.gz', 'subject2_dwis_retest.nii.gz',
     'subject1_ccmask_test.nii.gz', 'subject2_ccmask_test.nii.gz',
     'subject1_ccmask_retest.nii.gz', 'subject2_ccmask_retest.nii.gz',
     'subject1_scheme_test.txt', 'subject2_scheme_test.txt',
     'subject1_scheme_retest.txt', 'subject2_scheme_retest.txt'],
    ['subject1_dwis_test.nii.gz', 'subject2_dwis_test.nii.gz',
     'subject1_dwis_retest.nii.gz', 'subject2_dwis_retest.nii.gz',
     'subject1_ccmask_test.nii.gz', 'subject2_ccmask_test.nii.gz',
     'subject1_ccmask_retest.nii.gz', 'subject2_ccmask_retest.nii.gz',
     'subject1_scheme_test.txt', 'subject2_scheme_test.txt',
     'subject1_scheme_retest.txt', 'subject2_scheme_retest.txt'],
    ['ebd7441f32c40e25c28b9e069bd81981',
     'dd6a64dd68c8b321c75b9d5fb42c275a',
     '830a7a028a66d1b9812f93309a3f9eae',
     'd7f1951e726c35842f7ea0a15d990814',
     'ddb8dfae908165d5e82c846bcc317cab',
     '5630c06c267a0f9f388b07b3e563403c',
     '02e9f92b31e8980f658da99e532e14b5',
     '6e7ce416e7cfda21cecce3731f81712b',
     '957cb969f97d89e06edd7a04ffd61db0',
     '5540c0c9bd635c29fc88dd599cbbf5e6',
     '5540c0c9bd635c29fc88dd599cbbf5e6',
     '5540c0c9bd635c29fc88dd599cbbf5e6'],
    doc="Downloads test-retest qt-dMRI acquisitions of two C57Bl6 mice.",
    data_size="298.2MB")


# NOTE(review): data_size "47.KB" looks like a typo for "47 KB"; the value
# is only logged, so it is harmless.
fetch_gold_standard_io = _make_fetcher(
    "fetch_gold_standard_io",
    pjoin(dipy_home, 'gold_standard_io'),
    'https://zenodo.org/record/2651349/files/',
    ['gs.trk', 'gs.tck', 'gs.fib', 'gs.dpy', 'gs.nii', 'gs_3mm.nii',
     'gs_rasmm_space.txt', 'gs_voxmm_space.txt', 'gs_vox_space.txt',
     'points_data.txt', 'streamlines_data.txt'],
    ['gs.trk', 'gs.tck', 'gs.fib', 'gs.dpy', 'gs.nii', 'gs_3mm.nii',
     'gs_rasmm_space.txt', 'gs_voxmm_space.txt', 'gs_vox_space.txt',
     'points_data.json', 'streamlines_data.json'],
    ['3acf565779f4d5107f96b2ef90578d64',
     '151a30cf356c002060d720bf9d577245',
     'e9818e07bef5bd605dea0877df14a2b0',
     '248606297e400d1a9b1786845aad8de3',
     'a2d4d8f62d1de0ab9927782c7d51cb27',
     '217b3ae0712a02b2463b8eedfe9a0a68',
     'ca193a5508d3313d542231aaf262960f',
     '3284de59dfd9ca3130e6e01258ed9022',
     'a2a89c387f45adab733652a92f6602d5',
     '4bcca0c6195871fc05e93cdfabec22b4',
     '578f29052ac03a6d8a98580eb7c70d97'],
    doc="Downloads the gold standard for streamlines io testing.",
    data_size="47.KB")


fetch_fury_surface = _make_fetcher(
    "fetch_fury_surface",
    pjoin(dipy_home, 'fury_surface'),
    'https://raw.githubusercontent.com/fury-gl/fury-data/master/surfaces/',
    ['100307_white_lh.vtk'],
    ['100307_white_lh.vtk'],
    ['dbec91e29af15541a5cb36d80977b26b'],
    doc="Surface for testing and examples",
    data_size="11MB")
507
508
def get_fnames(name='small_64D'):
    """Provide full paths to example or test datasets.

    Parameters
    ----------
    name : str
        the filename/s of which dataset to return, one of:
        - 'small_64D' small region of interest nifti,bvecs,bvals 64 directions
        - 'small_101D' small region of interest nifti, bvecs, bvals
          101 directions
        - 'aniso_vox' volume with anisotropic voxel size as Nifti
        - 'fornix' 300 tracks in Trackvis format (from Pittsburgh
          Brain Competition)
        - 'gqi_vectors' the scanner wave vectors needed for a GQI acquisitions
          of 101 directions tested on Siemens 3T Trio
        - 'small_25' small ROI (10x8x2) DTI data (b value 2000, 25 directions)
        - 'test_piesno' slice of N=8, K=14 diffusion data
        - 'reg_c' small 2D image used for validating registration
        - 'reg_o' small 2D image used for validation registration
        - 'cb_2' two vectorized cingulum bundles

        Names not listed above (e.g. 'stanford_hardi', 'ivim', 'scil_b0')
        trigger a download of the corresponding dataset on first use;
        an unrecognized name returns None.

    Returns
    -------
    fnames : tuple
        filenames for dataset

    Examples
    --------
    >>> import numpy as np
    >>> from dipy.io.image import load_nifti
    >>> from dipy.data import get_fnames
    >>> fimg, fbvals, fbvecs = get_fnames('small_101D')
    >>> bvals=np.loadtxt(fbvals)
    >>> bvecs=np.loadtxt(fbvecs).T
    >>> data, affine = load_nifti(fimg)
    >>> data.shape == (6, 10, 10, 102)
    True
    >>> bvals.shape == (102,)
    True
    >>> bvecs.shape == (102, 3)
    True

    """
    # Small datasets are bundled with the package under dipy/data/files.
    DATA_DIR = pjoin(os.path.dirname(__file__), 'files')
    if name == 'small_64D':
        fbvals = pjoin(DATA_DIR, 'small_64D.bval')
        fbvecs = pjoin(DATA_DIR, 'small_64D.bvec')
        fimg = pjoin(DATA_DIR, 'small_64D.nii')
        return fimg, fbvals, fbvecs
    if name == '55dir_grad.bvec':
        return pjoin(DATA_DIR, '55dir_grad.bvec')
    if name == 'small_101D':
        fbvals = pjoin(DATA_DIR, 'small_101D.bval')
        fbvecs = pjoin(DATA_DIR, 'small_101D.bvec')
        fimg = pjoin(DATA_DIR, 'small_101D.nii.gz')
        return fimg, fbvals, fbvecs
    if name == 'aniso_vox':
        return pjoin(DATA_DIR, 'aniso_vox.nii.gz')
    if name == 'ascm_test':
        return pjoin(DATA_DIR, 'ascm_out_test.nii.gz')
    if name == 'fornix':
        return pjoin(DATA_DIR, 'tracks300.trk')
    if name == 'gqi_vectors':
        return pjoin(DATA_DIR, 'ScannerVectors_GQI101.txt')
    if name == 'dsi515btable':
        return pjoin(DATA_DIR, 'dsi515_b_table.txt')
    if name == 'dsi4169btable':
        return pjoin(DATA_DIR, 'dsi4169_b_table.txt')
    if name == 'grad514':
        return pjoin(DATA_DIR, 'grad_514.txt')
    if name == "small_25":
        fbvals = pjoin(DATA_DIR, 'small_25.bval')
        fbvecs = pjoin(DATA_DIR, 'small_25.bvec')
        fimg = pjoin(DATA_DIR, 'small_25.nii.gz')
        return fimg, fbvals, fbvecs
    if name == 'small_25_streamlines':
        fstreamlines = pjoin(DATA_DIR, 'EuDX_small_25.trk')
        return fstreamlines
    if name == "S0_10":
        fimg = pjoin(DATA_DIR, 'S0_10slices.nii.gz')
        return fimg
    if name == "test_piesno":
        fimg = pjoin(DATA_DIR, 'test_piesno.nii.gz')
        return fimg
    if name == "reg_c":
        return pjoin(DATA_DIR, 'C.npy')
    if name == "reg_o":
        return pjoin(DATA_DIR, 'circle.npy')
    if name == 'cb_2':
        return pjoin(DATA_DIR, 'cb_2.npz')
    if name == "t1_coronal_slice":
        return pjoin(DATA_DIR, 't1_coronal_slice.npy')
    if name == "t-design":
        # Only the 45-point t-design file is shipped.
        N = 45
        return pjoin(DATA_DIR, 'tdesign' + str(N) + '.txt')
    # From here on, names refer to datasets fetched on demand over the
    # network by the module-level fetchers.
    if name == 'scil_b0':
        files, folder = fetch_scil_b0()
        # Third element of the zip entry is the tuple of extracted names.
        files = files['datasets_multi-site_all_companies.zip'][2]
        files = [pjoin(folder, f) for f in files]
        return [f for f in files if os.path.isfile(f)]
    if name == 'stanford_hardi':
        files, folder = fetch_stanford_hardi()
        fraw = pjoin(folder, 'HARDI150.nii.gz')
        fbval = pjoin(folder, 'HARDI150.bval')
        fbvec = pjoin(folder, 'HARDI150.bvec')
        return fraw, fbval, fbvec
    if name == 'taiwan_ntu_dsi':
        files, folder = fetch_taiwan_ntu_dsi()
        fraw = pjoin(folder, 'DSI203.nii.gz')
        fbval = pjoin(folder, 'DSI203.bval')
        fbvec = pjoin(folder, 'DSI203.bvec')
        return fraw, fbval, fbvec
    if name == 'sherbrooke_3shell':
        files, folder = fetch_sherbrooke_3shell()
        fraw = pjoin(folder, 'HARDI193.nii.gz')
        fbval = pjoin(folder, 'HARDI193.bval')
        fbvec = pjoin(folder, 'HARDI193.bvec')
        return fraw, fbval, fbvec
    if name == 'isbi2013_2shell':
        files, folder = fetch_isbi2013_2shell()
        fraw = pjoin(folder, 'phantom64.nii.gz')
        fbval = pjoin(folder, 'phantom64.bval')
        fbvec = pjoin(folder, 'phantom64.bvec')
        return fraw, fbval, fbvec
    if name == 'stanford_labels':
        files, folder = fetch_stanford_labels()
        return pjoin(folder, "aparc-reduced.nii.gz")
    if name == 'syn_data':
        files, folder = fetch_syn_data()
        t1_name = pjoin(folder, 't1.nii.gz')
        b0_name = pjoin(folder, 'b0.nii.gz')
        return t1_name, b0_name
    if name == 'stanford_t1':
        files, folder = fetch_stanford_t1()
        return pjoin(folder, 't1.nii.gz')
    if name == 'stanford_pve_maps':
        files, folder = fetch_stanford_pve_maps()
        f_pve_csf = pjoin(folder, 'pve_csf.nii.gz')
        f_pve_gm = pjoin(folder, 'pve_gm.nii.gz')
        f_pve_wm = pjoin(folder, 'pve_wm.nii.gz')
        return f_pve_csf, f_pve_gm, f_pve_wm
    if name == 'ivim':
        files, folder = fetch_ivim()
        fraw = pjoin(folder, 'ivim.nii.gz')
        fbval = pjoin(folder, 'ivim.bval')
        fbvec = pjoin(folder, 'ivim.bvec')
        return fraw, fbval, fbvec
    if name == 'tissue_data':
        # fetch_tissue_data is defined elsewhere in this module.
        files, folder = fetch_tissue_data()
        t1_name = pjoin(folder, 't1_brain.nii.gz')
        t1d_name = pjoin(folder, 't1_brain_denoised.nii.gz')
        ap_name = pjoin(folder, 'power_map.nii.gz')
        return t1_name, t1d_name, ap_name
    if name == 'cfin_multib':
        files, folder = fetch_cfin_multib()
        t1_name = pjoin(folder, 'T1.nii')
        fraw = pjoin(folder, '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.nii')
        fbval = pjoin(folder,
                      '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bval')
        fbvec = pjoin(folder,
                      '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bvec')
        return fraw, fbval, fbvec, t1_name
    # NOTE(review): 'tractrogram' is misspelled, but this is the public key
    # callers pass — renaming it would silently break them.
    if name == 'target_tractrogram_hcp':
        files, folder = fetch_target_tractogram_hcp()
        return pjoin(folder, 'target_tractogram_hcp', 'hcp_tractogram',
                     'streamlines.trk')
    if name == 'bundle_atlas_hcp842':
        # get_bundle_atlas_hcp842 is defined elsewhere in this module.
        files, folder = fetch_bundle_atlas_hcp842()
        return get_bundle_atlas_hcp842()
    if name == 'fury_surface':
        files, folder = fetch_fury_surface()
        surface_name = pjoin(folder, '100307_white_lh.vtk')
        return surface_name
682
683
def read_qtdMRI_test_retest_2subjects():
    r""" Load test-retest qt-dMRI acquisitions of two C57Bl6 mice. These
    datasets were used to study test-retest reproducibility of time-dependent
    q-space indices (q$\tau$-indices) in the corpus callosum of two mice [1].
    The data itself and its details are publicly available and can be cited at
    [2].

    The test-retest diffusion MRI spin echo sequences were acquired from two
    C57Bl6 wild-type mice on an 11.7 Tesla Bruker scanner. The test and retest
    acquisition were taken 48 hours from each other. The (processed) data
    consists of 80x160x5 voxels of size 110x110x500μm. Each data set consists
    of 515 Diffusion-Weighted Images (DWIs) spread over 35 acquisition shells.
    The shells are spread over 7 gradient strength shells with a maximum
    gradient strength of 491 mT/m, 5 pulse separation shells between
    [10.8 - 20.0]ms, and a pulse length of 5ms. We manually created a brain
    mask and corrected the data from eddy currents and motion artifacts using
    FSL's eddy. A region of interest was then drawn in the middle slice in the
    corpus callosum, where the tissue is reasonably coherent.

    Returns
    -------
    data : list of length 4
        contains the dwi datasets ordered as
        (subject1_test, subject1_retest, subject2_test, subject2_retest)
    cc_masks : list of length 4
        contains the corpus callosum masks ordered in the same order as data.
    gtabs : list of length 4
        contains the qt-dMRI gradient tables of the data sets.

    References
    ----------
    .. [1] Fick, Rutger HJ, et al. "Non-Parametric GraphNet-Regularized
        Representation of dMRI in Space and Time", Medical Image Analysis,
        2017.
    .. [2] Wassermann, Demian, et al., "Test-Retest qt-dMRI datasets for
        `Non-Parametric GraphNet-Regularized Representation of dMRI in Space
        and Time'". doi:10.5281/zenodo.996889, 2017.
    """
    # Docstring is raw (r""") so "$\tau$" stays literal; in a plain string
    # the "\t" would silently become a tab character.
    # DWI volumes: test and retest sessions for both subjects.
    data = []
    data_names = [
        'subject1_dwis_test.nii.gz', 'subject1_dwis_retest.nii.gz',
        'subject2_dwis_test.nii.gz', 'subject2_dwis_retest.nii.gz'
    ]
    for data_name in data_names:
        data_loc = pjoin(dipy_home, 'qtdMRI_test_retest_2subjects', data_name)
        data.append(load_nifti_data(data_loc))

    # Corpus-callosum masks, same subject/session ordering as `data`.
    cc_masks = []
    mask_names = [
        'subject1_ccmask_test.nii.gz', 'subject1_ccmask_retest.nii.gz',
        'subject2_ccmask_test.nii.gz', 'subject2_ccmask_retest.nii.gz'
    ]
    for mask_name in mask_names:
        mask_loc = pjoin(dipy_home, 'qtdMRI_test_retest_2subjects', mask_name)
        cc_masks.append(load_nifti_data(mask_loc))

    # Acquisition schemes: per the indexing below, columns [1:4] are bvecs,
    # [4] gradient strength G, [5] small_delta, [6] big_delta.
    gtabs = []
    gtab_txt_names = [
        'subject1_scheme_test.txt', 'subject1_scheme_retest.txt',
        'subject2_scheme_test.txt', 'subject2_scheme_retest.txt'
    ]
    for gtab_txt_name in gtab_txt_names:
        txt_loc = pjoin(dipy_home, 'qtdMRI_test_retest_2subjects',
                        gtab_txt_name)
        qtdmri_scheme = np.loadtxt(txt_loc, skiprows=1)
        bvecs = qtdmri_scheme[:, 1:4]
        G = qtdmri_scheme[:, 4] / 1e3  # because dipy takes T/mm not T/m
        small_delta = qtdmri_scheme[:, 5]
        big_delta = qtdmri_scheme[:, 6]
        gtab = gradient_table_from_gradient_strength_bvecs(
            G, bvecs, big_delta, small_delta
        )
        gtabs.append(gtab)

    return data, cc_masks, gtabs
759
760
def read_scil_b0():
    """Load GE 3T b0 image from the scil b0 dataset.

    Returns
    -------
    img : obj,
        Nifti1Image

    """
    # get_fnames lists the extracted b0 files; index 0 is the GE 3T one.
    b0_paths = get_fnames('scil_b0')
    return nib.load(b0_paths[0])
772
773
def read_siemens_scil_b0():
    """Load Siemens 1.5T b0 image from the scil b0 dataset.

    Returns
    -------
    img : obj,
        Nifti1Image

    """
    # Second entry of the extracted file list is the Siemens 1.5T b0.
    b0_paths = get_fnames('scil_b0')
    return nib.load(b0_paths[1])
785
786
def read_isbi2013_2shell():
    """Load ISBI 2013 2-shell synthetic dataset.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    nifti_path, bval_path, bvec_path = get_fnames('isbi2013_2shell')
    bvals, bvecs = read_bvals_bvecs(bval_path, bvec_path)
    gtab = gradient_table(bvals, bvecs)
    return nib.load(nifti_path), gtab
803
804
def read_sherbrooke_3shell():
    """Load Sherbrooke 3-shell HARDI dataset.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    dwi_fname, bval_fname, bvec_fname = get_fnames('sherbrooke_3shell')
    bvals, bvecs = read_bvals_bvecs(bval_fname, bvec_fname)
    return nib.load(dwi_fname), gradient_table(bvals, bvecs)
821
822
def read_stanford_labels():
    """Read stanford hardi data and label map."""
    # Loading the HARDI volume also triggers its download if needed.
    hardi_img, gtab = read_stanford_hardi()
    labels_img = nib.load(get_fnames('stanford_labels'))
    return hardi_img, gtab, labels_img
831
832
def read_stanford_hardi():
    """Load Stanford HARDI dataset.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    dwi_fname, bval_fname, bvec_fname = get_fnames('stanford_hardi')
    bvals, bvecs = read_bvals_bvecs(bval_fname, bvec_fname)
    return nib.load(dwi_fname), gradient_table(bvals, bvecs)
849
850
def read_stanford_t1():
    """Load the Stanford T1 volume.

    Returns
    -------
    img : obj,
        Nifti1Image

    """
    return nib.load(get_fnames('stanford_t1'))
855
856
def read_stanford_pve_maps():
    """Load the three Stanford PVE maps (CSF, GM, WM).

    Returns
    -------
    tuple of obj,
        Three Nifti1Image objects, in (CSF, GM, WM) order.

    """
    csf_fname, gm_fname, wm_fname = get_fnames('stanford_pve_maps')
    return tuple(nib.load(fname)
                 for fname in (csf_fname, gm_fname, wm_fname))
863
864
def read_taiwan_ntu_dsi():
    """Load Taiwan NTU dataset.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    dwi_fname, bval_fname, bvec_fname = get_fnames('taiwan_ntu_dsi')
    bvals, bvecs = read_bvals_bvecs(bval_fname, bvec_fname)
    # Normalize every gradient direction except the first (b0 row) to
    # unit length before building the gradient table.
    norms = np.linalg.norm(bvecs[1:], axis=1)
    bvecs[1:] = bvecs[1:] / norms[:, None]
    return nib.load(dwi_fname), gradient_table(bvals, bvecs)
883
884
def read_syn_data():
    """Load t1 and b0 volumes from the same session.

    Returns
    -------
    t1 : obj,
        Nifti1Image
    b0 : obj,
        Nifti1Image

    """
    t1_fname, b0_fname = get_fnames('syn_data')
    return nib.load(t1_fname), nib.load(b0_fname)
900
901
def fetch_tissue_data():
    """Download images to be used for tissue classification.

    Downloads three Nifti1 images (T1, denoised T1 and anisotropic power
    map) from figshare into ``<dipy_home>/tissue_data`` unless that folder
    already exists, and verifies each file's md5 checksum.

    Returns
    -------
    fname_list : list of str
        The bare file names of the images (not full paths).
    folder : str
        The local folder the images live in.
    """
    t1 = 'https://ndownloader.figshare.com/files/6965969'
    t1d = 'https://ndownloader.figshare.com/files/6965981'
    ap = 'https://ndownloader.figshare.com/files/6965984'

    folder = pjoin(dipy_home, 'tissue_data')

    md5_list = ['99c4b77267a6855cbfd96716d5d65b70',  # t1
                '4b87e1b02b19994fbd462490cc784fa3',  # t1d
                'c0ea00ed7f2ff8b28740f18aa74bff6a']  # ap

    url_list = [t1, t1d, ap]
    fname_list = ['t1_brain.nii.gz', 't1_brain_denoised.nii.gz',
                  'power_map.nii.gz']

    if not os.path.exists(folder):
        _log('Creating new directory %s' % folder)
        os.makedirs(folder)
        _log('Downloading 3 Nifti1 images (9.3MB)...')

        # Download each file once and verify its checksum; zip keeps the
        # name/url/md5 triples aligned without index bookkeeping.
        for fname, url, checksum in zip(fname_list, url_list, md5_list):
            fpath = pjoin(folder, fname)
            _get_file_data(fpath, url)
            check_md5(fpath, checksum)

        _log('Done.')
        _log('Files copied in folder %s' % folder)
    else:
        _already_there_msg(folder)

    return fname_list, folder
936
937
def read_tissue_data(contrast='T1'):
    """Load images to be used for tissue classification.

    Parameters
    ----------
    contrast : str
        'T1', 'T1 denoised' or 'Anisotropic Power'

    Returns
    -------
    image : obj,
        Nifti1Image

    """
    folder = pjoin(dipy_home, 'tissue_data')
    t1_name = pjoin(folder, 't1_brain.nii.gz')
    t1d_name = pjoin(folder, 't1_brain_denoised.nii.gz')
    ap_name = pjoin(folder, 'power_map.nii.gz')

    # All three checksums are verified regardless of the requested contrast.
    for path, checksum in ((t1_name, '99c4b77267a6855cbfd96716d5d65b70'),
                           (t1d_name, '4b87e1b02b19994fbd462490cc784fa3'),
                           (ap_name, 'c0ea00ed7f2ff8b28740f18aa74bff6a')):
        check_md5(path, checksum)

    # Any unrecognized contrast falls back to the plain T1 image.
    selected = {'T1 denoised': t1d_name,
                'Anisotropic Power': ap_name}.get(contrast, t1_name)
    return nib.load(selected)
971
972
# Shared "Notes" text (template provenance, citations and license) that is
# appended below to the docstrings of the MNI-template related functions.
mni_notes = \
    """
    Notes
    -----
    The templates were downloaded from the MNI (McGill University)
    `website <http://www.bic.mni.mcgill.ca/ServicesAtlases/ICBM152NLin2009>`_
    in July 2015.

    The following publications should be referenced when using these templates:

    .. [1] VS Fonov, AC Evans, K Botteron, CR Almli, RC McKinstry, DL Collins
           and BDCG, Unbiased average age-appropriate atlases for pediatric
           studies, NeuroImage, 54:1053-8119,
           DOI: 10.1016/j.neuroimage.2010.07.033

    .. [2] VS Fonov, AC Evans, RC McKinstry, CR Almli and DL Collins,
            Unbiased nonlinear average age-appropriate brain templates from
            birth to adulthood, NeuroImage, 47:S102
            Organization for Human Brain Mapping 2009 Annual Meeting,
            DOI: https://doi.org/10.1016/S1053-8119(09)70884-5

    **License for the MNI templates:**

    Copyright (C) 1993-2004, Louis Collins McConnell Brain Imaging Centre,
    Montreal Neurological Institute, McGill University. Permission to use,
    copy, modify, and distribute this software and its documentation for any
    purpose and without fee is hereby granted, provided that the above
    copyright notice appear in all copies. The authors and McGill University
    make no representations about the suitability of this software for any
    purpose. It is provided "as is" without express or implied warranty. The
    authors are not responsible for any data loss, equipment damage, property
    loss, or injury to subjects or patients resulting from the use or misuse
    of this software package.
"""
1007
1008
def read_mni_template(version="a", contrast="T2"):
    """Read the MNI template from disk.

    Parameters
    ----------
    version: string
        There are two MNI templates 2009a and 2009c, so options available are:
        "a" and "c".
    contrast : list or string, optional
        Which of the contrast templates to read. For version "a" two contrasts
        are available: "T1" and "T2". Similarly for version "c" there are two
        options, "T1" and "mask". You can input contrast as a string or a list

    Returns
    -------
    list : contains the nibabel.Nifti1Image objects requested, according to the
        order they were requested in the input.

    Raises
    ------
    ValueError
        If ``version`` is not "a"/"c", or if a contrast that does not exist
        for the requested version is asked for.

    Examples
    --------
    >>> # Get only the T1 file for version c:
    >>> T1 = read_mni_template("c", contrast = "T1") # doctest: +SKIP
    >>> # Get both files in this order for version a:
    >>> T1, T2 = read_mni_template(contrast = ["T1", "T2"]) # doctest: +SKIP

    """
    files, folder = fetch_mni_template()
    file_dict_a = {"T1": pjoin(folder, 'mni_icbm152_t1_tal_nlin_asym_09a.nii'),
                   "T2": pjoin(folder, 'mni_icbm152_t2_tal_nlin_asym_09a.nii')}

    file_dict_c = {
        "T1": pjoin(folder, 'mni_icbm152_t1_tal_nlin_asym_09c.nii'),
        "mask": pjoin(folder, 'mni_icbm152_t1_tal_nlin_asym_09c_mask.nii')}

    # Validate version/contrast combinations that simply do not exist, both
    # for a single string and for a list of contrasts.
    requested = [contrast] if isinstance(contrast, str) else list(contrast)
    if version == "c" and "T2" in requested:
        raise ValueError("No T2 image for MNI template 2009c")
    if version == "a" and "mask" in requested:
        raise ValueError("No template mask available for MNI 2009a")

    if version == "a":
        file_dict = file_dict_a
    elif version == "c":
        file_dict = file_dict_c
    else:
        raise ValueError("Only 2009a and 2009c versions are available")

    # A string contrast returns a single image; a list returns a list in the
    # requested order.
    if isinstance(contrast, str):
        return nib.load(file_dict[contrast])
    return [nib.load(file_dict[k]) for k in requested]
1072
1073
# Append the shared MNI notes (provenance, citations, license) to the
# docstrings of both MNI-related functions (fetch_mni_template is defined
# earlier in this file):
read_mni_template.__doc__ += mni_notes
fetch_mni_template.__doc__ += mni_notes
1077
1078
def fetch_cenir_multib(with_raw=False):
    """Fetch 'HCP-like' data, collected at multiple b-values.

    Parameters
    ----------
    with_raw : bool
        Whether to fetch the raw data. Per default, this is False, which means
        that only eddy-current/motion corrected data is fetched
    """
    folder = pjoin(dipy_home, 'cenir_multib')

    fname_list = ['4D_dwi_eddycor_B200.nii.gz',
                  'dwi_bvals_B200', 'dwi_bvecs_B200',
                  '4D_dwieddycor_B400.nii.gz',
                  'bvals_B400', 'bvecs_B400',
                  '4D_dwieddycor_B1000.nii.gz',
                  'bvals_B1000', 'bvecs_B1000',
                  '4D_dwieddycor_B2000.nii.gz',
                  'bvals_B2000', 'bvecs_B2000',
                  '4D_dwieddycor_B3000.nii.gz',
                  'bvals_B3000', 'bvecs_B3000']

    md5_list = ['fd704aa3deb83c1c7229202cb3db8c48',
                '80ae5df76a575fe5bf9f1164bb0d4cfb',
                '18e90f8a3e6a4db2457e5b1ba1cc98a9',
                '3d0f2b8ef7b6a4a3aa5c4f7a90c9cfec',
                'c38056c40c9cc42372232d6e75c47f54',
                '810d79b4c30cb7dff3b2000017d5f72a',
                'dde8037601a14436b2173f4345b5fd17',
                '97de6a492ae304f39e0b418b6ebac64c',
                'f28a0faa701bdfc66e31bde471a5b992',
                'c5e4b96e3afdee99c0e994eff3b2331a',
                '9c83b8d5caf9c3def240f320f2d2f56c',
                '05446bd261d57193d8dbc097e06db5ff',
                'f0d70456ce424fda2cecd48e64f3a151',
                '336accdb56acbbeff8dac1748d15ceb8',
                '27089f3baaf881d96f6a9da202e3d69b']

    # The raw (uncorrected) volumes are only fetched on request.
    if with_raw:
        fname_list += ['4D_dwi_B200.nii.gz', '4D_dwi_B400.nii.gz',
                       '4D_dwi_B1000.nii.gz', '4D_dwi_B2000.nii.gz',
                       '4D_dwi_B3000.nii.gz']
        md5_list += ['a8c36e76101f2da2ca8119474ded21d5',
                     'a0e7939f6d977458afbb2f4659062a79',
                     '87fc307bdc2e56e105dffc81b711a808',
                     '7c23e8a5198624aa29455f0578025d4f',
                     '4e4324c676f5a97b3ded8bbb100bf6e5']

    baseurl = UW_RW_URL + '1773/33311/'
    files = {name: (baseurl + name, checksum)
             for name, checksum in zip(fname_list, md5_list)}

    fetch_data(files, folder)
    return files, folder
1133
1134
def read_cenir_multib(bvals=None):
    """Read CENIR multi b-value data.

    Parameters
    ----------
    bvals : list or int
        The b-values to read from file (200, 400, 1000, 2000, 3000).

    Returns
    -------
    gtab : a GradientTable class instance
    img : nibabel.Nifti1Image

    """
    files, folder = fetch_cenir_multib(with_raw=False)
    if bvals is None:
        bvals = [200, 400, 1000, 2000, 3000]
    if isinstance(bvals, int):
        bvals = [bvals]

    # Per-shell file names; note the B200 shell uses a different naming
    # scheme ('dwi_' prefixed bvals/bvecs) than the other shells.
    shell_fnames = {
        200: ('4D_dwi_eddycor_B200.nii.gz',
              'dwi_bvals_B200', 'dwi_bvecs_B200'),
        400: ('4D_dwieddycor_B400.nii.gz', 'bvals_B400', 'bvecs_B400'),
        1000: ('4D_dwieddycor_B1000.nii.gz', 'bvals_B1000', 'bvecs_B1000'),
        2000: ('4D_dwieddycor_B2000.nii.gz', 'bvals_B2000', 'bvecs_B2000'),
        3000: ('4D_dwieddycor_B3000.nii.gz', 'bvals_B3000', 'bvecs_B3000'),
    }
    file_dict = {b: {'DWI': pjoin(folder, dwi_f),
                     'bvals': pjoin(folder, bval_f),
                     'bvecs': pjoin(folder, bvec_f)}
                 for b, (dwi_f, bval_f, bvec_f) in shell_fnames.items()}

    data = []
    bval_list = []
    bvec_list = []
    for bval in bvals:
        data.append(load_nifti_data(file_dict[bval]['DWI']))
        bval_list.extend(np.loadtxt(file_dict[bval]['bvals']))
        bvec_list.append(np.loadtxt(file_dict[bval]['bvecs']))

    # All affines are the same, so grab the last one:
    aff = nib.load(file_dict[bval]['DWI']).affine
    return (nib.Nifti1Image(np.concatenate(data, -1), aff),
            gradient_table(bval_list, np.concatenate(bvec_list, -1)))
1181
1182
# Shared "Notes" text pointing at the UW researchworks page with the
# acquisition/processing details for the CENIR multi-b dataset.
CENIR_notes = \
    """
    Notes
    -----
    Details of the acquisition and processing, and additional meta-data are
    available through UW researchworks:

    https://digital.lib.washington.edu/researchworks/handle/1773/33311
    """

# Append the shared notes to the docstrings of both CENIR-related functions.
fetch_cenir_multib.__doc__ += CENIR_notes
read_cenir_multib.__doc__ += CENIR_notes
1195
1196
def read_bundles_2_subjects(subj_id='subj_1', metrics=('fa',),
                            bundles=('af.left', 'cst.right', 'cc_1')):
    r"""Read images and streamlines from 2 subjects of the SNAIL dataset.

    Parameters
    ----------
    subj_id : string
        Either ``subj_1`` or ``subj_2``.
    metrics : sequence
        Either ['fa'] or ['t1'] or ['fa', 't1']
    bundles : sequence
        E.g., ['af.left', 'cst.right', 'cc_1']. See all the available bundles
        in the ``exp_bundles_maps/bundles_2_subjects`` directory of your
        ``$HOME/.dipy`` folder.

    Returns
    -------
    dix : dict
        Dictionary with data of the metrics and the bundles as keys.

    Raises
    ------
    ValueError
        If ``metrics`` contains neither 'fa' nor 't1' (no affine would be
        available in that case).

    Notes
    -----
    If you are using these datasets please cite the following publications.

    References
    ----------
    .. [1] Renauld, E., M. Descoteaux, M. Bernier, E. Garyfallidis,
    K. Whittingstall, "Morphology of thalamus, LGN and optic radiation do not
    influence EEG alpha waves", Plos One (under submission), 2015.

    .. [2] Garyfallidis, E., O. Ocegueda, D. Wassermann,
    M. Descoteaux. Robust and efficient linear registration of fascicles in the
    space of streamlines , Neuroimage, 117:124-140, 2015.

    """
    # NOTE: defaults are tuples (not lists) to avoid the shared mutable
    # default-argument pitfall; membership tests / iteration are unaffected.
    dname = pjoin(dipy_home, 'exp_bundles_and_maps', 'bundles_2_subjects')

    # Local imports (as in the original) to avoid import cycles at
    # module-load time.
    from dipy.io.streamline import load_tractogram
    from dipy.tracking.streamline import Streamlines

    res = {}
    affine = None

    if 't1' in metrics:
        data, affine = load_nifti(pjoin(dname, subj_id, 't1_warped.nii.gz'))
        res['t1'] = data

    if 'fa' in metrics:
        fa, affine = load_nifti(pjoin(dname, subj_id, 'fa_1x1x1.nii.gz'))
        res['fa'] = fa

    if affine is None:
        # Previously this raised an obscure NameError; fail with a clear
        # message instead.
        raise ValueError("metrics must include 'fa' and/or 't1'")
    res['affine'] = affine

    for bun in bundles:
        streams = load_tractogram(pjoin(dname, subj_id,
                                        'bundles', 'bundles_' + bun + '.trk'),
                                  'same',
                                  bbox_valid_check=False).streamlines
        res[bun] = Streamlines(streams)

    return res
1260
1261
def read_ivim():
    """Load IVIM dataset.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    dwi_fname, bval_fname, bvec_fname = get_fnames('ivim')
    bvals, bvecs = read_bvals_bvecs(bval_fname, bvec_fname)
    # b0_threshold=0: only exactly-zero b-values are treated as b0 volumes.
    gtab = gradient_table(bvals, bvecs, b0_threshold=0)
    return nib.load(dwi_fname), gtab
1278
1279
def read_cfin_dwi():
    """Load CFIN multi b-value DWI data.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    # The fourth entry for 'cfin_multib' is the T1 image, not needed here.
    dwi_fname, bval_fname, bvec_fname, _ = get_fnames('cfin_multib')
    bvals, bvecs = read_bvals_bvecs(bval_fname, bvec_fname)
    return nib.load(dwi_fname), gradient_table(bvals, bvecs)
1296
1297
def read_cfin_t1():
    """Load CFIN T1-weighted data.

    Returns
    -------
    img : obj,
        Nifti1Image

    """
    # Only the fourth entry (the T1 image) of the 'cfin_multib' set is used.
    fnames = get_fnames('cfin_multib')
    return nib.load(fnames[3])
1310
1311
def get_file_formats():
    """Return example bundle paths in several formats plus a reference image.

    Returns
    -------
    bundles_list : all bundles (list)
    ref_anat : reference
    """
    base_dir = pjoin(dipy_home, 'bundle_file_formats_example')
    ref_anat = pjoin(base_dir, 'template0.nii.gz')
    bundle_fnames = ('cc_m_sub.trk', 'laf_m_sub.tck', 'lpt_m_sub.fib',
                     'raf_m_sub.vtk', 'rpt_m_sub.dpy')
    bundles_list = [pjoin(base_dir, fname) for fname in bundle_fnames]
    return bundles_list, ref_anat
1330
1331
def get_bundle_atlas_hcp842():
    """Return the whole-brain atlas path and a glob over its bundles.

    Returns
    -------
    file1 : string
    file2 : string
    """
    atlas_dir = pjoin(dipy_home, 'bundle_atlas_hcp842', 'Atlas_80_Bundles')
    whole_brain = pjoin(atlas_dir, 'whole_brain', 'whole_brain_MNI.trk')
    # Glob pattern matching every bundle .trk file in the atlas.
    bundles_glob = pjoin(atlas_dir, 'bundles', '*.trk')
    return whole_brain, bundles_glob
1352
1353
def get_two_hcp842_bundles():
    """Return the paths of two HCP842 atlas bundles (AF_L and CST_L).

    Returns
    -------
    file1 : string
    file2 : string
    """
    bundles_dir = pjoin(dipy_home, 'bundle_atlas_hcp842',
                        'Atlas_80_Bundles', 'bundles')
    return pjoin(bundles_dir, 'AF_L.trk'), pjoin(bundles_dir, 'CST_L.trk')
1374
1375
def get_target_tractogram_hcp():
    """Return the path of the HCP target tractogram.

    Returns
    -------
    file1 : string
    """
    return pjoin(dipy_home, 'target_tractogram_hcp',
                 'hcp_tractogram', 'streamlines.trk')
1388