1""" 2First level analysis of a complete BIDS dataset from openneuro 3=============================================================== 4 5 6Full step-by-step example of fitting a GLM to perform a first level analysis 7in an openneuro :term:`BIDS` dataset. We demonstrate how :term:`BIDS` 8derivatives can be exploited to perform a simple one subject analysis with 9minimal code. Details about the :term:`BIDS` standard are available at 10`http://bids.neuroimaging.io/ <http://bids.neuroimaging.io/>`_. 11We also demonstrate how to download individual groups of files from the 12Openneuro s3 bucket. 13 14More specifically: 15 161. Download an :term:`fMRI` :term:`BIDS` dataset with derivatives from openneuro. 172. Extract first level model objects automatically from the :term:`BIDS` dataset. 183. Demonstrate Quality assurance of Nistats estimation against available FSL. 19 estimation in the openneuro dataset. 204. Display contrast plot and uncorrected first level statistics table report. 21 22 23 24To run this example, you must launch IPython via ``ipython 25--matplotlib`` in a terminal, or use the Jupyter notebook. 26 27.. contents:: **Contents** 28 :local: 29 :depth: 1 30""" 31############################################################################## 32# Fetch openneuro BIDS dataset 33# ----------------------------- 34# We download one subject from the stopsignal task in the ds000030 V4 :term:`BIDS` 35# dataset available in openneuro. 36# This dataset contains the necessary information to run a statistical analysis 37# using Nilearn. The dataset also contains statistical results from a previous 38# FSL analysis that we can employ for comparison with the Nilearn estimation. 
from nilearn.datasets import (
    fetch_openneuro_dataset,
    fetch_openneuro_dataset_index,
    select_from_index,
)

# Grab the full file index of the dataset, then restrict it to the single
# subject / single task we need before downloading anything.
_, urls = fetch_openneuro_dataset_index()

# Everything irrelevant to the stopsignal first-level analysis is excluded:
# group/phenotype/QC folders, surface and T1w-space derivatives, non-fMRI
# modalities, and all other tasks.
exclusion_patterns = [
    '*group*',
    '*phenotype*',
    '*mriqc*',
    '*parameter_plots*',
    '*physio_plots*',
    '*space-fsaverage*',
    '*space-T1w*',
    '*dwi*',
    '*beh*',
    '*task-bart*',
    '*task-rest*',
    '*task-scap*',
    '*task-task*',
]
urls = select_from_index(
    urls, exclusion_filters=exclusion_patterns, n_subjects=1)

# Download only the selected files; data_dir points at the local BIDS root.
data_dir, _ = fetch_openneuro_dataset(urls=urls)

##############################################################################
# Obtain FirstLevelModel objects automatically and fit arguments
# ---------------------------------------------------------------
# From the dataset directory we automatically obtain FirstLevelModel objects
# with their subject_id filled from the :term:`BIDS` dataset. Moreover we
# obtain, for each model, the list of run images and their respective events
# and confound regressors. Those are inferred from the confounds.tsv files
# available in the :term:`BIDS` dataset.
# To get the first level models we have to specify the dataset directory,
# the task_label and the space_label as specified in the file names.
# We also have to provide the folder with the desired derivatives, that in
# this case were produced by the fmriprep :term:`BIDS` app.
from nilearn.glm.first_level import first_level_from_bids

task_label = 'stopsignal'
space_label = 'MNI152NLin2009cAsym'
derivatives_folder = 'derivatives/fmriprep'
(models, models_run_imgs,
 models_events, models_confounds) = first_level_from_bids(
    data_dir, task_label, space_label,
    smoothing_fwhm=5.0,
    derivatives_folder=derivatives_folder)

#############################################################################
# Access the model and model arguments of the subject and process events.
# Work with the single downloaded subject: its model, run images, events and
# confound regressors.
model = models[0]
imgs = models_run_imgs[0]
events = models_events[0]
confounds = models_confounds[0]
subject = f'sub-{model.subject_label}'

import os

from nilearn._utils.glm import get_design_from_fslmat

# Reuse the design matrix produced by the FSL analysis shipped with the
# dataset, so the Nilearn fit is directly comparable to the FSL one.
fsl_design_matrix_path = os.path.join(
    data_dir, 'derivatives', 'task', subject, 'stopsignal.feat', 'design.mat')
design_matrix = get_design_from_fslmat(
    fsl_design_matrix_path, column_names=None)

#############################################################################
# We identify the columns of the Go and StopSuccess conditions of the
# design matrix inferred from the FSL file, to use them later for contrast
# definition.
design_columns = [f'cond_{i:02d}' for i in range(len(design_matrix.columns))]
design_columns[0] = 'Go'
design_columns[4] = 'StopSuccess'
design_matrix.columns = design_columns

############################################################################
# First level model estimation (one subject)
# -------------------------------------------
# We fit the first level model for one subject.
model.fit(imgs, design_matrices=[design_matrix])

#############################################################################
# Then we compute the StopSuccess - Go contrast. We can use the column names
# of the design matrix.
z_map = model.compute_contrast('StopSuccess - Go')

#############################################################################
# We show the agreement between the Nilearn estimation and the FSL estimation
# available in the dataset.
import nibabel as nib

# Load the z-map that FSL computed for the same contrast (zstat12) so we can
# compare it against the Nilearn estimate.
fsl_z_map = nib.load(
    os.path.join(data_dir, 'derivatives', 'task', subject, 'stopsignal.feat',
                 'stats', 'zstat12.nii.gz'))

import matplotlib.pyplot as plt
from scipy.stats import norm

from nilearn import plotting

# Uncorrected p < 0.001 threshold expressed as a z-value.
threshold = norm.isf(0.001)
plotting.plot_glass_brain(
    z_map,
    colorbar=True,
    threshold=threshold,
    title='Nilearn Z map of "StopSuccess - Go" (unc p<0.001)',
    plot_abs=False,
    display_mode='ortho')
plotting.plot_glass_brain(
    fsl_z_map,
    colorbar=True,
    threshold=threshold,
    title='FSL Z map of "StopSuccess - Go" (unc p<0.001)',
    plot_abs=False,
    display_mode='ortho')
plt.show()

from nilearn.plotting import plot_img_comparison

# Voxel-wise scatter/histogram comparison of the two estimations within the
# model's mask.
plot_img_comparison([z_map], [fsl_z_map], model.masker_,
                    ref_label='Nilearn', src_label='FSL')
plt.show()

#############################################################################
# Simple statistical report of thresholded contrast
# -----------------------------------------------------
# We display the contrast plot and table with cluster information
from nilearn.plotting import plot_contrast_matrix

plot_contrast_matrix('StopSuccess - Go', design_matrix)
plotting.plot_glass_brain(
    z_map,
    colorbar=True,
    threshold=norm.isf(0.001),
    plot_abs=False,
    display_mode='z',
    figure=plt.figure(figsize=(4, 4)))
plt.show()

###############################################################################
# We can get a latex table from a Pandas Dataframe for display and publication
# purposes
from nilearn.reporting import get_clusters_table

print(get_clusters_table(z_map, norm.isf(0.001), 10).to_latex())

#########################################################################
# Generating a report
# -------------------
# Using the computed FirstLevelModel and contrast information,
# we can quickly create a summary report.
from nilearn.reporting import make_glm_report

# Build an HTML summary report for the fitted model and the contrast of
# interest.
report = make_glm_report(
    model=model,
    contrasts='StopSuccess - Go',
)

#########################################################################
# We have several ways to access the report:

# report  # This report can be viewed in a notebook
# report.save_as_html('report.html')
# report.open_in_browser()