Commit 34cb132

0.10.0

1 parent 1e3caa7 commit 34cb132

1,784 files changed: +556321 -0 lines changed
Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
"""
Visualizing multiscale functional brain parcellations
=====================================================

This example shows how to download and fetch brain parcellations of
multiple networks using
:func:`nilearn.datasets.fetch_atlas_basc_multiscale_2015` and visualize
them using the plotting function :func:`nilearn.plotting.plot_roi`.

We show here only three different networks of the 'symmetric' version.
For more details about the different versions and networks, please refer
to the function's documentation.
"""

###############################################################################
# Retrieving multiscale group brain parcellations
# -----------------------------------------------

# Import the datasets module and use the `fetch_atlas_basc_multiscale_2015`
# function.
from nilearn import datasets

parcellations = datasets.fetch_atlas_basc_multiscale_2015(version='sym')

# We show here the networks of 64, 197 and 444 regions.
networks_64 = parcellations['scale064']
networks_197 = parcellations['scale197']
networks_444 = parcellations['scale444']

###############################################################################
# Visualizing brain parcellations
# -------------------------------

# Import the plotting module and use the `plot_roi` function, since the
# maps are in 3D.
from nilearn import plotting

# The cut coordinates of each plot are selected automatically.
# We manually set the colormap of our choice.
plotting.plot_roi(networks_64, cmap=plotting.cm.bwr,
                  title='64 regions of brain clusters')

plotting.plot_roi(networks_197, cmap=plotting.cm.bwr,
                  title='197 regions of brain clusters')

plotting.plot_roi(networks_444, cmap=plotting.cm.bwr_r,
                  title='444 regions of brain clusters')

plotting.show()
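# A hedged aside, not part of the original example: the fetcher returns more
# scales than the three shown above. Assuming the return value is a dict-like
# Bunch keyed by scale name, the available maps can be listed with:
for key in sorted(parcellations.keys()):
    if key.startswith('scale'):
        print(key, parcellations[key])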
Lines changed: 243 additions & 0 deletions
@@ -0,0 +1,243 @@
"""
=================================================
Example of pattern recognition on simulated data
=================================================

This example simulates data according to a very simple sketch of brain
imaging data and applies machine learning techniques to predict output
values.

We use a very simple generating function to simulate data, as in `Michel
et al. 2012 <http://dx.doi.org/10.1109/TMI.2011.2113378>`_ , a linear
model with a random design matrix **X**:

.. math::

   \\mathbf{y} = \\mathbf{X} \\mathbf{w} + \\mathbf{e}

* **w**: the weights of the linear model correspond to the predictive
  brain regions. Here, in the simulations, they form a 3D image with 5
  regions, four of which lie in opposite corners and one in the middle,
  as plotted below.

* **X**: the design matrix corresponds to the observed fMRI data. Here
  we simulate random normal variables and smooth them spatially, as in
  Gaussian random fields.

* **e** is random normal noise.

"""

# License: BSD

print(__doc__)

from time import time

import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg
from scipy.ndimage import gaussian_filter

from sklearn import linear_model, svm
from sklearn.utils import check_random_state
from sklearn.model_selection import KFold
from sklearn.feature_selection import f_regression
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline

import nibabel

from nilearn import decoding
import nilearn.masking
from nilearn.plotting import show


##############################################################################
# A function to generate data
##############################################################################
def create_simulation_data(snr=0, n_samples=2 * 100, size=12, random_state=1):
    generator = check_random_state(random_state)
    roi_size = 2  # size / 3
    smooth_X = 1
    # Coefs: five cubic ROIs, four in the corners and one in the middle
    w = np.zeros((size, size, size))
    w[0:roi_size, 0:roi_size, 0:roi_size] = -0.6
    w[-roi_size:, -roi_size:, 0:roi_size] = 0.5
    w[0:roi_size, -roi_size:, -roi_size:] = -0.6
    w[-roi_size:, 0:roi_size, -roi_size:] = 0.5
    w[(size - roi_size) // 2:(size + roi_size) // 2,
      (size - roi_size) // 2:(size + roi_size) // 2,
      (size - roi_size) // 2:(size + roi_size) // 2] = 0.5
    w = w.ravel()
    # Generate smooth background noise
    XX = generator.randn(n_samples, size, size, size)
    noise = []
    for i in range(n_samples):
        Xi = gaussian_filter(XX[i, :, :, :], smooth_X)
        Xi = Xi.ravel()
        noise.append(Xi)
    noise = np.array(noise)
    # Generate the signal y
    y = generator.randn(n_samples)
    X = np.dot(y[:, np.newaxis], w[np.newaxis])
    norm_noise = linalg.norm(X, 2) / np.exp(snr / 20.)
    noise_coef = norm_noise / linalg.norm(noise, 2)
    noise *= noise_coef
    snr = 20 * np.log(linalg.norm(X, 2) / linalg.norm(noise, 2))
    print("SNR: %.1f dB" % snr)
    # Mix the signal and the noise, then split into train/test sets
    X += noise
    X -= X.mean(axis=-1)[:, np.newaxis]
    X /= X.std(axis=-1)[:, np.newaxis]
    X_test = X[n_samples // 2:, :]
    X_train = X[:n_samples // 2, :]
    y_test = y[n_samples // 2:]
    y = y[:n_samples // 2]

    return X_train, X_test, y, y_test, snr, w, size


##############################################################################
# A simple function to plot slices
##############################################################################
def plot_slices(data, title=None):
    plt.figure(figsize=(5.5, 2.2))
    vmax = np.abs(data).max()
    for i in (0, 6, 11):
        plt.subplot(1, 3, i // 5 + 1)
        plt.imshow(data[:, :, i], vmin=-vmax, vmax=vmax,
                   interpolation="nearest", cmap=plt.cm.RdBu_r)
        plt.xticks(())
        plt.yticks(())
    plt.subplots_adjust(hspace=0.05, wspace=0.05, left=.03, right=.97, top=.9)
    if title is not None:
        plt.suptitle(title, y=.95)


###############################################################################
# Create data
###############################################################################
X_train, X_test, y_train, y_test, snr, coefs, size = \
    create_simulation_data(snr=-10, n_samples=100, size=12)

# Create masks for SearchLight. process_mask is the set of voxels where the
# SearchLight computation is performed. It is a subset of the brain mask,
# just to reduce computation time.
mask = np.ones((size, size, size), dtype=bool)
mask_img = nibabel.Nifti1Image(mask.astype("uint8"), np.eye(4))
process_mask = np.zeros((size, size, size), dtype=bool)
process_mask[:, :, 0] = True
process_mask[:, :, 6] = True
process_mask[:, :, 11] = True
process_mask_img = nibabel.Nifti1Image(process_mask.astype("uint8"), np.eye(4))

coefs = np.reshape(coefs, [size, size, size])
plot_slices(coefs, title="Ground truth")

###############################################################################
# Run different estimators
###############################################################################
#
# We can now run different estimators and look at their prediction score,
# as well as the feature maps that they recover. Namely, we will use
#
# * A support vector regression (`SVM
#   <http://scikit-learn.org/stable/modules/svm.html>`_)
#
# * An `elastic-net
#   <http://scikit-learn.org/stable/modules/linear_model.html#elastic-net>`_
#
# * A *Bayesian* ridge estimator, i.e. a ridge estimator that sets its
#   parameter according to a metaprior
#
# * A ridge estimator that sets its parameter by cross-validation
#
# Note that the names of `RidgeCV` and `ElasticNetCV` end in `CV`, which
# stands for cross-validation: from the list of possible `alpha` values
# that they are given, they choose the best by cross-validation.

bayesian_ridge = make_pipeline(
    StandardScaler(), linear_model.BayesianRidge()
)

estimators = [
    ('bayesian_ridge', bayesian_ridge),
    ('enet_cv', linear_model.ElasticNetCV(alphas=[5, 1, 0.5, 0.1],
                                          l1_ratio=0.05)),
    ('ridge_cv', linear_model.RidgeCV(alphas=[100, 10, 1, 0.1], cv=5)),
    ('svr', svm.SVR(kernel='linear', C=0.001)),
    ('searchlight', decoding.SearchLight(mask_img,
                                         process_mask_img=process_mask_img,
                                         radius=2.7,
                                         scoring='r2',
                                         estimator=svm.SVR(kernel="linear"),
                                         cv=KFold(n_splits=4),
                                         verbose=1,
                                         n_jobs=1,
                                         )
     )
]
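# A hedged aside, not part of the original example: for the `CV` estimators,
# the regularization selected by cross-validation can be inspected after
# fitting through the `alpha_` attribute, e.g.:
#
#     enet = linear_model.ElasticNetCV(alphas=[5, 1, 0.5, 0.1], l1_ratio=0.05)
#     enet.fit(X_train, y_train)
#     print(enet.alpha_)  # the alpha chosen among [5, 1, 0.5, 0.1]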
###############################################################################
# Run the estimators
#
# As the estimators expose a fairly consistent API, we can fit them all in
# a for loop: they all have a `fit` method for fitting the data, a `score`
# method to retrieve the prediction score, and, because they are all linear
# models, a `coef_` attribute that stores the estimated coefficients **w**.

for name, estimator in estimators:
    t1 = time()
    if name != "searchlight":
        estimator.fit(X_train, y_train)
    else:
        X = nilearn.masking.unmask(X_train, mask_img)
        estimator.fit(X, y_train)
        del X
    elapsed_time = time() - t1

    if name != 'searchlight':
        if name == 'bayesian_ridge':
            coefs = estimator.named_steps['bayesianridge'].coef_
        else:
            coefs = estimator.coef_
        coefs = np.reshape(coefs, [size, size, size])
        score = estimator.score(X_test, y_test)
        title = '%s: prediction score %.3f, training time: %.2fs' % (
            name, score, elapsed_time)

    else:  # SearchLight exposes a score map rather than coefficients
        coefs = estimator.scores_
        title = '%s: training time: %.2fs' % (
            estimator.__class__.__name__,
            elapsed_time)

    # We use the plot_slices function provided in the example to
    # plot the results
    plot_slices(coefs, title=title)

    print(title)

# For comparison, run a massively univariate analysis: an F-test per voxel
# with f_regression, plotted on a -log10(p-value) scale capped at 10.
f_values, p_values = f_regression(X_train, y_train)
p_values = np.reshape(p_values, (size, size, size))
p_values = -np.log10(p_values)
p_values[np.isnan(p_values)] = 0
p_values[p_values > 10] = 10
plot_slices(p_values, title="f_regress")

show()

###############################################################################
# An exercise to go further
###############################################################################
#
# As an exercise, you can use recursive feature elimination (RFE) with
# the SVM.
#
# Read the object's documentation to find out how to use RFE.
#
# **Performance tip**: increase the `step` parameter, or it will be very
# slow.

from sklearn.feature_selection import RFE
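# A hedged sketch of the exercise, not part of the original example: RFE
# wrapped around the linear SVR used above; `n_features_to_select` and
# `step` are illustrative values, not tuned.
rfe = RFE(estimator=svm.SVR(kernel='linear', C=0.001),
          n_features_to_select=100, step=100)
rfe.fit(X_train, y_train)

# Map the surviving features back into a 3D coefficient image for plotting.
rfe_coefs = np.zeros(X_train.shape[1])
rfe_coefs[rfe.support_] = rfe.estimator_.coef_.ravel()
plot_slices(np.reshape(rfe_coefs, [size, size, size]), title="RFE + SVR")
show()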

0 commit comments
