
Commit 305db6e

committed
add 0.8
1 parent 373c187 commit 305db6e

File tree

3,096 files changed: +494638 -0 lines changed

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
"""
Visualizing multiscale functional brain parcellations
=====================================================

This example shows how to download and fetch brain parcellations of
multiple networks using
:func:`nilearn.datasets.fetch_atlas_basc_multiscale_2015`
and visualize them using the plotting function
:func:`nilearn.plotting.plot_roi`.

We show here only three different networks of the 'symmetric' version.
For more details about the other versions and networks, please refer to
the function's documentation.
"""

###############################################################################
# Retrieving multiscale group brain parcellations
# -----------------------------------------------

# Import the datasets module and use the
# `fetch_atlas_basc_multiscale_2015` function
from nilearn import datasets

parcellations = datasets.fetch_atlas_basc_multiscale_2015(version='sym')

# We show here the parcellations with 64, 197 and 444 networks
networks_64 = parcellations['scale064']
networks_197 = parcellations['scale197']
networks_444 = parcellations['scale444']

###############################################################################
# Visualizing brain parcellations
# -------------------------------

# Import the plotting module and use the `plot_roi` function, since the
# maps are in 3D
from nilearn import plotting

# The cut coordinates of each plot are selected automatically;
# we only set a colormap of our choice
plotting.plot_roi(networks_64, cmap=plotting.cm.bwr,
                  title='64 regions of brain clusters')

plotting.plot_roi(networks_197, cmap=plotting.cm.bwr,
                  title='197 regions of brain clusters')

plotting.plot_roi(networks_444, cmap=plotting.cm.bwr_r,
                  title='444 regions of brain clusters')

plotting.show()
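
The committed file stops here. As a hedged sketch of the docstring's pointer to other versions and resolutions (it assumes the returned Bunch exposes one key per scale, 'scale007' through 'scale444'), one could also fetch the asymmetric variant and list everything it provides:

# Hypothetical exploration, not part of the committed example: fetch the
# asymmetric variant and print every parcellation scale in the Bunch.
# The 'scale*' key layout is an assumption about the return value.
parcellations_asym = datasets.fetch_atlas_basc_multiscale_2015(version='asym')
for key in sorted(parcellations_asym.keys()):
    if key.startswith('scale'):
        print(key, '->', parcellations_asym[key])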
Lines changed: 234 additions & 0 deletions
@@ -0,0 +1,234 @@
"""
=================================================
Example of pattern recognition on simulated data
=================================================

This example simulates data according to a very simple sketch of brain
imaging data and applies machine learning techniques to predict output
values.

We use a very simple generating function to simulate data, as in `Michel
et al. 2012 <http://dx.doi.org/10.1109/TMI.2011.2113378>`_ , a linear
model with a random design matrix **X**:

.. math::

    \\mathbf{y} = \\mathbf{X} \\mathbf{w} + \\mathbf{e}

* **w**: the weights of the linear model correspond to the predictive
  brain regions. Here, in the simulations, they form a 3D image with five
  regions: four in opposite corners and one in the middle, as plotted
  below.

* **X**: the design matrix corresponds to the observed fMRI data. Here
  we simulate random normal variables and smooth them as in Gaussian
  fields.

* **e** is random normal noise.

"""

# License: BSD

print(__doc__)

from time import time

import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg, ndimage

from sklearn import linear_model, svm
from sklearn.utils import check_random_state
from sklearn.model_selection import KFold
from sklearn.feature_selection import f_regression

import nibabel

from nilearn import decoding
import nilearn.masking
from nilearn.plotting import show
##############################################################################
# A function to generate data
##############################################################################
def create_simulation_data(snr=0, n_samples=2 * 100, size=12, random_state=1):
    generator = check_random_state(random_state)
    roi_size = 2  # size / 3
    smooth_X = 1
    # Coefs: five cubic regions of interest, four in the corners of the
    # volume and one in the middle
    w = np.zeros((size, size, size))
    w[0:roi_size, 0:roi_size, 0:roi_size] = -0.6
    w[-roi_size:, -roi_size:, 0:roi_size] = 0.5
    w[0:roi_size, -roi_size:, -roi_size:] = -0.6
    w[-roi_size:, 0:roi_size, -roi_size:] = 0.5
    w[(size - roi_size) // 2:(size + roi_size) // 2,
      (size - roi_size) // 2:(size + roi_size) // 2,
      (size - roi_size) // 2:(size + roi_size) // 2] = 0.5
    w = w.ravel()
    # Generate smooth background noise
    XX = generator.randn(n_samples, size, size, size)
    noise = []
    for i in range(n_samples):
        Xi = ndimage.filters.gaussian_filter(XX[i, :, :, :], smooth_X)
        Xi = Xi.ravel()
        noise.append(Xi)
    noise = np.array(noise)
    # Generate the signal y
    y = generator.randn(n_samples)
    X = np.dot(y[:, np.newaxis], w[np.newaxis])
    norm_noise = linalg.norm(X, 2) / np.exp(snr / 20.)
    noise_coef = norm_noise / linalg.norm(noise, 2)
    noise *= noise_coef
    # Note: np.log is the natural logarithm, matching the np.exp used to set
    # the noise level above, so this "dB" figure is on a natural-log scale
    snr = 20 * np.log(linalg.norm(X, 2) / linalg.norm(noise, 2))
    print("SNR: %.1f dB" % snr)
    # Mixing of signal + noise and splitting into train/test
    X += noise
    X -= X.mean(axis=-1)[:, np.newaxis]
    X /= X.std(axis=-1)[:, np.newaxis]
    X_test = X[n_samples // 2:, :]
    X_train = X[:n_samples // 2, :]
    y_test = y[n_samples // 2:]
    y = y[:n_samples // 2]

    return X_train, X_test, y, y_test, snr, w, size
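
A quick sanity check (a sketch, not part of the committed file): each returned sample is a flattened size**3 voxel volume, split evenly between train and test halves.

# Hypothetical sanity check, not part of the committed example: with
# n_samples=100 and size=12, expect (50, 1728) for each design matrix.
X_tr, X_te, y_tr, y_te, _, _, _ = create_simulation_data(
    snr=-10, n_samples=100, size=12)
print(X_tr.shape, X_te.shape)  # expected: (50, 1728) (50, 1728)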
##############################################################################
# A simple function to plot slices
##############################################################################
def plot_slices(data, title=None):
    plt.figure(figsize=(5.5, 2.2))
    vmax = np.abs(data).max()
    # Show the three slices that intersect the simulated regions of interest
    for i in (0, 6, 11):
        plt.subplot(1, 3, i // 5 + 1)
        plt.imshow(data[:, :, i], vmin=-vmax, vmax=vmax,
                   interpolation="nearest", cmap=plt.cm.RdBu_r)
        plt.xticks(())
        plt.yticks(())
    plt.subplots_adjust(hspace=0.05, wspace=0.05, left=.03, right=.97, top=.9)
    if title is not None:
        plt.suptitle(title, y=.95)
###############################################################################
# Create data
###############################################################################
X_train, X_test, y_train, y_test, snr, coefs, size = \
    create_simulation_data(snr=-10, n_samples=100, size=12)

# Create masks for SearchLight. process_mask is the voxels where SearchLight
# computation is performed. It is a subset of the brain mask, just to reduce
# computation time.
mask = np.ones((size, size, size), dtype=bool)
mask_img = nibabel.Nifti1Image(mask.astype(int), np.eye(4))
process_mask = np.zeros((size, size, size), dtype=bool)
process_mask[:, :, 0] = True
process_mask[:, :, 6] = True
process_mask[:, :, 11] = True
process_mask_img = nibabel.Nifti1Image(process_mask.astype(int), np.eye(4))

coefs = np.reshape(coefs, [size, size, size])
plot_slices(coefs, title="Ground truth")
###############################################################################
# Run different estimators
###############################################################################
#
# We can now run different estimators and look at their prediction score,
# as well as the feature maps that they recover. Namely, we will use
#
# * A support vector regression (`SVM
#   <http://scikit-learn.org/stable/modules/svm.html>`_)
#
# * An `elastic-net
#   <http://scikit-learn.org/stable/modules/linear_model.html#elastic-net>`_
#
# * A *Bayesian* ridge estimator, i.e. a ridge estimator that sets its
#   parameter according to a metaprior
#
# * A ridge estimator that sets its parameter by cross-validation
#
# * A SearchLight, i.e. an SVR fit in a small sphere of voxels around each
#   voxel of the process mask
#
# Note that the `RidgeCV` and the `ElasticNetCV` have names ending in `CV`,
# which stands for `cross-validation`: in the list of possible `alpha`
# values that they are given, they choose the best by cross-validation.

estimators = [
    ('bayesian_ridge', linear_model.BayesianRidge(normalize=True)),
    ('enet_cv', linear_model.ElasticNetCV(alphas=[5, 1, 0.5, 0.1],
                                          l1_ratio=0.05)),
    ('ridge_cv', linear_model.RidgeCV(alphas=[100, 10, 1, 0.1], cv=5)),
    ('svr', svm.SVR(kernel='linear', C=0.001)),
    ('searchlight', decoding.SearchLight(mask_img,
                                         process_mask_img=process_mask_img,
                                         radius=2.7,
                                         scoring='r2',
                                         estimator=svm.SVR(kernel="linear"),
                                         cv=KFold(n_splits=4),
                                         verbose=1,
                                         n_jobs=1,
                                         )
     )
]
###############################################################################
# Run the estimators
#
# As the estimators expose a fairly consistent API, we can fit them all in
# a for loop: they all have a `fit` method for fitting the data, a `score`
# method to retrieve the prediction score, and because they are all linear
# models, a `coef_` attribute that stores the coefficients **w** estimated.

for name, estimator in estimators:
    t1 = time()
    if name != "searchlight":
        estimator.fit(X_train, y_train)
    else:
        # SearchLight expects 4D image input, so unmask the 2D design
        # matrix back into a sequence of 3D volumes
        X = nilearn.masking.unmask(X_train, mask_img)
        estimator.fit(X, y_train)
        del X
    elapsed_time = time() - t1

    if name != 'searchlight':
        coefs = estimator.coef_
        coefs = np.reshape(coefs, [size, size, size])
        score = estimator.score(X_test, y_test)
        title = '%s: prediction score %.3f, training time: %.2fs' % (
            estimator.__class__.__name__, score,
            elapsed_time)

    else:  # SearchLight
        coefs = estimator.scores_
        title = '%s: training time: %.2fs' % (
            estimator.__class__.__name__,
            elapsed_time)

    # We use the plot_slices function provided in the example to
    # plot the results
    plot_slices(coefs, title=title)

    print(title)
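
As a side note, and purely as a sketch not present in the committed file: after fitting, the cross-validated estimators expose the regularization strength they selected in an `alpha_` attribute.

# Hypothetical follow-up, not part of the committed example: inspect the
# alpha value each CV estimator selected during fitting
fitted = dict(estimators)
print("ElasticNetCV selected alpha=%s" % fitted['enet_cv'].alpha_)
print("RidgeCV selected alpha=%s" % fitted['ridge_cv'].alpha_)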
# Univariate baseline: an F-test of each voxel against y, displayed as
# -log10(p-values), capped at 10
f_values, p_values = f_regression(X_train, y_train)
p_values = np.reshape(p_values, (size, size, size))
p_values = -np.log10(p_values)
p_values[np.isnan(p_values)] = 0
p_values[p_values > 10] = 10
plot_slices(p_values, title="f_regress")

show()
###############################################################################
# An exercise to go further
###############################################################################
#
# As an exercise, you can use recursive feature elimination (RFE) with
# the SVM
#
# Read the object's documentation to find out how to use RFE.
#
# **Performance tip**: increase the `step` parameter, or it will be very
# slow.

from sklearn.feature_selection import RFE
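
A hedged sketch of one possible solution (not part of the committed file; the `n_features_to_select` and `step` values are illustrative, not prescribed by the exercise):

# Hypothetical solution sketch, not part of the committed example: wrap
# the linear SVR in RFE, eliminating 25% of the remaining features per
# iteration so the elimination stays reasonably fast.
rfe = RFE(estimator=svm.SVR(kernel='linear', C=0.001),
          n_features_to_select=100, step=0.25)
rfe.fit(X_train, y_train)
print("RFE + SVR prediction score: %.3f" % rfe.score(X_test, y_test))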
