|
| 1 | +""" |
| 2 | +.. _ex-publication-figure: |
| 3 | +
|
| 4 | +=================================== |
| 5 | +Make figures more publication ready |
| 6 | +=================================== |
| 7 | +
|
| 8 | +In this example, we take some MNE plots and make some changes to make |
| 9 | +a figure closer to publication-ready. |
| 10 | +""" |
| 11 | + |
| 12 | +# Authors: Eric Larson <larson.eric.d@gmail.com> |
| 13 | +# Daniel McCloy <dan.mccloy@gmail.com> |
| 14 | +# |
| 15 | +# License: BSD (3-clause) |
| 16 | + |
| 17 | +import os.path as op |
| 18 | + |
| 19 | +import numpy as np |
| 20 | +import matplotlib.pyplot as plt |
| 21 | +from mpl_toolkits.axes_grid1 import make_axes_locatable, ImageGrid |
| 22 | + |
| 23 | +import mne |
| 24 | + |
###############################################################################
# Suppose we want a figure with an evoked plot on top, and the brain activation
# below, with the brain subplot slightly bigger than the evoked plot. Let's
# start by loading some :ref:`example data <sample-dataset>`.

data_path = mne.datasets.sample.data_path()
subjects_dir = op.join(data_path, 'subjects')
# both data files live in the same MEG/sample folder
sample_dir = op.join(data_path, 'MEG', 'sample')
fname_stc = op.join(sample_dir, 'sample_audvis-meg-eeg-lh.stc')
fname_evoked = op.join(sample_dir, 'sample_audvis-ave.fif')

# Load the "Left Auditory" evoked response, keep only gradiometer channels,
# and baseline-correct using the pre-stimulus interval.
evoked = mne.read_evokeds(fname_evoked, 'Left Auditory')
evoked.pick_types(meg='grad').apply_baseline((None, 0.))
# latency (in seconds) of the peak gradiometer response
max_t = evoked.get_peak()[1]

stc = mne.read_source_estimate(fname_stc)
| 40 | + |
###############################################################################
# During interactive plotting, we might see figures like this:

# default butterfly plot of the evoked response (gradiometers only)
evoked.plot()

# default STC plot: lateral view of both hemispheres at the peak latency
stc.plot(views='lat', hemi='split', size=(800, 400), subject='sample',
         subjects_dir=subjects_dir, initial_time=max_t)
| 48 | + |
###############################################################################
# To make a publication-ready figure, first we'll re-plot the brain on a white
# background, take a screenshot of it, and then crop out the white margins.
# While we're at it, let's change the colormap, set custom colormap limits, and
# remove the default colorbar (so we can add a smaller, vertical one later):

colormap = 'viridis'
clim = dict(kind='value', lims=[4, 8, 12])

# Re-plot the STC on a white background with its colorbar suppressed, grab
# the rendered image as an array, then close the 3D figure.
plot_kwargs = dict(views='lat', hemi='split', size=(800, 400),
                   subject='sample', subjects_dir=subjects_dir,
                   initial_time=max_t, background='w', colorbar=False,
                   clim=clim, colormap=colormap)
brain = stc.plot(**plot_kwargs)
screenshot = brain.screenshot()
brain.close()
| 64 | + |
###############################################################################
# Now let's crop out the white margins and the white gap between hemispheres.
# The screenshot has dimensions ``(h, w, 3)``, with the last axis being R, G, B
# values for each pixel, encoded as integers between ``0`` and ``255``. ``(255,
# 255, 255)`` encodes a white pixel, so we'll detect any pixels that differ
# from that:

# boolean masks: True for any pixel / row / column that is not pure white
nonwhite_pix = (screenshot != 255).any(-1)
nonwhite_row = nonwhite_pix.any(1)
nonwhite_col = nonwhite_pix.any(0)
# keep only rows and columns containing at least one non-white pixel
cropped_screenshot = screenshot[np.ix_(nonwhite_row, nonwhite_col)]

# before/after results
fig = plt.figure(figsize=(4, 4))
axes = ImageGrid(fig, 111, nrows_ncols=(2, 1), axes_pad=0.5)
images = (screenshot, cropped_screenshot)
titles = ('Before', 'After')
for ax, image, title in zip(axes, images, titles):
    ax.imshow(image)
    ax.set_title('{} cropping'.format(title))
| 84 | + |
###############################################################################
# A lot of figure settings can be adjusted after the figure is created, but
# many can also be adjusted in advance by updating the
# :data:`~matplotlib.rcParams` dictionary. This is especially useful when your
# script generates several figures that you want to all have the same style:

# Tweak the figure style: smaller tick/axis labels and a subtle dotted grid.
plt.rcParams['ytick.labelsize'] = 'small'
plt.rcParams['xtick.labelsize'] = 'small'
plt.rcParams['axes.labelsize'] = 'small'
plt.rcParams['axes.titlesize'] = 'medium'
plt.rcParams['grid.color'] = '0.75'
plt.rcParams['grid.linestyle'] = ':'
| 100 | + |
###############################################################################
# Now let's create our custom figure. There are lots of ways to do this step.
# Here we'll create the figure and the subplot axes in one step, specifying
# overall figure size, number and arrangement of subplots, and the ratio of
# subplot heights for each row using :mod:`GridSpec keywords
# <matplotlib.gridspec>`. Other approaches (using
# :func:`~matplotlib.pyplot.subplot2grid`, or adding each axes manually) are
# shown commented out, for reference.

# sphinx_gallery_thumbnail_number = 5
# figsize unit is inches; height_ratios makes the lower (brain) row larger
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(4.5, 3.),
                         gridspec_kw={'height_ratios': [3, 4]})

# alternate way #1: using subplot2grid
# fig = plt.figure(figsize=(4.5, 3.))
# axes = [plt.subplot2grid((7, 1), (0, 0), rowspan=3),
#         plt.subplot2grid((7, 1), (3, 0), rowspan=4)]

# alternate way #2: using figure-relative coordinates
# fig = plt.figure(figsize=(4.5, 3.))
# axes = [fig.add_axes([0.125, 0.58, 0.775, 0.3]),  # left, bot., width, height
#         fig.add_axes([0.125, 0.11, 0.775, 0.4])]

# we'll put the evoked plot in the upper axes, and the brain below
evoked_idx = 0
brain_idx = 1
| 128 | + |
# plot the evoked in the desired subplot, and add a line at peak activation
evoked.plot(axes=axes[evoked_idx])
peak_line = axes[evoked_idx].axvline(max_t, color='#66CCEE', ls='--')
# custom legend
axes[evoked_idx].legend(
    [axes[evoked_idx].lines[0], peak_line], ['MEG data', 'Peak time'],
    frameon=True, columnspacing=0.1, labelspacing=0.1,
    fontsize=8, fancybox=True, handlelength=1.8)
# remove the "N_ave" annotation. NOTE: assigning ``axes[...].texts = []``
# raises AttributeError on matplotlib >= 3.5 (``Axes.texts`` became a
# read-only ArtistList), so remove each text artist explicitly instead —
# this works on both old and new matplotlib versions.
for text in list(axes[evoked_idx].texts):
    text.remove()
# Remove spines and add grid
axes[evoked_idx].grid(True)
axes[evoked_idx].set_axisbelow(True)
for key in ('top', 'right'):
    axes[evoked_idx].spines[key].set(visible=False)
# Tweak the ticks and limits
axes[evoked_idx].set(
    yticks=np.arange(-200, 201, 100), xticks=np.arange(-0.2, 0.51, 0.1),
    ylim=[-225, 225], xlim=[-0.2, 0.5])
| 149 | + |
# now add the brain to the lower axes
brain_ax = axes[brain_idx]
brain_ax.imshow(cropped_screenshot)
brain_ax.axis('off')
# add a vertical colorbar with the same properties as the 3D one
cax = make_axes_locatable(brain_ax).append_axes('right', size='5%', pad=0.2)
mne.viz.plot_brain_colorbar(cax, clim, colormap, label='Activation (F)')
| 157 | + |
# tweak margins and spacing
fig.subplots_adjust(left=0.15, right=0.9, bottom=0.01, top=0.9,
                    wspace=0.1, hspace=0.5)

# add subplot labels ("A" above the evoked, "B" above the brain), anchored in
# figure coordinates at the top-left of each subplot
for label, ax in zip('AB', axes):
    ax.text(0.03, ax.get_position().ymax, label, transform=fig.transFigure,
            fontsize=12, fontweight='bold', va='top', ha='left')
0 commit comments