[med-svn] [mne-python] 01/03: Imported Upstream version 0.7~rc5

Andreas Tille tille at debian.org
Mon Nov 25 22:16:21 UTC 2013


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository mne-python.

commit 74f36b17633079ca649a8c06579193f88bd1c72b
Author: Andreas Tille <tille at debian.org>
Date:   Mon Nov 25 22:57:48 2013 +0100

    Imported Upstream version 0.7~rc5
---
 doc/source/getting_started.rst                     |    9 +-
 doc/source/python_reference.rst                    |    9 +
 doc/source/whats_new.rst                           |   22 +-
 .../connectivity/plot_cwt_sensor_connectivity.py   |    6 +-
 examples/datasets/plot_spm_faces_dataset.py        |    2 -
 examples/inverse/plot_gamma_map_inverse.py         |    3 -
 examples/inverse/plot_mixed_norm_L21_inverse.py    |    3 -
 .../plot_time_frequency_mixed_norm_inverse.py      |    3 -
 examples/plot_evoked_delayed_ssp.py                |    4 +-
 examples/plot_megsim_data.py                       |    2 -
 examples/plot_shift_evoked.py                      |   11 +-
 examples/plot_ssp_projs_topomaps.py                |   10 +-
 examples/plot_topo_channel_epochs_image.py         |    3 +-
 examples/preprocessing/plot_ica_from_epochs.py     |    1 -
 examples/realtime/plot_compute_rt_average.py       |   21 +-
 examples/stats/plot_sensor_permutation_test.py     |    3 +-
 .../time_frequency/plot_single_trial_spectra.py    |    2 +-
 examples/time_frequency/plot_tfr_topography.py     |    2 +-
 mne/__init__.py                                    |    3 +
 mne/commands/mne_bti2fiff.py                       |    2 +-
 mne/connectivity/spectral.py                       |   15 +-
 mne/coreg.py                                       |    5 +
 mne/cov.py                                         |    2 +-
 mne/fiff/kit/tests/__init__.py                     |    3 +
 mne/fiff/kit/tests/data/test_mrk_post.sqd          |  Bin 0 -> 68824 bytes
 mne/fiff/kit/tests/data/test_mrk_pre.sqd           |  Bin 0 -> 68824 bytes
 mne/fiff/raw.py                                    |    2 +-
 mne/fiff/tests/__init__.py                         |    3 +
 mne/forward/forward.py                             |   13 +-
 mne/forward/tests/test_forward.py                  |    1 +
 mne/forward/tests/test_make_forward.py             |    3 +-
 mne/gui/__init__.py                                |   89 ++
 mne/gui/_coreg_gui.py                              | 1372 ++++++++++++++++++++
 mne/gui/_fiducials_gui.py                          |  462 +++++++
 mne/gui/_file_traits.py                            |  427 ++++++
 mne/gui/_kit2fiff_gui.py                           |  518 ++++++++
 mne/gui/_marker_gui.py                             |  447 +++++++
 mne/gui/_viewer.py                                 |  340 +++++
 mne/{fiff/kit => gui}/tests/__init__.py            |    0
 mne/gui/tests/test_coreg_gui.py                    |  157 +++
 mne/gui/tests/test_fiducials_gui.py                |   68 +
 mne/gui/tests/test_file_traits.py                  |  110 ++
 mne/gui/tests/test_kit2fiff_gui.py                 |   65 +
 mne/gui/tests/test_marker_gui.py                   |   47 +
 mne/layouts/__init__.py                            |    3 +-
 mne/layouts/layout.py                              |  119 +-
 mne/layouts/tests/test_layout.py                   |  105 +-
 mne/minimum_norm/inverse.py                        |    6 +-
 mne/minimum_norm/tests/test_inverse.py             |    4 -
 mne/preprocessing/ica.py                           |    6 +-
 mne/preprocessing/tests/test_ica.py                |    5 +
 mne/source_estimate.py                             |   64 +-
 mne/tests/test_viz.py                              |  171 +--
 mne/utils.py                                       |   35 +
 mne/viz.py                                         |  115 +-
 setup.py                                           |    1 +
 56 files changed, 4613 insertions(+), 291 deletions(-)

diff --git a/doc/source/getting_started.rst b/doc/source/getting_started.rst
index 79c4149..826b680 100644
--- a/doc/source/getting_started.rst
+++ b/doc/source/getting_started.rst
@@ -70,12 +70,11 @@ development environments and functionality is best tested for them:
     * Debian / Ubuntu standard system Python + Scipy stack
 
 
-Note. To make Anaconda working with all examples and our test-suite a few
-manual adjustments might be necessary. This may require compiling the recent
-matplotlib development verion (http://goo.gl/bFZHhX, http://goo.gl/S81KHv)
-and manually adjusting the python interpreter invoked by the nosetests and
+Note for developers. To make Anaconda work with our test-suite, a few
+manual adjustments might be necessary. This may require
+manually adjusting the Python interpreter invoked by the nosetests and
 the sphinx-build 'binaries' (http://goo.gl/Atqh26).
-Tested on a recent MacBook Pro running Mac OS X 10.8
+Tested on a recent MacBook Pro running Mac OS X 10.8 and Mac OS X 10.9
 
 If you use another Python setup and you encounter some difficulties please 
 report them on the MNE mailing list or on github to get assistance.
diff --git a/doc/source/python_reference.rst b/doc/source/python_reference.rst
index ba12951..0fef8d9 100644
--- a/doc/source/python_reference.rst
+++ b/doc/source/python_reference.rst
@@ -342,10 +342,19 @@ MRI Processing
 
 .. currentmodule:: mne
 
+Step by step instructions for using :func:`gui.coregistration`:
+
+ - `Coregistration for subjects with structural MRI 
+   <http://www.slideshare.net/slideshow/embed_code/28598463>`_ 
+ - `Scaling a template MRI for subjects for which no MRI is available
+   <http://www.slideshare.net/slideshow/embed_code/28598561>`_
+
 .. autosummary::
    :toctree: generated/
    :template: function.rst
 
+   gui.coregistration
+   gui.fiducials
    create_default_subject
    scale_mri
    scale_labels
diff --git a/doc/source/whats_new.rst b/doc/source/whats_new.rst
index 45c535c..66fbc32 100644
--- a/doc/source/whats_new.rst
+++ b/doc/source/whats_new.rst
@@ -33,7 +33,9 @@ Changelog
 
    - Add rejection buffer to ICA.decompose* methods by `Denis Engemann`_ and `Alex Gramfort`_
 
-   - Add polygonal surface decimation function by `Denis Engemann`_ and `Alex Gramfort`_
+   - Improve ICA computation speed and memory usage by `Denis Engemann`_ and `Alex Gramfort`_
+
+   - Add polygonal surface decimation function to preprocess head surfaces for coregistration by `Denis Engemann`_ and `Alex Gramfort`_
 
    - DICS time-frequency beamforming for epochs, evoked and for estimating source power by `Roman Goj`_, `Alex Gramfort`_ and `Denis Engemann`_
 
@@ -74,13 +76,25 @@ Changelog
    - Add SPM face data set by `Denis Engemann`_ `Martin Luessi`_ and `Alex Gramfort`_
 
    - Support reading of EDF+,BDF data by `Teon Brooks`_
+   
+   - Tools for scaling MRIs (mne.scale_mri) by `Christian Brodbeck`_ 
+   
+   - GUI for head-MRI coregistration (mne.gui.coregistration) by `Christian Brodbeck`_
+
+   - GUI for kit2fiff conversion (mne.gui.kit2fiff) by `Christian Brodbeck`_
 
    - Support reading of EEG BrainVision data by `Teon Brooks`_
 
+   - Improve CTF compensation handling by `Martin Luessi`_ and `Eric Larson`_
+   
+   - Improve and extend automated layout guessing by `Denis Engemann`_
+
+   - Add Continuum Analytics Anaconda support by `Denis Engemann`_
+
 API
 ~~~
 
-   - The pick_normal parameter for minimum norm solvers has been renamed as pick_ori and normal orientation picking is now achieved by passing the value "normal" for the pick_ori parameter.
+   - The pick_normal parameter for minimum norm solvers has been renamed as `pick_ori` and normal orientation picking is now achieved by passing the value "normal" for the `pick_ori` parameter.
 
    - ICA objects now expose the measurement info of the object fitted.
 
@@ -88,6 +102,10 @@ API
 
    - Removed deprecated read/write_stc/w, use SourceEstimate methods instead
 
+   - The `chs` argument in `mne.layouts.find_layout` is deprecated and will be removed in MNE-Python 0.9. Use `info` instead.
+   
+   - `plot_evoked` and `Epochs.plot` now open a new figure by default. To plot on an existing figure please specify the `axes` parameter.
+
 .. _changes_0_6:
 
 Version 0.6
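
For reference, a minimal sketch of the two API notes above (layout guessing
from the measurement info, and drawing onto an existing axes); the file name
and calls mirror the examples changed later in this patch:

    import matplotlib.pyplot as plt
    import mne
    from mne.datasets import sample

    data_path = sample.data_path()
    ave_fname = data_path + '/MEG/sample/sample_audvis-ave.fif'
    evoked = mne.fiff.read_evoked(ave_fname, setno='Left Auditory')

    # find_layout now works from the measurement info (the old `chs`
    # argument is deprecated); the Layout can be passed to the topo plots
    layout = mne.find_layout(evoked.info, 'meg')

    # plot_evoked opens a new figure by default; pass `axes` to reuse one
    fig, ax = plt.subplots(1)
    evoked.plot(axes=ax)
    plt.show()
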
diff --git a/examples/connectivity/plot_cwt_sensor_connectivity.py b/examples/connectivity/plot_cwt_sensor_connectivity.py
index 715bded..6a5662c 100644
--- a/examples/connectivity/plot_cwt_sensor_connectivity.py
+++ b/examples/connectivity/plot_cwt_sensor_connectivity.py
@@ -24,7 +24,6 @@ import mne
 from mne import fiff
 from mne.connectivity import spectral_connectivity, seed_target_indices
 from mne.datasets import sample
-from mne.layouts import read_layout
 from mne.viz import plot_topo_tfr
 
 ###############################################################################
@@ -73,7 +72,8 @@ con[np.where(indices[1] == seed)] = 1.0
 
 # Show topography of connectivity from seed
 import matplotlib.pyplot as plt
-layout = read_layout('Vectorview-all')
 title = 'WPLI2 - Visual - Seed %s' % seed_ch
-plot_topo_tfr(epochs, con, freqs, layout, title=title)
+
+layout = mne.find_layout(epochs.info, 'meg')  # use full layout
+plot_topo_tfr(epochs, con, freqs, layout=layout, title=title)
 plt.show()
diff --git a/examples/datasets/plot_spm_faces_dataset.py b/examples/datasets/plot_spm_faces_dataset.py
index a854034..e62ac65 100644
--- a/examples/datasets/plot_spm_faces_dataset.py
+++ b/examples/datasets/plot_spm_faces_dataset.py
@@ -37,7 +37,6 @@ raw_fname = data_path + '/MEG/spm/SPM_CTF_MEG_example_faces%d_3D_raw.fif'
 
 raw = fiff.Raw(raw_fname % 1, preload=True) # Take first run
 
-
 picks = mne.fiff.pick_types(raw.info, meg=True, exclude='bads')
 raw.filter(1, 45, method='iir')
 
@@ -73,7 +72,6 @@ contrast = evoked[1] - evoked[0]
 evoked.append(contrast)
 
 for e in evoked:
-    plt.figure()
     e.plot(ylim=dict(mag=[-400, 400]))
 
 plt.show()
diff --git a/examples/inverse/plot_gamma_map_inverse.py b/examples/inverse/plot_gamma_map_inverse.py
index e092197..edf2243 100644
--- a/examples/inverse/plot_gamma_map_inverse.py
+++ b/examples/inverse/plot_gamma_map_inverse.py
@@ -13,7 +13,6 @@ NeuroImage, vol. 44, no. 3, pp. 947?66, Mar. 2009.
 print __doc__
 
 import numpy as np
-import matplotlib.pyplot as plt
 
 import mne
 from mne.datasets import sample
@@ -57,11 +56,9 @@ plot_sparse_source_estimates(forward['src'], stc, bgcolor=(1, 1, 1),
 # Show the evoked response and the residual for gradiometers
 ylim = dict(grad=[-120, 120])
 evoked = mne.fiff.pick_types_evoked(evoked, meg='grad', exclude='bads')
-plt.figure()
 evoked.plot(titles=dict(grad='Evoked Response Gradiometers'), ylim=ylim,
             proj=True)
 
 residual = mne.fiff.pick_types_evoked(residual, meg='grad', exclude='bads')
-plt.figure()
 residual.plot(titles=dict(grad='Residuals Gradiometers'), ylim=ylim,
               proj=True)
diff --git a/examples/inverse/plot_mixed_norm_L21_inverse.py b/examples/inverse/plot_mixed_norm_L21_inverse.py
index 4f9b9ff..44ff446 100644
--- a/examples/inverse/plot_mixed_norm_L21_inverse.py
+++ b/examples/inverse/plot_mixed_norm_L21_inverse.py
@@ -38,8 +38,6 @@ forward = mne.read_forward_solution(fwd_fname, surf_ori=True)
 
 cov = mne.cov.regularize(cov, evoked.info)
 
-import matplotlib.pyplot as plt
-plt.figure()
 ylim = dict(eeg=[-10, 10], grad=[-400, 400], mag=[-600, 600])
 evoked.plot(ylim=ylim, proj=True)
 
@@ -60,7 +58,6 @@ stc, residual = mixed_norm(evoked, forward, cov, alpha, loose=loose,
                            active_set_size=10, debias=True, weights=stc_dspm,
                            weights_min=8., return_residual=True)
 
-plt.figure()
 residual.plot(ylim=ylim, proj=True)
 
 ###############################################################################
diff --git a/examples/inverse/plot_time_frequency_mixed_norm_inverse.py b/examples/inverse/plot_time_frequency_mixed_norm_inverse.py
index 48f2a27..831a20a 100644
--- a/examples/inverse/plot_time_frequency_mixed_norm_inverse.py
+++ b/examples/inverse/plot_time_frequency_mixed_norm_inverse.py
@@ -96,14 +96,11 @@ stc.crop(tmin=-0.05, tmax=0.3)
 evoked.crop(tmin=-0.05, tmax=0.3)
 residual.crop(tmin=-0.05, tmax=0.3)
 
-import matplotlib.pyplot as plt
-plt.figure()
 ylim = dict(eeg=[-10, 10], grad=[-200, 250], mag=[-600, 600])
 picks = fiff.pick_types(evoked.info, meg='grad', exclude='bads')
 evoked.plot(picks=picks, ylim=ylim, proj=True,
             titles=dict(grad='Evoked Response (grad)'))
 
-plt.figure()
 picks = fiff.pick_types(residual.info, meg='grad', exclude='bads')
 residual.plot(picks=picks, ylim=ylim, proj=True,
               titles=dict(grad='Residual (grad)'))
diff --git a/examples/plot_evoked_delayed_ssp.py b/examples/plot_evoked_delayed_ssp.py
index 313f944..597736c 100644
--- a/examples/plot_evoked_delayed_ssp.py
+++ b/examples/plot_evoked_delayed_ssp.py
@@ -76,7 +76,7 @@ for proj, ax in zip(projs, axes.flatten()):
     evoked.copy().apply_proj().plot(axes=ax)  # apply on a copy of evoked
     ax.set_title('+ %s' % proj['desc'])  # extract description.
 plt.suptitle(title)
-plt.show()
+mne.viz.tight_layout()
 
 # We also could have easily visualized the impact of single projection vectors
 # by deleting the vector directly after visualizing the changes.
@@ -88,8 +88,6 @@ plt.show()
 # check box that allows us to reversibly select projection vectors. Any
 # modification of the selection will immediately cause the figure to update.
 
-plt.figure()
 evoked.plot(proj='interactive')
-plt.show()
 
 # Hint: the same works with evoked.plot_topomap
diff --git a/examples/plot_megsim_data.py b/examples/plot_megsim_data.py
index 1521563..d9ceec7 100644
--- a/examples/plot_megsim_data.py
+++ b/examples/plot_megsim_data.py
@@ -44,10 +44,8 @@ epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                     baseline=(None, 0),
                     reject=dict(grad=4000e-13, mag=4e-12, eog=150e-6))
 evoked = epochs.average()  # average epochs and get an Evoked dataset.
-plt.figure()
 evoked.plot()
 
 # Compare to the simulated data
 evoked_sim = mne.fiff.Evoked(evoked_fnames[0])
-plt.figure()
 evoked_sim.plot()
diff --git a/examples/plot_shift_evoked.py b/examples/plot_shift_evoked.py
index bec25b5..98a5f96 100644
--- a/examples/plot_shift_evoked.py
+++ b/examples/plot_shift_evoked.py
@@ -11,6 +11,7 @@ Shifting time-scale in evoked data
 print __doc__
 
 import matplotlib.pyplot as plt
+import mne
 from mne import fiff
 from mne.datasets import sample
 
@@ -26,18 +27,20 @@ picks = fiff.pick_channels(ch_names=evoked.info['ch_names'],
                            include="MEG 2332", exclude="bad")
 
 # Create subplots
-f, axarr = plt.subplots(3)
-evoked.plot(exclude=[], picks=picks, axes=axarr[0],
+f, (ax1, ax2, ax3) = plt.subplots(3)
+evoked.plot(exclude=[], picks=picks, axes=ax1,
             titles=dict(grad='Before time shifting'))
 
 # Apply relative time-shift of 500 ms
 evoked.shift_time(0.5, relative=True)
 
-evoked.plot(exclude=[], picks=picks, axes=axarr[1],
+evoked.plot(exclude=[], picks=picks, axes=ax2,
             titles=dict(grad='Relative shift: 500 ms'))
 
 # Apply absolute time-shift of 500 ms
 evoked.shift_time(0.5, relative=False)
 
-evoked.plot(exclude=[], picks=picks, axes=axarr[2],
+evoked.plot(exclude=[], picks=picks, axes=ax3,
             titles=dict(grad='Absolute shift: 500 ms'))
+
+mne.viz.tight_layout()
diff --git a/examples/plot_ssp_projs_topomaps.py b/examples/plot_ssp_projs_topomaps.py
index 5cec40e..cc49cde 100644
--- a/examples/plot_ssp_projs_topomaps.py
+++ b/examples/plot_ssp_projs_topomaps.py
@@ -7,7 +7,8 @@ This example shows how to display topographies of SSP projection vectors.
 The projections used are the ones correcting for ECG artifacts.
 """
 # Author: Alexandre Gramfort <gramfort at nmr.mgh.harvard.edu>
-#
+#         Denis A. Engemann <d.engemann at fz-juelich.de>
+
 # License: BSD (3-clause)
 
 print __doc__
@@ -23,9 +24,8 @@ ave_fname = data_path + '/MEG/sample/sample_audvis-ave.fif'
 evoked = mne.fiff.read_evoked(ave_fname, setno='Left Auditory')
 projs = mne.read_proj(ecg_fname)
 
-layouts = [mne.layouts.read_layout('Vectorview-all'),
-           mne.layouts.make_eeg_layout(evoked.info)]
+layouts = [mne.find_layout(evoked.info, k) for k in ('meg', 'eeg')]
 
-plt.figure(figsize=(10, 6))
+plt.figure(figsize=(12, 6))
 mne.viz.plot_projs_topomap(projs, layout=layouts)
-mne.viz.tight_layout()
+mne.viz.tight_layout(w_pad=0.5)
diff --git a/examples/plot_topo_channel_epochs_image.py b/examples/plot_topo_channel_epochs_image.py
index 312b7be..f2a5ef5 100644
--- a/examples/plot_topo_channel_epochs_image.py
+++ b/examples/plot_topo_channel_epochs_image.py
@@ -21,7 +21,6 @@ import matplotlib.pyplot as plt
 import mne
 from mne import fiff
 from mne.datasets import sample
-from mne.layouts import read_layout
 data_path = sample.data_path()
 
 ###############################################################################
@@ -47,7 +46,7 @@ epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
 ###############################################################################
 # Show event related fields images
 
-layout = read_layout('Vectorview-all')
+layout = mne.find_layout(epochs.info, 'meg')  # use full layout
 
 title = 'ERF images - MNE sample data'
 mne.viz.plot_topo_image_epochs(epochs, layout, sigma=0.5, vmin=-200, vmax=200,
diff --git a/examples/preprocessing/plot_ica_from_epochs.py b/examples/preprocessing/plot_ica_from_epochs.py
index 95f56a2..1153a5d 100644
--- a/examples/preprocessing/plot_ica_from_epochs.py
+++ b/examples/preprocessing/plot_ica_from_epochs.py
@@ -133,7 +133,6 @@ ica_epochs = ica.sources_as_epochs(epochs)
 # don't exclude bad sources by passing an empty list.
 ica_picks = mne.fiff.pick_types(ica_epochs.info, misc=True, exclude=[])
 ica_evoked = ica_epochs.average(ica_picks)
-plt.figure()
 ica_evoked.plot(titles=dict(misc='ICA sources'))
 
 # Tip: use this for epochs constructed around ECG r-peaks to check whether all
diff --git a/examples/realtime/plot_compute_rt_average.py b/examples/realtime/plot_compute_rt_average.py
index a9463c4..2f00283 100644
--- a/examples/realtime/plot_compute_rt_average.py
+++ b/examples/realtime/plot_compute_rt_average.py
@@ -18,8 +18,7 @@ print __doc__
 #
 # License: BSD (3-clause)
 
-import time
-
+import matplotlib.pyplot as plt
 import mne
 from mne.datasets import sample
 from mne.realtime import RtEpochs, MockRtClient
@@ -48,17 +47,11 @@ rt_epochs.start()
 
 # send raw buffers
 rt_client.send_data(rt_epochs, picks, tmin=0, tmax=150, buffer_size=1000)
-
-evoked = None
-
 for ii, ev in enumerate(rt_epochs.iter_evoked()):
-
     print "Just got epoch %d" % (ii + 1)
-
-    if evoked is None:
-        evoked = ev
-    else:
-        evoked += ev
-
-    evoked.plot()
-    time.sleep(0.1)
+    if ii > 0:
+        ev += evoked
+    evoked = ev
+    plt.clf() # clear canvas
+    evoked.plot(axes=plt.gca())  # plot on current figure
+    plt.pause(0.05)
diff --git a/examples/stats/plot_sensor_permutation_test.py b/examples/stats/plot_sensor_permutation_test.py
index 1586820..4597eaa 100644
--- a/examples/stats/plot_sensor_permutation_test.py
+++ b/examples/stats/plot_sensor_permutation_test.py
@@ -65,8 +65,7 @@ print "Sensors names : %s" % significant_sensors_names
 import matplotlib.pyplot as plt
 
 # load sensor layout
-from mne.layouts import read_layout
-layout = read_layout('Vectorview-grad')
+layout = mne.find_layout(epochs.info)
 
 # Extract mask and indices of active sensors in layout
 idx_of_sensors = [layout.names.index(name)
diff --git a/examples/time_frequency/plot_single_trial_spectra.py b/examples/time_frequency/plot_single_trial_spectra.py
index 735ac7b..0213f9f 100644
--- a/examples/time_frequency/plot_single_trial_spectra.py
+++ b/examples/time_frequency/plot_single_trial_spectra.py
@@ -70,7 +70,7 @@ ax2.set_title('averaged over trials', fontsize=10)
 ax2.imshow(average_psds[:, freq_mask].T, aspect='auto', origin='lower')
 ax2.set_xticks(np.arange(0, len(picks), 30))
 ax2.set_xticklabels(picks[::30])
-ax2.set_xlabel('MEG channel index (Gradiomemters)')
+ax2.set_xlabel('MEG channel index (Gradiometers)')
 
 mne.viz.tight_layout()
 plt.show()
diff --git a/examples/time_frequency/plot_tfr_topography.py b/examples/time_frequency/plot_tfr_topography.py
index 9b3b87e..bf2c75b 100644
--- a/examples/time_frequency/plot_tfr_topography.py
+++ b/examples/time_frequency/plot_tfr_topography.py
@@ -41,7 +41,7 @@ epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                     baseline=(None, 0), reject=dict(grad=4000e-13, eog=150e-6))
 data = epochs.get_data()  # as 3D matrix
 
-layout = mne.layouts.read_layout('Vectorview-all')
+layout = mne.find_layout(epochs.info, 'meg')
 
 ###############################################################################
 # Calculate power and phase locking value
diff --git a/mne/__init__.py b/mne/__init__.py
index 0533066..3948289 100644
--- a/mne/__init__.py
+++ b/mne/__init__.py
@@ -48,6 +48,8 @@ from .proj import (read_proj, write_proj, compute_proj_epochs,
                    compute_proj_evoked, compute_proj_raw, sensitivity_map)
 from .selection import read_selection
 from .dipole import read_dip
+from .layouts.layout import find_layout
+
 from . import beamformer
 from . import connectivity
 from . import coreg
@@ -56,6 +58,7 @@ from . import datasets
 from . import epochs
 from . import fiff
 from . import filter
+from . import gui
 from . import layouts
 from . import minimum_norm
 from . import mixed_norm
diff --git a/mne/commands/mne_bti2fiff.py b/mne/commands/mne_bti2fiff.py
index 4eefcac..0295f83 100755
--- a/mne/commands/mne_bti2fiff.py
+++ b/mne/commands/mne_bti2fiff.py
@@ -2,7 +2,7 @@
 """
 Import BTi / 4D MagnesWH3600 data to fif file.
 
-example usage: mne bti2fiff -pdf C,rfDC -o my_raw.fif
+example usage: mne bti2fiff --pdf C,rfDC -o my_raw.fif
 
 Note.
 1) Currently direct inclusion of reference channel weights
diff --git a/mne/connectivity/spectral.py b/mne/connectivity/spectral.py
index bad4cbb..25b1145 100644
--- a/mne/connectivity/spectral.py
+++ b/mne/connectivity/spectral.py
@@ -9,7 +9,7 @@ import numpy as np
 from scipy.fftpack import fftfreq
 
 from .utils import check_indices
-from ..fixes import tril_indices
+from ..fixes import tril_indices, partial
 from ..parallel import parallel_func
 from ..source_estimate import _BaseSourceEstimate
 from .. import Epochs
@@ -325,8 +325,9 @@ def _epoch_spectral_connectivity(data, sig_idx, tmin_idx, tmax_idx, sfreq,
             else:
                 this_sig_idx = sig_idx
             if isinstance(this_data, _BaseSourceEstimate):
-                this_x_mt = this_data.transform_data(_mt_spectra,
-                                        fun_args=(window_fun, sfreq),
+                _mt_spectra_partial = partial(_mt_spectra, dpss=window_fun,
+                                              sfreq=sfreq)
+                this_x_mt = this_data.transform_data(_mt_spectra_partial,
                                         idx=this_sig_idx, tmin_idx=tmin_idx,
                                         tmax_idx=tmax_idx)
             else:
@@ -378,9 +379,11 @@ def _epoch_spectral_connectivity(data, sig_idx, tmin_idx, tmax_idx, sfreq,
             else:
                 this_sig_idx = sig_idx
             if isinstance(this_data, _BaseSourceEstimate):
-                this_x_cwt = this_data.transform_data(cwt,
-                    fun_args=(wavelets,), idx=this_sig_idx, tmin_idx=tmin_idx,
-                    tmax_idx=tmax_idx, use_fft=True, mode='same')
+                cwt_partial = partial(cwt, Ws=wavelets, use_fft=True,
+                                      mode='same')
+                this_x_cwt = this_data.transform_data(cwt_partial,
+                                idx=this_sig_idx, tmin_idx=tmin_idx,
+                                tmax_idx=tmax_idx)
             else:
                 this_x_cwt = cwt(this_data[this_sig_idx, tmin_idx:tmax_idx],
                                  wavelets, use_fft=True, mode='same')
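
For reference, a stand-alone sketch of the pattern used above: binding the
keyword arguments with a partial (the patch imports partial from mne.fixes;
the plain functools version is shown here) so that transform_data-style
callers only need to pass the data. The _spectra function below is a made-up
stand-in, not the real _mt_spectra:

    from functools import partial

    import numpy as np

    def _spectra(data, dpss=None, sfreq=1.0):
        # stand-in for _mt_spectra: any callable taking (data, **kwargs)
        return np.fft.rfft(data, axis=-1) / sfreq

    # bind the keyword arguments once ...
    spectra_fun = partial(_spectra, dpss=None, sfreq=1000.0)

    # ... so the caller only has to supply the data itself
    out = spectra_fun(np.random.randn(4, 256))
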
diff --git a/mne/coreg.py b/mne/coreg.py
index 48fb3ad..4ae10e6 100644
--- a/mne/coreg.py
+++ b/mne/coreg.py
@@ -878,6 +878,11 @@ def scale_mri(subject_from, subject_to, scale, overwrite=False,
         If an MRI already exists for subject_to, overwrite it.
     subjects_dir : None | str
         Override the SUBJECTS_DIR environment variable.
+
+    See Also
+    --------
+    scale_labels : add labels to a scaled MRI
+    scale_source_space : add a source space to a scaled MRI
     """
     subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
     paths = _find_mri_paths(subject_from, subjects_dir=subjects_dir)
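
A short, hypothetical usage sketch of the scaling helper documented above
(the subject names and scale factor are placeholders):

    import mne

    # create a scaled copy of an existing MRI subject
    mne.scale_mri('fsaverage', 'sample_scaled', scale=0.95, overwrite=True)

    # scale_labels / scale_source_space (see the See Also block above) can
    # then add labels and a source space to the scaled subject
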
diff --git a/mne/cov.py b/mne/cov.py
index af6c6b3..4209041 100644
--- a/mne/cov.py
+++ b/mne/cov.py
@@ -558,7 +558,7 @@ def regularize(cov, info, mag=0.1, grad=0.1, eeg=0.1, exclude=None,
     """Regularize noise covariance matrix
 
     This method works by adding a constant to the diagonal for each
-    channel type separatly. Special care is taken to keep the
+    channel type separately. Special care is taken to keep the
     rank of the data constant.
 
     Parameters
diff --git a/mne/fiff/kit/tests/__init__.py b/mne/fiff/kit/tests/__init__.py
index e69de29..aba6507 100644
--- a/mne/fiff/kit/tests/__init__.py
+++ b/mne/fiff/kit/tests/__init__.py
@@ -0,0 +1,3 @@
+import os.path as op
+
+data_dir = op.join(op.dirname(__file__), 'data')
diff --git a/mne/fiff/kit/tests/data/test_mrk_post.sqd b/mne/fiff/kit/tests/data/test_mrk_post.sqd
new file mode 100755
index 0000000..52551f5
Binary files /dev/null and b/mne/fiff/kit/tests/data/test_mrk_post.sqd differ
diff --git a/mne/fiff/kit/tests/data/test_mrk_pre.sqd b/mne/fiff/kit/tests/data/test_mrk_pre.sqd
new file mode 100755
index 0000000..2413a87
Binary files /dev/null and b/mne/fiff/kit/tests/data/test_mrk_pre.sqd differ
diff --git a/mne/fiff/raw.py b/mne/fiff/raw.py
index 0fd2db6..f39bc80 100644
--- a/mne/fiff/raw.py
+++ b/mne/fiff/raw.py
@@ -1067,7 +1067,7 @@ class Raw(ProjMixin):
             'original' plots in the order of ch_names, array gives the
             indices to use in plotting.
         show_options : bool
-            If True, a dialog for options related to projecion is shown.
+            If True, a dialog for options related to projection is shown.
         title : str | None
             The title of the window. If None, and either the filename of the
             raw object or '<unknown>' will be displayed as title.
diff --git a/mne/fiff/tests/__init__.py b/mne/fiff/tests/__init__.py
index e69de29..aba6507 100644
--- a/mne/fiff/tests/__init__.py
+++ b/mne/fiff/tests/__init__.py
@@ -0,0 +1,3 @@
+import os.path as op
+
+data_dir = op.join(op.dirname(__file__), 'data')
diff --git a/mne/forward/forward.py b/mne/forward/forward.py
index 0ceb791..8ebf7f3 100644
--- a/mne/forward/forward.py
+++ b/mne/forward/forward.py
@@ -367,7 +367,8 @@ def read_forward_solution(fname, force_fixed=False, surf_ori=False,
     force_fixed : bool, optional (default False)
         Force fixed source orientation mode?
     surf_ori : bool, optional (default False)
-        Use surface-based source coordinate system?
+        Use surface-based source coordinate system? Note that force_fixed=True
+        implies surf_ori=True.
     include : list, optional
         List of names of channels to include. If empty all channels
         are included.
@@ -519,7 +520,6 @@ def read_forward_solution(fname, force_fixed=False, surf_ori=False,
     # deal with transformations, storing orig copies so transforms can be done
     # as necessary later
     fwd['_orig_source_ori'] = fwd['source_ori']
-    fwd['surf_ori'] = False  # tell it that it's not surf oriented by default
     convert_forward_solution(fwd, surf_ori, force_fixed, copy=False)
     fwd = pick_channels_forward(fwd, include=include, exclude=exclude)
     return fwd
@@ -535,7 +535,8 @@ def convert_forward_solution(fwd, surf_ori=False, force_fixed=False,
     fwd : dict
         The forward solution to modify.
     surf_ori : bool, optional (default False)
-        Use surface-based source coordinate system?
+        Use surface-based source coordinate system? Note that force_fixed=True
+        implies surf_ori=True.
     force_fixed : bool, optional (default False)
         Force fixed source orientation mode?
     copy : bool, optional (default True)
@@ -566,7 +567,7 @@ def convert_forward_solution(fwd, surf_ori=False, force_fixed=False,
         #   Modify the forward solution for fixed source orientations
         if not is_fixed_orient(fwd, orig=True):
             logger.info('    Changing to fixed-orientation forward '
-                        'solution...')
+                        'solution with surface-based source orientations...')
             fix_rot = _block_diag(fwd['source_nn'].T, 1)
             # newer versions of numpy require explicit casting here, so *= no
             # longer works
@@ -581,6 +582,7 @@ def convert_forward_solution(fwd, surf_ori=False, force_fixed=False,
                 fwd['sol_grad']['ncol'] = 3 * fwd['nsource']
             logger.info('    [done]')
         fwd['source_ori'] = FIFF.FIFFV_MNE_FIXED_ORI
+        fwd['surf_ori'] = True
     elif surf_ori:  # Free, surf-oriented
         #   Rotate the local source coordinate systems
         nuse_total = sum([s['nuse'] for s in fwd['src']])
@@ -623,6 +625,7 @@ def convert_forward_solution(fwd, surf_ori=False, force_fixed=False,
             fwd['sol_grad']['ncol'] = 3 * fwd['nsource']
         logger.info('[done]')
         fwd['source_ori'] = FIFF.FIFFV_MNE_FREE_ORI
+        fwd['surf_ori'] = True
     else:  # Free, cartesian
         logger.info('    Cartesian source orientations...')
         fwd['source_nn'] = np.kron(np.ones((fwd['nsource'], 1)), np.eye(3))
@@ -632,9 +635,9 @@ def convert_forward_solution(fwd, surf_ori=False, force_fixed=False,
             fwd['sol_grad']['data'] = fwd['_orig_sol_grad'].copy()
             fwd['sol_grad']['ncol'] = 3 * fwd['nsource']
         fwd['source_ori'] = FIFF.FIFFV_MNE_FREE_ORI
+        fwd['surf_ori'] = False
         logger.info('[done]')
 
-    fwd['surf_ori'] = surf_ori
     return fwd
 
 
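
A brief sketch of the behaviour documented above; the file name is assumed
to be the forward solution shipped with the MNE sample data set:

    import mne
    from mne.datasets import sample

    data_path = sample.data_path()
    fwd_fname = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'

    # force_fixed=True now implies a surface-oriented solution
    fwd = mne.read_forward_solution(fwd_fname, force_fixed=True)
    assert fwd['surf_ori']

    # an already-loaded solution can also be converted after the fact
    fwd = mne.convert_forward_solution(fwd, surf_ori=True, force_fixed=False)
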
diff --git a/mne/forward/tests/test_forward.py b/mne/forward/tests/test_forward.py
index 27f3bfd..ce268ac 100644
--- a/mne/forward/tests/test_forward.py
+++ b/mne/forward/tests/test_forward.py
@@ -116,6 +116,7 @@ def test_io_forward():
     assert_equal(len(fwd['info']['chs']), 306)
     assert_true('dev_head_t' in fwd['info'])
     assert_true('mri_head_t' in fwd)
+    assert_true(fwd['surf_ori'])
 
 
 @sample.requires_sample_data
diff --git a/mne/forward/tests/test_make_forward.py b/mne/forward/tests/test_make_forward.py
index 7095532..8fd17c8 100644
--- a/mne/forward/tests/test_make_forward.py
+++ b/mne/forward/tests/test_make_forward.py
@@ -3,14 +3,13 @@ import os.path as op
 from subprocess import CalledProcessError
 
 from nose.tools import assert_raises
-import numpy as np
 from numpy.testing import (assert_equal, assert_allclose)
 
 from mne.datasets import sample
 from mne.fiff import Raw
 from mne.fiff.kit import read_raw_kit
 from mne.fiff.bti import read_raw_bti
-from mne import (Epochs, read_forward_solution, make_forward_solution,
+from mne import (read_forward_solution, make_forward_solution,
                  do_forward_solution, setup_source_space, read_trans,
                  convert_forward_solution)
 from mne.utils import requires_mne, _TempDir
diff --git a/mne/gui/__init__.py b/mne/gui/__init__.py
new file mode 100644
index 0000000..bf8326f
--- /dev/null
+++ b/mne/gui/__init__.py
@@ -0,0 +1,89 @@
+"""Convenience functions for opening GUIs."""
+
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+
+def combine_kit_markers():
+    """Create a new KIT marker file by interpolating two marker files
+
+    Notes
+    -----
+    The functionality in this GUI is also part of :func:`kit2fiff`.
+    """
+    from ._marker_gui import CombineMarkersFrame
+    gui = CombineMarkersFrame()
+    gui.configure_traits()
+    return gui
+
+
+def coregistration(tabbed=False, split=True, scene_width=01, raw=None,
+                   subject=None, subjects_dir=None):
+    """Coregister an MRI with a subject's head shape
+
+    Parameters
+    ----------
+    tabbed : bool
+        Combine the data source panel and the coregistration panel into a
+        single panel with tabs.
+    split : bool
+        Split the main panels with a movable splitter (good for QT4 but
+        unnecessary for wx backend).
+    scene_width : int
+        Specify a minimum width for the 3d scene (in pixels).
+    raw : None | str(path)
+        Path to a raw file containing the digitizer data.
+    subject : None | str
+        Name of the mri subject.
+    subjects_dir : None | path
+        Override the SUBJECTS_DIR environment variable
+        (sys.environ['SUBJECTS_DIR'])
+
+    Notes
+    -----
+    All parameters are optional, since they can be set through the GUI.
+    Step by step instructions for the coregistration can be accessed as
+    slides, `for subjects with structural MRI
+    <http://www.slideshare.net/slideshow/embed_code/28598463>`_ and `for
+    subjects for which no MRI is available
+    <http://www.slideshare.net/slideshow/embed_code/28598561>`_.
+    """
+    from ._coreg_gui import CoregFrame, _make_view
+    view = _make_view(tabbed, split, scene_width)
+    gui = CoregFrame(raw, subject, subjects_dir)
+    gui.configure_traits(view=view)
+    return gui
+
+
+def fiducials(subject=None, fid_file=None, subjects_dir=None):
+    """Set the fiducials for an MRI subject
+
+    Parameters
+    ----------
+    subject : str
+        Name of the mri subject.
+    fid_file : None | str
+        Load a fiducials file different from the subject's default
+        ("{subjects_dir}/{subject}/bem/{subject}-fiducials.fif").
+    subjects_dir : None | str
+        Override the SUBJECTS_DIR environment variable.
+
+    Notes
+    -----
+    All parameters are optional, since they can be set through the GUI.
+    The functionality in this GUI is also part of :func:`coregistration`.
+    """
+    from ._fiducials_gui import FiducialsFrame
+    gui = FiducialsFrame(subject, subjects_dir, fid_file=fid_file)
+    gui.configure_traits()
+    return gui
+
+
+def kit2fiff():
+    """Convert KIT files to the fiff format
+    """
+    from ._kit2fiff_gui import Kit2FiffFrame
+    gui = Kit2FiffFrame()
+    gui.configure_traits()
+    return gui
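
A minimal sketch of how the new convenience functions are meant to be called
(the paths below are placeholders; each call opens an interactive window):

    import mne

    # head-MRI coregistration; all arguments can also be set from the GUI
    mne.gui.coregistration(raw='sample_audvis_raw.fif', subject='sample',
                           subjects_dir='/path/to/subjects_dir')

    # interactive KIT to FIFF conversion
    mne.gui.kit2fiff()
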
diff --git a/mne/gui/_coreg_gui.py b/mne/gui/_coreg_gui.py
new file mode 100644
index 0000000..7bb0225
--- /dev/null
+++ b/mne/gui/_coreg_gui.py
@@ -0,0 +1,1372 @@
+"""Traits-based GUI for head-MRI coregistration"""
+
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+from copy import deepcopy
+import os
+from Queue import Queue
+import re
+from threading import Thread
+
+import numpy as np
+from scipy.spatial.distance import cdist
+
+# allow import without traits
+try:
+    from mayavi.core.ui.mayavi_scene import MayaviScene
+    from mayavi.tools.mlab_scene_model import MlabSceneModel
+    from pyface.api import (error, confirm, warning, OK, YES, information,
+                            FileDialog, GUI)
+    from traits.api import (Bool, Button, cached_property, DelegatesTo,
+                            Directory, Enum, Float, HasTraits,
+                            HasPrivateTraits, Instance, Int, on_trait_change,
+                            Property, Str)
+    from traitsui.api import (View, Item, Group, HGroup, VGroup, VGrid,
+                              EnumEditor, Handler, Label, TextEditor)
+    from traitsui.menu import Action, UndoButton, CancelButton, NoButtons
+    from tvtk.pyface.scene_editor import SceneEditor
+except:
+    from ..utils import trait_wraith
+    HasTraits = object
+    HasPrivateTraits = object
+    Handler = object
+    cached_property = trait_wraith
+    on_trait_change = trait_wraith
+    MayaviScene = trait_wraith
+    MlabSceneModel = trait_wraith
+    Bool = trait_wraith
+    Button = trait_wraith
+    DelegatesTo = trait_wraith
+    Directory = trait_wraith
+    Enum = trait_wraith
+    Float = trait_wraith
+    Instance = trait_wraith
+    Int = trait_wraith
+    Property = trait_wraith
+    Str = trait_wraith
+    View = trait_wraith
+    Item = trait_wraith
+    Group = trait_wraith
+    HGroup = trait_wraith
+    VGroup = trait_wraith
+    VGrid = trait_wraith
+    EnumEditor = trait_wraith
+    Label = trait_wraith
+    TextEditor = trait_wraith
+    Action = trait_wraith
+    UndoButton = trait_wraith
+    CancelButton = trait_wraith
+    NoButtons = trait_wraith
+    SceneEditor = trait_wraith
+
+
+from ..coreg import bem_fname, trans_fname
+from ..fiff import FIFF
+from ..forward import prepare_bem_model
+from ..transforms import (write_trans, read_trans, apply_trans, rotation,
+                          translation, scaling, rotation_angles)
+from ..coreg import (fit_matched_points, fit_point_cloud, scale_mri,
+                     _point_cloud_error)
+from ..utils import get_subjects_dir, logger
+from ._fiducials_gui import MRIHeadWithFiducialsModel, FiducialsPanel
+from ._file_traits import (assert_env_set, trans_wildcard, RawSource,
+                           SubjectSelectorPanel)
+from ._viewer import (defaults, HeadViewController, PointObject, SurfaceObject,
+                      headview_borders)
+
+
+laggy_float_editor = TextEditor(auto_set=False, enter_set=True, evaluate=float)
+
+
+class CoregModel(HasPrivateTraits):
+    """Traits object for estimating the head mri transform.
+
+    Notes
+    -----
+    Transform from head to mri space is modeled with the following steps:
+
+     * move the head shape to its nasion position
+     * rotate the head shape with user defined rotation around its nasion
+     * move the head shape by user defined translation
+     * move the head shape origin to the mri nasion
+
+    If MRI scaling is enabled,
+
+     * the MRI is scaled relative to its origin center (prior to any
+       transformation of the digitizer head)
+
+
+    Don't sync transforms to anything to prevent them from being recomputed
+    upon every parameter change.
+    """
+    # data sources
+    mri = Instance(MRIHeadWithFiducialsModel, ())
+    hsp = Instance(RawSource, ())
+
+    # parameters
+    n_scale_params = Enum(0, 1, 3, desc="Scale the MRI to better fit the "
+                          "subject's head shape (a new MRI subject will be "
+                          "created with a name specified upon saving)")
+    scale_x = Float(1, label="Right (X)")
+    scale_y = Float(1, label="Anterior (Y)")
+    scale_z = Float(1, label="Superior (Z)")
+    rot_x = Float(0, label="Right (X)")
+    rot_y = Float(0, label="Anterior (Y)")
+    rot_z = Float(0, label="Superior (Z)")
+    trans_x = Float(0, label="Right (X)")
+    trans_y = Float(0, label="Anterior (Y)")
+    trans_z = Float(0, label="Superior (Z)")
+
+    # secondary to parameters
+    scale = Property(depends_on=['n_scale_params', 'scale_x', 'scale_y',
+                                 'scale_z'])
+    has_fid_data = Property(Bool, depends_on=['mri_origin', 'hsp.nasion'],
+                            desc="Required fiducials data is present.")
+    has_pts_data = Property(Bool, depends_on=['mri.points', 'hsp.points'])
+
+    # MRI dependent
+    mri_origin = Property(depends_on=['mri.nasion', 'scale'],
+                          desc="Coordinates of the scaled MRI's nasion.")
+
+    # target transforms
+    mri_scale_trans = Property(depends_on=['scale'])
+    head_mri_trans = Property(depends_on=['hsp.nasion', 'rot_x', 'rot_y',
+                                          'rot_z', 'trans_x', 'trans_y',
+                                          'trans_z', 'mri_origin'],
+                              desc="Transformation of the head shape to "
+                              "match the scaled MRI.")
+
+    # info
+    can_save = Property(Bool, depends_on=['head_mri_trans'])
+    raw_subject = Property(depends_on='hsp.raw_fname', desc="Subject guess "
+                           "based on the raw file name.")
+    lock_fiducials = DelegatesTo('mri')
+
+    # transformed geometry
+    transformed_mri_points = Property(depends_on=['mri.points',
+                                                  'mri_scale_trans'])
+    transformed_hsp_points = Property(depends_on=['hsp.points',
+                                                  'head_mri_trans'])
+    transformed_mri_lpa = Property(depends_on=['mri.lpa', 'mri_scale_trans'])
+    transformed_hsp_lpa = Property(depends_on=['hsp.lpa', 'head_mri_trans'])
+    transformed_mri_nasion = Property(depends_on=['mri.nasion',
+                                                  'mri_scale_trans'])
+    transformed_hsp_nasion = Property(depends_on=['hsp.nasion',
+                                                  'head_mri_trans'])
+    transformed_mri_rpa = Property(depends_on=['mri.rpa', 'mri_scale_trans'])
+    transformed_hsp_rpa = Property(depends_on=['hsp.rpa', 'head_mri_trans'])
+
+    # fit properties
+    lpa_distance = Property(depends_on=['transformed_mri_lpa',
+                                        'transformed_hsp_lpa'])
+    nasion_distance = Property(depends_on=['transformed_mri_nasion',
+                                           'transformed_hsp_nasion'])
+    rpa_distance = Property(depends_on=['transformed_mri_rpa',
+                                        'transformed_hsp_rpa'])
+    point_distance = Property(depends_on=['transformed_mri_points',
+                                          'transformed_hsp_points'])
+
+    # fit property info strings
+    fid_eval_str = Property(depends_on=['lpa_distance', 'nasion_distance',
+                                        'rpa_distance'])
+    points_eval_str = Property(depends_on='point_distance')
+
+    @cached_property
+    def _get_can_save(self):
+        return np.any(self.head_mri_trans != np.eye(4))
+
+    @cached_property
+    def _get_has_pts_data(self):
+        has = (np.any(self.mri.points) and np.any(self.hsp.points))
+        return has
+
+    @cached_property
+    def _get_has_fid_data(self):
+        has = (np.any(self.mri_origin) and np.any(self.hsp.nasion))
+        return has
+
+    @cached_property
+    def _get_scale(self):
+        if self.n_scale_params == 0:
+            return np.array(1)
+        elif self.n_scale_params == 1:
+            return np.array(self.scale_x)
+        else:
+            return np.array([self.scale_x, self.scale_y, self.scale_z])
+
+    @cached_property
+    def _get_mri_scale_trans(self):
+        if np.isscalar(self.scale) or self.scale.ndim == 0:
+            if self.scale == 1:
+                return np.eye(4)
+            else:
+                s = self.scale
+                return scaling(s, s, s)
+        else:
+            return scaling(*self.scale)
+
+    @cached_property
+    def _get_mri_origin(self):
+        if np.isscalar(self.scale) and self.scale == 1:
+            return self.mri.nasion
+        else:
+            return self.mri.nasion * self.scale
+
+    @cached_property
+    def _get_head_mri_trans(self):
+        if not self.has_fid_data:
+            return np.eye(4)
+
+        # move hsp so that its nasion becomes the origin
+        x, y, z = -self.hsp.nasion[0]
+        trans = translation(x, y, z)
+
+        # rotate hsp by rotation parameters
+        rot = rotation(self.rot_x, self.rot_y, self.rot_z)
+        trans = np.dot(rot, trans)
+
+        # move hsp by translation parameters
+        transl = translation(self.trans_x, self.trans_y, self.trans_z)
+        trans = np.dot(transl, trans)
+
+        # move the hsp origin(/nasion) to the MRI's nasion
+        x, y, z = self.mri_origin[0]
+        tgt_mri_trans = translation(x, y, z)
+        trans = np.dot(tgt_mri_trans, trans)
+
+        return trans
+
+    @cached_property
+    def _get_transformed_mri_points(self):
+        return apply_trans(self.mri_scale_trans, self.mri.points)
+
+    @cached_property
+    def _get_transformed_mri_lpa(self):
+        return apply_trans(self.mri_scale_trans, self.mri.lpa)
+
+    @cached_property
+    def _get_transformed_mri_nasion(self):
+        return apply_trans(self.mri_scale_trans, self.mri.nasion)
+
+    @cached_property
+    def _get_transformed_mri_rpa(self):
+        return apply_trans(self.mri_scale_trans, self.mri.rpa)
+
+    @cached_property
+    def _get_transformed_hsp_points(self):
+        return apply_trans(self.head_mri_trans, self.hsp.points)
+
+    @cached_property
+    def _get_transformed_hsp_lpa(self):
+        return apply_trans(self.head_mri_trans, self.hsp.lpa)
+
+    @cached_property
+    def _get_transformed_hsp_nasion(self):
+        return apply_trans(self.head_mri_trans, self.hsp.nasion)
+
+    @cached_property
+    def _get_transformed_hsp_rpa(self):
+        return apply_trans(self.head_mri_trans, self.hsp.rpa)
+
+    @cached_property
+    def _get_lpa_distance(self):
+        d = np.ravel(self.transformed_mri_lpa - self.transformed_hsp_lpa)
+        return np.sqrt(np.dot(d, d))
+
+    @cached_property
+    def _get_nasion_distance(self):
+        d = np.ravel(self.transformed_mri_nasion - self.transformed_hsp_nasion)
+        return np.sqrt(np.dot(d, d))
+
+    @cached_property
+    def _get_rpa_distance(self):
+        d = np.ravel(self.transformed_mri_rpa - self.transformed_hsp_rpa)
+        return np.sqrt(np.dot(d, d))
+
+    @cached_property
+    def _get_point_distance(self):
+        if (len(self.transformed_hsp_points) == 0
+            or len(self.transformed_mri_points) == 0):
+            return
+        dists = cdist(self.transformed_hsp_points, self.transformed_mri_points,
+                      'euclidean')
+        dists = np.min(dists, 1)
+        return dists
+
+    @cached_property
+    def _get_fid_eval_str(self):
+        d = (self.lpa_distance * 1000, self.nasion_distance * 1000,
+             self.rpa_distance * 1000)
+        txt = ("Fiducials Error: LPA %.1f mm, NAS %.1f mm, RPA %.1f mm" % d)
+        return txt
+
+    @cached_property
+    def _get_points_eval_str(self):
+        if self.point_distance is None:
+            return ""
+        av_dist = np.mean(self.point_distance)
+        return "Average Points Error: %.1f mm" % (av_dist * 1000)
+
+    def _get_raw_subject(self):
+        # subject name guessed based on the raw file name
+        if '_' in self.hsp.raw_fname:
+            subject, _ = self.hsp.raw_fname.split('_', 1)
+            if not subject:
+                subject = None
+        else:
+            subject = None
+        return subject
+
+    @on_trait_change('raw_subject')
+    def _on_raw_subject_change(self, subject):
+        if subject in self.mri.subject_source.subjects:
+            self.mri.subject = subject
+        elif 'fsaverage' in self.mri.subject_source.subjects:
+            self.mri.subject = 'fsaverage'
+
+    def omit_hsp_points(self, distance=0, reset=False):
+        """Exclude head shape points that are far away from the MRI head
+
+        Parameters
+        ----------
+        distance : float
+            Exclude all points that are further away from the MRI head than
+            this distance. Previously excluded points are still excluded unless
+            reset=True is specified. A value of distance <= 0 excludes nothing.
+        reset : bool
+            Reset the filter before calculating new omission (default is
+            False).
+        """
+        distance = float(distance)
+        if reset:
+            logger.info("Coregistration: Reset excluded head shape points")
+            self.hsp.points_filter = None
+
+        if distance <= 0:
+            return
+
+        # find the new filter
+        hsp_pts = self.transformed_hsp_points
+        mri_pts = self.transformed_mri_points
+        point_distance = _point_cloud_error(hsp_pts, mri_pts)
+        new_sub_filter = point_distance <= distance
+        n_excluded = np.sum(new_sub_filter == False)
+        logger.info("Coregistration: Excluding %i head shape points with "
+                    "distance >= %.3f m.", n_excluded, distance)
+
+        # combine the new filter with the previous filter
+        old_filter = self.hsp.points_filter
+        if old_filter is None:
+            new_filter = new_sub_filter
+        else:
+            new_filter = np.ones(len(self.hsp.raw_points), np.bool8)
+            new_filter[old_filter] = new_sub_filter
+
+        # set the filter
+        self.hsp.points_filter = new_filter
+
+    def fit_auricular_points(self):
+        "Find rotation to fit LPA and RPA"
+        src_fid = np.vstack((self.hsp.lpa, self.hsp.rpa))
+        src_fid -= self.hsp.nasion
+
+        tgt_fid = np.vstack((self.mri.lpa, self.mri.rpa))
+        tgt_fid -= self.mri.nasion
+        tgt_fid *= self.scale
+        tgt_fid -= [self.trans_x, self.trans_y, self.trans_z]
+
+        x0 = (self.rot_x, self.rot_y, self.rot_z)
+        rot = fit_matched_points(src_fid, tgt_fid, rotate=True,
+                                 translate=False, x0=x0, out='params')
+
+        self.rot_x, self.rot_y, self.rot_z = rot
+
+    def fit_fiducials(self):
+        "Find rotation and translation to fit all 3 fiducials"
+        src_fid = np.vstack((self.hsp.lpa, self.hsp.nasion, self.hsp.rpa))
+        src_fid -= self.hsp.nasion
+
+        tgt_fid = np.vstack((self.mri.lpa, self.mri.nasion, self.mri.rpa))
+        tgt_fid -= self.mri.nasion
+        tgt_fid *= self.scale
+
+        x0 = (self.rot_x, self.rot_y, self.rot_z, self.trans_x, self.trans_y,
+              self.trans_z)
+        est = fit_matched_points(src_fid, tgt_fid, x0=x0, out='params')
+
+        self.rot_x, self.rot_y, self.rot_z = est[:3]
+        self.trans_x, self.trans_y, self.trans_z = est[3:]
+
+    def fit_hsp_points(self):
+        "Find rotation to fit head shapes"
+        src_pts = self.hsp.points - self.hsp.nasion
+
+        tgt_pts = self.mri.points - self.mri.nasion
+        tgt_pts *= self.scale
+        tgt_pts -= [self.trans_x, self.trans_y, self.trans_z]
+
+        x0 = (self.rot_x, self.rot_y, self.rot_z)
+        rot = fit_point_cloud(src_pts, tgt_pts, rotate=True, translate=False,
+                              x0=x0)
+
+        self.rot_x, self.rot_y, self.rot_z = rot
+
+    def fit_scale_auricular_points(self):
+        "Find rotation and MRI scaling based on LPA and RPA"
+        src_fid = np.vstack((self.hsp.lpa, self.hsp.rpa))
+        src_fid -= self.hsp.nasion
+
+        tgt_fid = np.vstack((self.mri.lpa, self.mri.rpa))
+        tgt_fid -= self.mri.nasion
+        tgt_fid -= [self.trans_x, self.trans_y, self.trans_z]
+
+        x0 = (self.rot_x, self.rot_y, self.rot_z, 1. / self.scale_x)
+        x = fit_matched_points(src_fid, tgt_fid, rotate=True, translate=False,
+                               scale=1, x0=x0, out='params')
+
+        self.scale_x = 1. / x[3]
+        self.rot_x, self.rot_y, self.rot_z = x[:3]
+
+    def fit_scale_fiducials(self):
+        "Find translation, rotation and scaling based on the three fiducials"
+        src_fid = np.vstack((self.hsp.lpa, self.hsp.nasion, self.hsp.rpa))
+        src_fid -= self.hsp.nasion
+
+        tgt_fid = np.vstack((self.mri.lpa, self.mri.nasion, self.mri.rpa))
+        tgt_fid -= self.mri.nasion
+
+        x0 = (self.rot_x, self.rot_y, self.rot_z, self.trans_x, self.trans_y,
+              self.trans_z, 1. / self.scale_x,)
+        est = fit_matched_points(src_fid, tgt_fid, rotate=True, translate=True,
+                                 scale=1, x0=x0, out='params')
+
+        self.scale_x = 1. / est[6]
+        self.rot_x, self.rot_y, self.rot_z = est[:3]
+        self.trans_x, self.trans_y, self.trans_z = est[3:6]
+
+    def fit_scale_hsp_points(self):
+        "Find MRI scaling and rotation to match head shape points"
+        src_pts = self.hsp.points - self.hsp.nasion
+
+        tgt_pts = self.mri.points - self.mri.nasion
+
+        if self.n_scale_params == 1:
+            x0 = (self.rot_x, self.rot_y, self.rot_z, 1. / self.scale_x)
+            est = fit_point_cloud(src_pts, tgt_pts, rotate=True,
+                                  translate=False, scale=1, x0=x0)
+
+            self.scale_x = 1. / est[3]
+        else:
+            x0 = (self.rot_x, self.rot_y, self.rot_z, 1. / self.scale_x,
+                  1. / self.scale_y, 1. / self.scale_z)
+            est = fit_point_cloud(src_pts, tgt_pts, rotate=True,
+                                  translate=False, scale=3, x0=x0)
+            self.scale_x, self.scale_y, self.scale_z = 1. / est[3:]
+
+        self.rot_x, self.rot_y, self.rot_z = est[:3]
+
+    def get_scaling_job(self, subject_to):
+        desc = 'Scaling %s' % subject_to
+        func = scale_mri
+        args = (self.mri.subject, subject_to, self.scale)
+        kwargs = dict(overwrite=True, subjects_dir=self.mri.subjects_dir)
+        return (desc, func, args, kwargs)
+
+    def get_prepare_bem_model_job(self, subject_to):
+        subjects_dir = self.mri.subjects_dir
+        subject_from = self.mri.subject
+
+        bem_name = 'inner_skull'
+        bem_file = bem_fname.format(subjects_dir=subjects_dir,
+                                    subject=subject_from, name=bem_name)
+        if not os.path.exists(bem_file):
+            pattern = bem_fname.format(subjects_dir=subjects_dir,
+                                       subject=subject_to, name='(.+)')
+            bem_dir, bem_file = os.path.split(pattern)
+            m = None
+            bem_file_pattern = re.compile(bem_file)
+            for name in os.listdir(bem_dir):
+                m = bem_file_pattern.match(name)
+                if m is not None:
+                    break
+
+            if m is None:
+                pattern = bem_fname.format(subjects_dir=subjects_dir,
+                                           subject=subject_to, name='*')
+                err = ("No bem file found; looking for files matching "
+                       "%s" % pattern)
+                error(err)
+
+            bem_name = m.group(1)
+
+        bem_file = bem_fname.format(subjects_dir=subjects_dir,
+                                    subject=subject_to, name=bem_name)
+
+        # job
+        desc = 'mne_prepare_bem_model for %s' % subject_to
+        func = prepare_bem_model
+        args = (bem_file,)
+        kwargs = {}
+        return (desc, func, args, kwargs)
+
+    def load_trans(self, fname):
+        """Load the head-mri transform from a fif file
+
+        Parameters
+        ----------
+        fname : str
+            File path.
+        """
+        info = read_trans(fname)
+        head_mri_trans = info['trans']
+        self.set_trans(head_mri_trans)
+
+    def set_trans(self, head_mri_trans):
+        """Set rotation and translation parameters from a transformation matrix
+
+        Parameters
+        ----------
+        head_mri_trans : array, shape (4, 4)
+            Transformation matrix from head to MRI space.
+        """
+        x, y, z = -self.mri_origin[0]
+        mri_tgt_trans = translation(x, y, z)
+        head_tgt_trans = np.dot(mri_tgt_trans, head_mri_trans)
+
+        x, y, z = self.hsp.nasion[0]
+        src_hsp_trans = translation(x, y, z)
+        src_tgt_trans = np.dot(head_tgt_trans, src_hsp_trans)
+
+        rot_x, rot_y, rot_z = rotation_angles(src_tgt_trans[:3, :3])
+        x, y, z = src_tgt_trans[:3, 3]
+
+        self.rot_x = rot_x
+        self.rot_y = rot_y
+        self.rot_z = rot_z
+        self.trans_x = x
+        self.trans_y = y
+        self.trans_z = z
+
+    def save_trans(self, fname):
+        """Save the head-mri transform as a fif file
+
+        Parameters
+        ----------
+        fname : str
+            Target file path.
+        """
+        if not self.can_save:
+            raise RuntimeError("Not enough information for saving transform")
+        trans = self.head_mri_trans
+        dig = deepcopy(self.hsp.fid_dig)
+        for i in xrange(len(dig)):
+            dig[i]['r'] = apply_trans(trans, dig[i]['r'])
+        info = {'to': FIFF.FIFFV_COORD_MRI, 'from': FIFF.FIFFV_COORD_HEAD,
+                'trans': trans, 'dig': dig}
+        write_trans(fname, info)
+
+
+class CoregFrameHandler(Handler):
+    """Handler that checks for unfinished processes before closing its window
+    """
+    def close(self, info, is_ok):
+        if info.object.queue.unfinished_tasks:
+            msg = ("Can not close the window while saving is still in "
+                   "progress. Please wait until all MRIs are processed.")
+            title = "Saving Still in Progress"
+            information(None, msg, title)
+            return False
+        else:
+            return True
+
+
+class CoregPanel(HasPrivateTraits):
+    model = Instance(CoregModel)
+
+    # parameters
+    reset_params = Button(label='Reset')
+    n_scale_params = DelegatesTo('model')
+    scale_step = Float(1.01)
+    scale_x = DelegatesTo('model')
+    scale_x_dec = Button('-')
+    scale_x_inc = Button('+')
+    scale_y = DelegatesTo('model')
+    scale_y_dec = Button('-')
+    scale_y_inc = Button('+')
+    scale_z = DelegatesTo('model')
+    scale_z_dec = Button('-')
+    scale_z_inc = Button('+')
+    rot_step = Float(0.01)
+    rot_x = DelegatesTo('model')
+    rot_x_dec = Button('-')
+    rot_x_inc = Button('+')
+    rot_y = DelegatesTo('model')
+    rot_y_dec = Button('-')
+    rot_y_inc = Button('+')
+    rot_z = DelegatesTo('model')
+    rot_z_dec = Button('-')
+    rot_z_inc = Button('+')
+    trans_step = Float(0.001)
+    trans_x = DelegatesTo('model')
+    trans_x_dec = Button('-')
+    trans_x_inc = Button('+')
+    trans_y = DelegatesTo('model')
+    trans_y_dec = Button('-')
+    trans_y_inc = Button('+')
+    trans_z = DelegatesTo('model')
+    trans_z_dec = Button('-')
+    trans_z_inc = Button('+')
+
+    # fitting
+    has_fid_data = DelegatesTo('model')
+    has_pts_data = DelegatesTo('model')
+    # fitting with scaling
+    fits_hsp_points = Button(label='Fit Head Shape')
+    fits_fid = Button(label='Fit Fiducials')
+    fits_ap = Button(label='Fit LPA/RPA')
+    # fitting without scaling
+    fit_hsp_points = Button(label='Fit Head Shape')
+    fit_fid = Button(label='Fit Fiducials')
+    fit_ap = Button(label='Fit LPA/RPA')
+
+    # fit info
+    fid_eval_str = DelegatesTo('model')
+    points_eval_str = DelegatesTo('model')
+
+    # saving
+    can_save = DelegatesTo('model')
+    prepare_bem_model = Bool(True)
+    save = Button(label="Save As...")
+    load_trans = Button
+    queue = Instance(Queue, ())
+    queue_feedback = Str('')
+    queue_current = Str('')
+    queue_len = Int(0)
+    queue_len_str = Property(Str, depends_on=['queue_len'])
+    error = Str('')
+
+    view = View(VGroup(Item('n_scale_params', label='MRI Scaling',
+                            style='custom', show_label=True,
+                            editor=EnumEditor(values={0: '1:No Scaling',
+                                                      1: '2:1 Param',
+                                                      3: '3:3 Params'},
+                                              cols=3)),
+                       VGrid(Item('scale_x', editor=laggy_float_editor,
+                                  show_label=True, tooltip="Scale along "
+                                  "right-left axis",
+                                  enabled_when='n_scale_params > 0'),
+                             Item('scale_x_dec',
+                                  enabled_when='n_scale_params > 0'),
+                             Item('scale_x_inc',
+                                  enabled_when='n_scale_params > 0'),
+                             Item('scale_step', tooltip="Scaling step",
+                                  enabled_when='n_scale_params > 0'),
+                             Item('scale_y', editor=laggy_float_editor,
+                                  show_label=True,
+                                  enabled_when='n_scale_params > 1',
+                                  tooltip="Scale along anterior-posterior "
+                                  "axis"),
+                             Item('scale_y_dec',
+                                  enabled_when='n_scale_params > 1'),
+                             Item('scale_y_inc',
+                                  enabled_when='n_scale_params > 1'),
+                             Label('(Step)'),
+                             Item('scale_z', editor=laggy_float_editor,
+                                  show_label=True,
+                                  enabled_when='n_scale_params > 1',
+                                  tooltip="Scale along superior-inferior "
+                                  "axis"),
+                             Item('scale_z_dec',
+                                  enabled_when='n_scale_params > 1'),
+                             Item('scale_z_inc',
+                                  enabled_when='n_scale_params > 1'),
+                             show_labels=False, columns=4),
+                       HGroup(Item('fits_hsp_points',
+                                   enabled_when='n_scale_params',
+                                   tooltip="Rotate the digitizer head shape "
+                                   "and scale the MRI so as to minimize the "
+                                   "distance from each digitizer point to the "
+                                   "closest MRI point"),
+                              Item('fits_ap',
+                                   enabled_when='n_scale_params == 1',
+                                   tooltip="While leaving the nasion in "
+                                   "place, rotate the digitizer head shape "
+                                   "and scale the MRI so as to minimize the "
+                                   "distance of the two auricular points"),
+                              Item('fits_fid',
+                                   enabled_when='n_scale_params == 1',
+                                   tooltip="Move and rotate the digitizer "
+                                   "head shape, and scale the MRI so as to "
+                                   "minimize the distance of the three "
+                                   "fiducials."),
+                              show_labels=False),
+                       '_',
+                       Label("Translation:"),
+                       VGrid(Item('trans_x', editor=laggy_float_editor,
+                                  show_label=True, tooltip="Move along "
+                                  "right-left axis"),
+                             'trans_x_dec', 'trans_x_inc',
+                             Item('trans_step', tooltip="Movement step"),
+                             Item('trans_y', editor=laggy_float_editor,
+                                  show_label=True, tooltip="Move along "
+                                  "anterior-posterior axis"),
+                             'trans_y_dec', 'trans_y_inc',
+                             Label('(Step)'),
+                             Item('trans_z', editor=laggy_float_editor,
+                                  show_label=True, tooltip="Move along "
+                                  "superior-inferior axis"),
+                             'trans_z_dec', 'trans_z_inc',
+                             show_labels=False, columns=4),
+                       Label("Rotation:"),
+                       VGrid(Item('rot_x', editor=laggy_float_editor,
+                                  show_label=True, tooltip="Rotate along "
+                                  "right-left axis"),
+                             'rot_x_dec', 'rot_x_inc',
+                             Item('rot_step', tooltip="Rotation step"),
+                             Item('rot_y', editor=laggy_float_editor,
+                                  show_label=True, tooltip="Rotate along "
+                                  "anterior-posterior axis"),
+                             'rot_y_dec', 'rot_y_inc',
+                             Label('(Step)'),
+                             Item('rot_z', editor=laggy_float_editor,
+                                  show_label=True, tooltip="Rotate along "
+                                  "superior-inferior axis"),
+                             'rot_z_dec', 'rot_z_inc',
+                             show_labels=False, columns=4),
+                       # buttons
+                       HGroup(Item('fit_hsp_points',
+                                   enabled_when='has_pts_data',
+                                   tooltip="Rotate the head shape (around the "
+                                   "nasion) so as to minimize the distance "
+                                   "from each head shape point to its closest "
+                                   "MRI point"),
+                              Item('fit_ap', enabled_when='has_fid_data',
+                                   tooltip="Try to match the LPA and the RPA, "
+                                   "leaving the Nasion in place"),
+                              Item('fit_fid', enabled_when='has_fid_data',
+                                   tooltip="Move and rotate the head shape so "
+                                   "as to minimize the distance between the "
+                                   "MRI and head shape fiducials"),
+                              Item('load_trans', enabled_when='has_fid_data'),
+                              show_labels=False),
+                       '_',
+                       Item('fid_eval_str', style='readonly'),
+                       Item('points_eval_str', style='readonly'),
+                       '_',
+                       HGroup(Item('prepare_bem_model'),
+                              Label("Run mne_prepare_bem_model"),
+                              show_labels=False,
+                              enabled_when='n_scale_params > 0'),
+                       HGroup(Item('save', enabled_when='can_save',
+                                   tooltip="Save the trans file and (if "
+                                   "scaling is enabled) the scaled MRI"),
+                              Item('reset_params', tooltip="Reset all "
+                                   "coregistration parameters"),
+                              show_labels=False),
+                       Item('queue_feedback', style='readonly'),
+                       Item('queue_current', style='readonly'),
+                       Item('queue_len_str', style='readonly'),
+                       show_labels=False),
+                kind='panel', buttons=[UndoButton])
+
+    def __init__(self, *args, **kwargs):
+        super(CoregPanel, self).__init__(*args, **kwargs)
+
+        # setup save worker
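+        # Each queued job is a (description, callable, args, kwargs) tuple,
+        # as returned by CoregModel.get_scaling_job and
+        # get_prepare_bem_model_job and enqueued from _save_fired below.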
+        def worker():
+            while True:
+                desc, cmd, args, kwargs = self.queue.get()
+
+                self.queue_len -= 1
+                self.queue_current = 'Processing: %s' % desc
+
+                # task
+                try:
+                    cmd(*args, **kwargs)
+                except Exception as err:
+                    self.error = str(err)
+                    res = "Error in %s"
+                else:
+                    res = "Done: %s"
+
+                # finalize
+                self.queue_current = ''
+                self.queue_feedback = res % desc
+                self.queue.task_done()
+
+        t = Thread(target=worker)
+        t.daemon = True
+        t.start()
+
+    @cached_property
+    def _get_queue_len_str(self):
+        if self.queue_len:
+            return "Queue length: %i" % self.queue_len
+        else:
+            return ''
+
+    @cached_property
+    def _get_rotation(self):
+        rot = np.array([self.rot_x, self.rot_y, self.rot_z])
+        return rot
+
+    @cached_property
+    def _get_src_pts(self):
+        return self.hsp_pts - self.hsp_fid[0]
+
+    @cached_property
+    def _get_src_fid(self):
+        return self.hsp_fid - self.hsp_fid[0]
+
+    @cached_property
+    def _get_tgt_origin(self):
+        return self.mri_fid[0] * self.scale
+
+    @cached_property
+    def _get_tgt_pts(self):
+        pts = self.mri_pts * self.scale
+        pts -= self.tgt_origin
+        return pts
+
+    @cached_property
+    def _get_tgt_fid(self):
+        fid = self.mri_fid * self.scale
+        fid -= self.tgt_origin
+        return fid
+
+    @cached_property
+    def _get_translation(self):
+        trans = np.array([self.trans_x, self.trans_y, self.trans_z])
+        return trans
+
+    def _fit_ap_fired(self):
+        GUI.set_busy()
+        self.model.fit_auricular_points()
+        GUI.set_busy(False)
+
+    def _fit_fid_fired(self):
+        GUI.set_busy()
+        self.model.fit_fiducials()
+        GUI.set_busy(False)
+
+    def _fit_hsp_points_fired(self):
+        GUI.set_busy()
+        self.model.fit_hsp_points()
+        GUI.set_busy(False)
+
+    def _fits_ap_fired(self):
+        GUI.set_busy()
+        self.model.fit_scale_auricular_points()
+        GUI.set_busy(False)
+
+    def _fits_fid_fired(self):
+        GUI.set_busy()
+        self.model.fit_scale_fiducials()
+        GUI.set_busy(False)
+
+    def _fits_hsp_points_fired(self):
+        GUI.set_busy()
+        self.model.fit_scale_hsp_points()
+        GUI.set_busy(False)
+
+    def _n_scale_params_changed(self, new):
+        if not new:
+            return
+
+        # Make sure that MNE_ROOT environment variable is set
+        if not assert_env_set(mne_root=True):
+            err = ("MNE_ROOT environment variable could not be set. "
+                   "You will be able to scale MRIs, but the preparatory mne "
+                   "tools will fail. Please specify the MNE_ROOT environment "
+                   "variable. In Python this can be done using:\n\n"
+                   ">>> os.environ['MNE_ROOT'] = '/Applications/mne-2.7.3'")
+            warning(None, err, "MNE_ROOT Not Set")
+
+    def _reset_params_fired(self):
+        self.reset_traits(('n_scale_params', 'scale_x', 'scale_y', 'scale_z',
+                           'rot_x', 'rot_y', 'rot_z', 'trans_x', 'trans_y',
+                           'trans_z'))
+
+    def _rot_x_dec_fired(self):
+        self.rot_x -= self.rot_step
+
+    def _rot_x_inc_fired(self):
+        self.rot_x += self.rot_step
+
+    def _rot_y_dec_fired(self):
+        self.rot_y -= self.rot_step
+
+    def _rot_y_inc_fired(self):
+        self.rot_y += self.rot_step
+
+    def _rot_z_dec_fired(self):
+        self.rot_z -= self.rot_step
+
+    def _rot_z_inc_fired(self):
+        self.rot_z += self.rot_step
+
+    def _load_trans_fired(self):
+        # locate the default trans file to load
+        raw_dir = os.path.dirname(self.model.hsp.file)
+        subject = self.model.mri.subject
+        trans_file = trans_fname.format(raw_dir=raw_dir, subject=subject)
+        dlg = FileDialog(action="open", wildcard=trans_wildcard,
+                         default_path=trans_file)
+        dlg.open()
+        if dlg.return_code != OK:
+            return
+        trans_file = dlg.path
+        self.model.load_trans(trans_file)
+
+    def _save_fired(self):
+        if self.n_scale_params:
+            subjects_dir = self.model.mri.subjects_dir
+            subject_from = self.model.mri.subject
+            subject_to = self.model.raw_subject or self.model.mri.subject
+        else:
+            subject_to = self.model.mri.subject
+
+        # ask for target subject
+        if self.n_scale_params:
+            mridlg = NewMriDialog(subjects_dir=subjects_dir,
+                                  subject_from=subject_from,
+                                  subject_to=subject_to)
+            ui = mridlg.edit_traits(kind='modal')
+            if not ui.result:
+                return
+            subject_to = mridlg.subject_to
+
+        # find bem file to run mne_prepare_bem_model
+        if self.n_scale_params and self.prepare_bem_model:
+            bem_job = self.model.get_prepare_bem_model_job(subject_to)
+        else:
+            bem_job = None
+
+        # find trans file destination
+        raw_dir = os.path.dirname(self.model.hsp.file)
+        trans_file = trans_fname.format(raw_dir=raw_dir, subject=subject_to)
+        dlg = FileDialog(action="save as", wildcard=trans_wildcard,
+                         default_path=trans_file)
+        dlg.open()
+        if dlg.return_code != OK:
+            return
+        trans_file = dlg.path
+        if not trans_file.endswith('.fif'):
+            trans_file = trans_file + '.fif'
+            if os.path.exists(trans_file):
+                answer = confirm(None, "The file %r already exists. Should it "
+                                 "be replaced?", "Overwrite File?")
+                if answer != YES:
+                    return
+
+        # save the trans file
+        try:
+            self.model.save_trans(trans_file)
+        except Exception as e:
+            error(None, str(e), "Error Saving Trans File")
+            return
+
+        # save the scaled MRI
+        if self.n_scale_params:
+            job = self.model.get_scaling_job(subject_to)
+            self.queue.put(job)
+            self.queue_len += 1
+
+            if bem_job is not None:
+                self.queue.put(bem_job)
+                self.queue_len += 1
+
+    def _scale_x_dec_fired(self):
+        step = 1. / self.scale_step
+        self.scale_x *= step
+
+    def _scale_x_inc_fired(self):
+        self.scale_x *= self.scale_step
+
+    def _scale_x_changed(self, old, new):
+        if self.n_scale_params == 1:
+            self.scale_y = new
+            self.scale_z = new
+
+    def _scale_y_dec_fired(self):
+        step = 1. / self.scale_step
+        self.scale_y *= step
+
+    def _scale_y_inc_fired(self):
+        self.scale_y *= self.scale_step
+
+    def _scale_z_dec_fired(self):
+        step = 1. / self.scale_step
+        self.scale_z *= step
+
+    def _scale_z_inc_fired(self):
+        self.scale_z *= self.scale_step
+
+    def _trans_x_dec_fired(self):
+        self.trans_x -= self.trans_step
+
+    def _trans_x_inc_fired(self):
+        self.trans_x += self.trans_step
+
+    def _trans_y_dec_fired(self):
+        self.trans_y -= self.trans_step
+
+    def _trans_y_inc_fired(self):
+        self.trans_y += self.trans_step
+
+    def _trans_z_dec_fired(self):
+        self.trans_z -= self.trans_step
+
+    def _trans_z_inc_fired(self):
+        self.trans_z += self.trans_step
+
+
+class NewMriDialog(HasPrivateTraits):
+    """Dialog to determine the target subject name for a scaled MRI"""
+    subjects_dir = Directory
+    subject_to = Str
+    subject_from = Str
+    subject_to_dir = Property(depends_on=['subjects_dir', 'subject_to'])
+    subject_to_exists = Property(Bool, depends_on='subject_to_dir')
+
+    feedback = Str(' ' * 100)
+    can_overwrite = Bool
+    overwrite = Bool
+    can_save = Bool
+
+    view = View(Item('subject_to', label='New MRI Subject Name', tooltip="A "
+                     "new folder with this name will be created in the "
+                     "current subjects_dir for the scaled MRI files"),
+                Item('feedback', show_label=False, style='readonly'),
+                Item('overwrite', enabled_when='can_overwrite', tooltip="If a "
+                     "subject with the chosen name exists, delete the old "
+                     "subject"),
+                width=500,
+                buttons=[CancelButton,
+                           Action(name='OK', enabled_when='can_save')])
+
+    def _can_overwrite_changed(self, new):
+        if not new:
+            self.overwrite = False
+
+    @cached_property
+    def _get_subject_to_dir(self):
+        return os.path.join(self.subjects_dir, self.subject_to)
+
+    @cached_property
+    def _get_subject_to_exists(self):
+        if not self.subject_to:
+            return False
+        elif os.path.exists(self.subject_to_dir):
+            return True
+        else:
+            return False
+
+    @on_trait_change('subject_to_dir,overwrite')
+    def update_dialog(self):
+        if not self.subject_to:
+            self.feedback = "No subject specified..."
+            self.can_save = False
+            self.can_overwrite = False
+        elif self.subject_to == self.subject_from:
+            self.feedback = "Must be different from MRI source subject..."
+            self.can_save = False
+            self.can_overwrite = False
+        elif self.subject_to_exists:
+            if self.overwrite:
+                self.feedback = "%s will be overwritten." % self.subject_to
+                self.can_save = True
+                self.can_overwrite = True
+            else:
+                self.feedback = "Subject already exists..."
+                self.can_save = False
+                self.can_overwrite = True
+        else:
+            self.feedback = "Name ok."
+            self.can_save = True
+            self.can_overwrite = False
+
+
+def _make_view(tabbed=False, split=False, scene_width=-1):
+    """Create a view for the CoregFrame
+
+    Parameters
+    ----------
+    tabbed : bool
+        Combine the data source panel and the coregistration panel into a
+        single panel with tabs.
+    split : bool
+        Split the main panels with a movable splitter (good for QT4 but
+        unnecessary for wx backend).
+    scene_width : int
+        Specify a minimum width for the 3d scene (in pixels).
+
+    Returns
+    -------
+    view : traits View
+        View object for the CoregFrame.
+    """
+    view_options = VGroup(Item('headview', style='custom'), 'view_options',
+                          show_border=True, show_labels=False, label='View')
+
+    scene = VGroup(Item('scene', show_label=False,
+                        editor=SceneEditor(scene_class=MayaviScene),
+                        dock='vertical', width=500),
+                   view_options)
+
+    data_panel = VGroup(VGroup(Item('subject_panel', style='custom'),
+                               label="MRI Subject", show_border=True,
+                               show_labels=False),
+                        VGroup(Item('lock_fiducials', style='custom',
+                                    editor=EnumEditor(cols=2,
+                                                      values={False: '2:Edit',
+                                                              True: '1:Lock'}),
+                                    enabled_when='fid_ok'),
+                               HGroup('hsp_always_visible',
+                                      Label("Always Show Head Shape Points"),
+                                      show_labels=False),
+                               Item('fid_panel', style='custom'),
+                               label="MRI Fiducials", show_border=True,
+                               show_labels=False),
+                        VGroup(Item('raw_src', style="custom"),
+                               HGroup(Item('distance', show_label=True),
+                                      'omit_points', 'reset_omit_points',
+                                      show_labels=False),
+                               Item('omitted_info', style='readonly',
+                                    show_label=False),
+                               label='Head Shape Source (Raw)',
+                               show_border=True, show_labels=False),
+                        show_labels=False, label="Data Source")
+
+    coreg_panel = VGroup(Item('coreg_panel', style='custom'),
+                         label="Coregistration", show_border=True,
+                         show_labels=False,
+                         enabled_when="fid_panel.locked")
+
+    if split:
+        main_layout = 'split'
+    else:
+        main_layout = 'normal'
+
+    if tabbed:
+        main = HGroup(scene,
+                      Group(data_panel, coreg_panel, show_labels=False,
+                            layout='tabbed'),
+                      layout=main_layout)
+    else:
+        main = HGroup(data_panel, scene, coreg_panel, show_labels=False,
+                      layout=main_layout)
+
+    view = View(main, resizable=True, handler=CoregFrameHandler(),
+                buttons=NoButtons)
+    return view
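+
+# CoregFrame below uses _make_view() with the defaults; for example,
+# _make_view(split=True) would add a movable splitter between the main
+# panels (useful for the Qt4 backend, see the docstring above).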
+
+
+class ViewOptionsPanel(HasTraits):
+    mri_obj = Instance(SurfaceObject)
+    hsp_obj = Instance(PointObject)
+    view = View(VGroup(Item('mri_obj', style='custom',  # show_border=True,
+                            label="MRI Head Surface"),
+                       Item('hsp_obj', style='custom',  # show_border=True,
+                            label="Head Shape Points")),
+                title="View Options")
+
+
+class CoregFrame(HasTraits):
+    """GUI for head-MRI coregistration
+    """
+    model = Instance(CoregModel, ())
+
+    scene = Instance(MlabSceneModel, ())
+    headview = Instance(HeadViewController)
+
+    subject_panel = Instance(SubjectSelectorPanel)
+    fid_panel = Instance(FiducialsPanel)
+    coreg_panel = Instance(CoregPanel)
+    raw_src = DelegatesTo('model', 'hsp')
+
+    # Omit Points
+    distance = Float(5., label="Distance [mm]", desc="Maximal distance for "
+                     "head shape points from MRI in mm")
+    omit_points = Button(label='Omit Points', desc="Omit head shape points "
+                         "for the purpose of the automatic coregistration "
+                         "procedure.")
+    reset_omit_points = Button(label='Reset Omission', desc="Reset the "
+                               "omission of head shape points to include all.")
+    omitted_info = Property(Str, depends_on=['model.hsp.n_omitted'])
+
+    fid_ok = DelegatesTo('model', 'mri.fid_ok')
+    lock_fiducials = DelegatesTo('model')
+    hsp_always_visible = Bool(False, label="Always Show Head Shape")
+
+    # visualization
+    hsp_obj = Instance(PointObject)
+    mri_obj = Instance(SurfaceObject)
+    lpa_obj = Instance(PointObject)
+    nasion_obj = Instance(PointObject)
+    rpa_obj = Instance(PointObject)
+    hsp_lpa_obj = Instance(PointObject)
+    hsp_nasion_obj = Instance(PointObject)
+    hsp_rpa_obj = Instance(PointObject)
+    hsp_visible = Property(depends_on=['hsp_always_visible', 'lock_fiducials'])
+
+    view_options = Button(label="View Options")
+
+    picker = Instance(object)
+
+    view_options_panel = Instance(ViewOptionsPanel)
+
+    # Processing
+    queue = DelegatesTo('coreg_panel')
+
+    view = _make_view()
+
+    def _subject_panel_default(self):
+        return SubjectSelectorPanel(model=self.model.mri.subject_source)
+
+    def _fid_panel_default(self):
+        panel = FiducialsPanel(model=self.model.mri, headview=self.headview)
+        return panel
+
+    def _coreg_panel_default(self):
+        panel = CoregPanel(model=self.model)
+        return panel
+
+    def _headview_default(self):
+        return HeadViewController(scene=self.scene, system='RAS')
+
+    def __init__(self, raw=None, subject=None, subjects_dir=None):
+        super(CoregFrame, self).__init__()
+
+        subjects_dir = get_subjects_dir(subjects_dir)
+        if (subjects_dir is not None) and os.path.isdir(subjects_dir):
+            self.model.mri.subjects_dir = subjects_dir
+
+        if subject is not None:
+            self.model.mri.subject = subject
+
+        if raw is not None:
+            self.model.hsp.file = raw
+
+    @on_trait_change('scene.activated')
+    def _init_plot(self):
+        self.scene.disable_render = True
+
+        lpa_color = defaults['lpa_color']
+        nasion_color = defaults['nasion_color']
+        rpa_color = defaults['rpa_color']
+
+        # MRI scalp
+        color = defaults['mri_color']
+        self.mri_obj = SurfaceObject(points=self.model.mri.points, color=color,
+                                     tri=self.model.mri.tris, scene=self.scene)
+        # on_trait_change was unreliable, so link it another way:
+        self.model.mri.on_trait_change(self._on_mri_src_change, 'tris')
+        self.model.sync_trait('scale', self.mri_obj, 'trans', mutual=False)
+        self.fid_panel.hsp_obj = self.mri_obj
+
+        # MRI Fiducials
+        point_scale = defaults['mri_fid_scale']
+        self.lpa_obj = PointObject(scene=self.scene, color=lpa_color,
+                                   point_scale=point_scale)
+        self.model.mri.sync_trait('lpa', self.lpa_obj, 'points', mutual=False)
+        self.model.sync_trait('scale', self.lpa_obj, 'trans', mutual=False)
+
+        self.nasion_obj = PointObject(scene=self.scene, color=nasion_color,
+                                      point_scale=point_scale)
+        self.model.mri.sync_trait('nasion', self.nasion_obj, 'points',
+                                  mutual=False)
+        self.model.sync_trait('scale', self.nasion_obj, 'trans', mutual=False)
+
+        self.rpa_obj = PointObject(scene=self.scene, color=rpa_color,
+                                   point_scale=point_scale)
+        self.model.mri.sync_trait('rpa', self.rpa_obj, 'points', mutual=False)
+        self.model.sync_trait('scale', self.rpa_obj, 'trans', mutual=False)
+
+        # Digitizer Head Shape
+        color = defaults['hsp_point_color']
+        point_scale = defaults['hsp_points_scale']
+        p = PointObject(view='cloud', scene=self.scene, color=color,
+                        point_scale=point_scale, resolution=5)
+        self.hsp_obj = p
+        self.model.hsp.sync_trait('points', p, mutual=False)
+        self.model.sync_trait('head_mri_trans', p, 'trans', mutual=False)
+        self.sync_trait('hsp_visible', p, 'visible', mutual=False)
+
+        # Digitizer Fiducials
+        point_scale = defaults['hsp_fid_scale']
+        opacity = defaults['hsp_fid_opacity']
+        p = PointObject(scene=self.scene, color=lpa_color, opacity=opacity,
+                        point_scale=point_scale)
+        self.hsp_lpa_obj = p
+        self.model.hsp.sync_trait('lpa', p, 'points', mutual=False)
+        self.model.sync_trait('head_mri_trans', p, 'trans', mutual=False)
+        self.sync_trait('hsp_visible', p, 'visible', mutual=False)
+
+        p = PointObject(scene=self.scene, color=nasion_color, opacity=opacity,
+                        point_scale=point_scale)
+        self.hsp_nasion_obj = p
+        self.model.hsp.sync_trait('nasion', p, 'points', mutual=False)
+        self.model.sync_trait('head_mri_trans', p, 'trans', mutual=False)
+        self.sync_trait('hsp_visible', p, 'visible', mutual=False)
+
+        p = PointObject(scene=self.scene, color=rpa_color, opacity=opacity,
+                        point_scale=point_scale)
+        self.hsp_rpa_obj = p
+        self.model.hsp.sync_trait('rpa', p, 'points', mutual=False)
+        self.model.sync_trait('head_mri_trans', p, 'trans', mutual=False)
+        self.sync_trait('hsp_visible', p, 'visible', mutual=False)
+
+        on_pick = self.scene.mayavi_scene.on_mouse_pick
+        self.picker = on_pick(self.fid_panel._on_pick, type='cell')
+
+        self.headview.left = True
+        self.scene.disable_render = False
+
+        self.view_options_panel = ViewOptionsPanel(mri_obj=self.mri_obj,
+                                                   hsp_obj=self.hsp_obj)
+
+    @cached_property
+    def _get_hsp_visible(self):
+        return self.hsp_always_visible or self.lock_fiducials
+
+    @cached_property
+    def _get_omitted_info(self):
+        if self.model.hsp.n_omitted == 0:
+            return "No points omitted"
+        elif self.model.hsp.n_omitted == 1:
+            return "1 point omitted"
+        else:
+            return "%i points omitted" % self.model.hsp.n_omitted
+
+    def _omit_points_fired(self):
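+        # the GUI exposes the distance in mm, the model expects meters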
+        distance = self.distance / 1000.
+        self.model.omit_hsp_points(distance)
+
+    def _reset_omit_points_fired(self):
+        self.model.omit_hsp_points(0, True)
+
+    @on_trait_change('model.mri.tris')
+    def _on_mri_src_change(self):
+        if self.mri_obj is None:
+            return
+        if not (np.any(self.model.mri.points) and np.any(self.model.mri.tris)):
+            self.mri_obj.clear()
+            return
+
+        self.mri_obj.points = self.model.mri.points
+        self.mri_obj.tri = self.model.mri.tris
+        self.mri_obj.plot()
+
+    # automatically lock fiducials if a good fiducials file is loaded
+    @on_trait_change('model.mri.fid_file')
+    def _on_fid_file_loaded(self):
+        if self.model.mri.fid_file:
+            self.fid_panel.locked = True
+        else:
+            self.fid_panel.locked = False
+
+    def _view_options_fired(self):
+        self.view_options_panel.edit_traits()
diff --git a/mne/gui/_fiducials_gui.py b/mne/gui/_fiducials_gui.py
new file mode 100644
index 0000000..98fff2d
--- /dev/null
+++ b/mne/gui/_fiducials_gui.py
@@ -0,0 +1,462 @@
+"""Mayavi/traits GUI for setting MRI fiducials"""
+
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+from glob import glob
+import os
+
+# allow import without traits
+try:
+    from mayavi.core.ui.mayavi_scene import MayaviScene
+    from mayavi.tools.mlab_scene_model import MlabSceneModel
+    import numpy as np
+    from pyface.api import confirm, FileDialog, OK, YES
+    from traits.api import (HasTraits, HasPrivateTraits, on_trait_change,
+                            cached_property, DelegatesTo, Event, Instance,
+                            Property, Array, Bool, Button, Enum)
+    from traitsui.api import HGroup, Item, VGroup, View
+    from traitsui.menu import NoButtons
+    from tvtk.pyface.scene_editor import SceneEditor
+except:
+    from ..utils import trait_wraith
+    HasTraits = object
+    HasPrivateTraits = object
+    cached_property = trait_wraith
+    on_trait_change = trait_wraith
+    MayaviScene = trait_wraith
+    MlabSceneModel = trait_wraith
+    Array = trait_wraith
+    Bool = trait_wraith
+    Button = trait_wraith
+    DelegatesTo = trait_wraith
+    Enum = trait_wraith
+    Event = trait_wraith
+    Instance = trait_wraith
+    Property = trait_wraith
+    View = trait_wraith
+    Item = trait_wraith
+    HGroup = trait_wraith
+    VGroup = trait_wraith
+    SceneEditor = trait_wraith
+    NoButtons = trait_wraith
+
+from ..coreg import fid_fname, fid_fname_general, head_bem_fname
+from ..fiff import FIFF, write_fiducials
+from ..utils import get_subjects_dir, logger
+from ._file_traits import (BemSource, fid_wildcard, FiducialsSource,
+                           MRISubjectSource, SubjectSelectorPanel)
+from ._viewer import (defaults, HeadViewController, PointObject, SurfaceObject,
+                      headview_borders)
+
+
+class MRIHeadWithFiducialsModel(HasPrivateTraits):
+    """Represent an MRI head shape with fiducials
+
+    Attributes
+    ----------
+    points : array (n_points, 3)
+        MRI head surface points.
+    tris : array (n_tris, 3)
+        Triangles based on points.
+    lpa : array (1, 3)
+        Left pre-auricular point coordinates.
+    nasion : array (1, 3)
+        Nasion coordinates.
+    rpa : array (1, 3)
+        Right pre-auricular point coordinates.
+    """
+    subject_source = Instance(MRISubjectSource, ())
+    bem = Instance(BemSource, ())
+    fid = Instance(FiducialsSource, ())
+
+    fid_file = DelegatesTo('fid', 'file')
+    fid_fname = DelegatesTo('fid', 'fname')
+    fid_points = DelegatesTo('fid', 'points')
+    subjects_dir = DelegatesTo('subject_source')
+    subject = DelegatesTo('subject_source')
+    points = DelegatesTo('bem')
+    tris = DelegatesTo('bem')
+    lpa = Array(float, (1, 3))
+    nasion = Array(float, (1, 3))
+    rpa = Array(float, (1, 3))
+
+    reset = Event(desc="Reset fiducials to the file.")
+
+    # info
+    can_save = Property(depends_on=['fid_file', 'can_save_as'])
+    can_save_as = Property(depends_on=['lpa', 'nasion', 'rpa'])
+    can_reset = Property(depends_on=['fid_file', 'fid.points', 'lpa', 'nasion',
+                                     'rpa'])
+    fid_ok = Property(depends_on=['lpa', 'nasion', 'rpa'], desc="All points "
+                      "are set")
+    default_fid_fname = Property(depends_on=['subjects_dir', 'subject'],
+                                 desc="the default file name for the "
+                                 "fiducials fif file")
+
+    # switch for the GUI (has no effect in the model)
+    lock_fiducials = Bool(False, desc="Used by GIU, has no effect in the "
+                          "model.")
+
+    @on_trait_change('fid_points')
+    def reset_fiducials(self):
+        if self.fid_points is not None:
+            self.lpa = self.fid_points[0:1]
+            self.nasion = self.fid_points[1:2]
+            self.rpa = self.fid_points[2:3]
+
+    def save(self, fname=None):
+        """Save the current fiducials to a file
+
+        Parameters
+        ----------
+        fname : str
+            Destination file path. If None, will use the current fid filename
+            if available, or else use the default pattern.
+        """
+        if fname is None:
+            fname = self.fid_file
+        if not fname:
+            fname = self.default_fid_fname
+
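+        # cardinal points: kind 1 is FIFF.FIFFV_POINT_CARDINAL; idents 1, 2
+        # and 3 are LPA, nasion and RPA respectively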
+        dig = [{'kind': 1, 'ident': 1, 'r': np.array(self.lpa[0])},
+               {'kind': 1, 'ident': 2, 'r': np.array(self.nasion[0])},
+               {'kind': 1, 'ident': 3, 'r': np.array(self.rpa[0])}]
+        write_fiducials(fname, dig, FIFF.FIFFV_COORD_MRI)
+        self.fid_file = fname
+
+    @cached_property
+    def _get_can_reset(self):
+        if not self.fid_file:
+            return False
+        elif np.any(self.lpa != self.fid.points[0:1]):
+            return True
+        elif np.any(self.nasion != self.fid.points[1:2]):
+            return True
+        elif np.any(self.rpa != self.fid.points[2:3]):
+            return True
+        return False
+
+    @cached_property
+    def _get_can_save_as(self):
+        can = not (np.all(self.nasion == self.lpa)
+                   or np.all(self.nasion == self.rpa)
+                   or np.all(self.lpa == self.rpa))
+        return can
+
+    @cached_property
+    def _get_can_save(self):
+        if not self.can_save_as:
+            return False
+        elif self.fid_file:
+            return True
+        elif self.subjects_dir and self.subject:
+            return True
+        else:
+            return False
+
+    @cached_property
+    def _get_default_fid_fname(self):
+        fname = fid_fname.format(subjects_dir=self.subjects_dir,
+                                 subject=self.subject)
+        return fname
+
+    @cached_property
+    def _get_fid_ok(self):
+        return all(np.any(pt) for pt in (self.nasion, self.lpa, self.rpa))
+
+    def _reset_fired(self):
+        self.reset_fiducials()
+
+    # also listen to subjects_dir: if the subject changes as a result of a
+    # change of subjects_dir, no separate 'subject' notification is fired
+    @on_trait_change('subjects_dir,subject')
+    def _subject_changed(self):
+        subject = self.subject
+        subjects_dir = self.subjects_dir
+        if not subjects_dir or not subject:
+            return
+
+        # update bem head
+        path = head_bem_fname.format(subjects_dir=subjects_dir,
+                                     subject=subject)
+        self.bem.file = path
+
+        # find fiducials file
+        path = fid_fname.format(subjects_dir=subjects_dir, subject=subject)
+        if os.path.exists(path):
+            self.fid_file = path
+            self.lock_fiducials = True
+        else:
+            path = fid_fname_general.format(subjects_dir=subjects_dir,
+                                            subject=subject, head='*')
+            fnames = glob(path)
+            if fnames:
+                path = fnames[0]
+                self.fid.file = path
+                self.lock_fiducials = True
+            else:
+                self.fid.reset_traits(['file'])
+                self.lock_fiducials = False
+
+        # this does not seem to happen by itself, so trigger it explicitly:
+        self.reset_fiducials()
+
+
+class FiducialsPanel(HasPrivateTraits):
+    """Set fiducials on an MRI surface"""
+    model = Instance(MRIHeadWithFiducialsModel)
+
+    fid_file = DelegatesTo('model')
+    fid_fname = DelegatesTo('model')
+    lpa = DelegatesTo('model')
+    nasion = DelegatesTo('model')
+    rpa = DelegatesTo('model')
+    can_save = DelegatesTo('model')
+    can_save_as = DelegatesTo('model')
+    can_reset = DelegatesTo('model')
+    fid_ok = DelegatesTo('model')
+    locked = DelegatesTo('model', 'lock_fiducials')
+
+    set = Enum('LPA', 'Nasion', 'RPA')
+    current_pos = Array(float, (1, 3))  # for editing
+
+    save_as = Button(label='Save As...')
+    save = Button(label='Save')
+    reset_fid = Button(label="Reset to File")
+
+    headview = Instance(HeadViewController)
+    hsp_obj = Instance(SurfaceObject)
+
+    picker = Instance(object)
+
+    # the layout of the dialog created
+    view = View(VGroup(Item('fid_file', label='Fiducials File'),
+                       Item('fid_fname', show_label=False, style='readonly'),
+                       Item('set', style='custom'),
+                       Item('current_pos', label='Pos'),
+                       HGroup(Item('save', enabled_when='can_save',
+                                   tooltip="If a filename is currently "
+                                   "specified, save to that file, otherwise "
+                                   "save to the default file name"),
+                              Item('save_as', enabled_when='can_save_as'),
+                              Item('reset_fid', enabled_when='can_reset'),
+                              show_labels=False),
+                       enabled_when="locked==False"))
+
+    def __init__(self, *args, **kwargs):
+        super(FiducialsPanel, self).__init__(*args, **kwargs)
+        self.sync_trait('lpa', self, 'current_pos', mutual=True)
+
+    def _reset_fid_fired(self):
+        self.model.reset = True
+
+    def _save_fired(self):
+        self.model.save()
+
+    def _save_as_fired(self):
+        if self.fid_file:
+            default_path = self.fid_file
+        else:
+            default_path = self.model.default_fid_fname
+
+        dlg = FileDialog(action="save as", wildcard=fid_wildcard,
+                         default_path=default_path)
+        dlg.open()
+        if dlg.return_code != OK:
+            return
+
+        path = dlg.path
+        if not path.endswith('.fif'):
+            path = path + '.fif'
+            if os.path.exists(path):
+                answer = confirm(None, "The file %r already exists. Should it "
+                                 "be replaced?", "Overwrite File?")
+                if answer != YES:
+                    return
+
+        self.model.save(path)
+
+    def _on_pick(self, picker):
+        if self.locked:
+            return
+
+        self.picker = picker
+        n_pos = len(picker.picked_positions)
+
+        if n_pos == 0:
+            logger.debug("GUI: picked empty location")
+            return
+
+        if picker.actor is self.hsp_obj.surf.actor.actor:
+            idxs = []
+            idx = None
+            pt = [picker.pick_position]
+        elif self.hsp_obj.surf.actor.actor in picker.actors:
+            idxs = [i for i in xrange(n_pos) if picker.actors[i] is
+                    self.hsp_obj.surf.actor.actor]
+            idx = idxs[-1]
+            pt = [picker.picked_positions[idx]]
+        else:
+            logger.debug("GUI: picked object other than MRI")
+            return
+
+        round_ = lambda x: round(x, 3)
+        poss = [map(round_, pos) for pos in picker.picked_positions]
+        pos = map(round_, picker.pick_position)
+        msg = ["Pick Event: %i picked_positions:" % n_pos]
+
+        line = str(pos)
+        if idx is None:
+            line += " <-pick_position"
+        msg.append(line)
+
+        for i, pos in enumerate(poss):
+            line = str(pos)
+            if i == idx:
+                line += " <- MRI mesh"
+            elif i in idxs:
+                line += " (<- also MRI mesh)"
+            msg.append(line)
+        logger.debug(os.linesep.join(msg))
+
+        if self.set == 'Nasion':
+            self.nasion = pt
+        elif self.set == 'LPA':
+            self.lpa = pt
+        elif self.set == 'RPA':
+            self.rpa = pt
+        else:
+            raise ValueError("set = %r" % self.set)
+
+    @on_trait_change('set')
+    def _on_set_change(self, obj, name, old, new):
+        self.sync_trait(old.lower(), self, 'current_pos', mutual=True,
+                        remove=True)
+        self.sync_trait(new.lower(), self, 'current_pos', mutual=True)
+        if new == 'Nasion':
+            self.headview.front = True
+        elif new == 'LPA':
+            self.headview.left = True
+        elif new == 'RPA':
+            self.headview.right = True
+
+
+# FiducialsPanel view that allows manipulating all coordinates numerically
+view2 = View(VGroup(Item('fid_file', label='Fiducials File'),
+                    Item('fid_fname', show_label=False, style='readonly'),
+                    Item('set', style='custom'), 'lpa', 'nasion', 'rpa',
+                    HGroup(Item('save', enabled_when='can_save'),
+                           Item('save_as', enabled_when='can_save_as'),
+                           Item('reset_fid', enabled_when='can_reset'),
+                           show_labels=False),
+                    enabled_when="locked==False"))
+
+
+class FiducialsFrame(HasTraits):
+    """GUI for interpolating between two KIT marker files
+
+    Parameters
+    ----------
+    subject : None | str
+        Set the subject which is initially selected.
+    subjects_dir : None | str
+        Override the SUBJECTS_DIR environment variable.
+    """
+    model = Instance(MRIHeadWithFiducialsModel, ())
+
+    scene = Instance(MlabSceneModel, ())
+    headview = Instance(HeadViewController)
+
+    spanel = Instance(SubjectSelectorPanel)
+    panel = Instance(FiducialsPanel)
+
+    mri_obj = Instance(SurfaceObject)
+    point_scale = float(defaults['mri_fid_scale'])
+    lpa_obj = Instance(PointObject)
+    nasion_obj = Instance(PointObject)
+    rpa_obj = Instance(PointObject)
+
+    def _headview_default(self):
+        return HeadViewController(scene=self.scene, system='RAS')
+
+    def _panel_default(self):
+        panel = FiducialsPanel(model=self.model, headview=self.headview)
+        panel.trait_view('view', view2)
+        return panel
+
+    def _spanel_default(self):
+        return SubjectSelectorPanel(model=self.model.subject_source)
+
+    view = View(HGroup(Item('scene',
+                            editor=SceneEditor(scene_class=MayaviScene),
+                            dock='vertical'),
+                       VGroup(headview_borders,
+                              VGroup(Item('spanel', style='custom'),
+                                     label="Subject", show_border=True,
+                                     show_labels=False),
+                              VGroup(Item('panel', style="custom"),
+                                     label="Fiducials", show_border=True,
+                                     show_labels=False),
+                              show_labels=False),
+                       show_labels=False),
+                resizable=True,
+                buttons=NoButtons)
+
+    def __init__(self, subject=None, subjects_dir=None, **kwargs):
+        super(FiducialsFrame, self).__init__(**kwargs)
+
+        subjects_dir = get_subjects_dir(subjects_dir)
+        if subjects_dir is not None:
+            self.spanel.subjects_dir = subjects_dir
+
+        if subject is not None:
+            if subject in self.spanel.subjects:
+                self.spanel.subject = subject
+
+    @on_trait_change('scene.activated')
+    def _init_plot(self):
+        self.scene.disable_render = True
+
+        lpa_color = defaults['lpa_color']
+        nasion_color = defaults['nasion_color']
+        rpa_color = defaults['rpa_color']
+
+        # bem
+        color = defaults['mri_color']
+        self.mri_obj = SurfaceObject(points=self.model.points, color=color,
+                                     tri=self.model.tris, scene=self.scene)
+        self.model.on_trait_change(self._on_mri_src_change, 'tris')
+        self.panel.hsp_obj = self.mri_obj
+
+        # fiducials
+        self.lpa_obj = PointObject(scene=self.scene, color=lpa_color,
+                                   point_scale=self.point_scale)
+        self.panel.sync_trait('lpa', self.lpa_obj, 'points', mutual=False)
+        self.sync_trait('point_scale', self.lpa_obj, mutual=False)
+
+        self.nasion_obj = PointObject(scene=self.scene, color=nasion_color,
+                                      point_scale=self.point_scale)
+        self.panel.sync_trait('nasion', self.nasion_obj, 'points',
+                              mutual=False)
+        self.sync_trait('point_scale', self.nasion_obj, mutual=False)
+
+        self.rpa_obj = PointObject(scene=self.scene, color=rpa_color,
+                                   point_scale=self.point_scale)
+        self.panel.sync_trait('rpa', self.rpa_obj, 'points', mutual=False)
+        self.sync_trait('point_scale', self.rpa_obj, mutual=False)
+
+        self.headview.left = True
+        self.scene.disable_render = False
+
+        # picker
+        self.scene.mayavi_scene.on_mouse_pick(self.panel._on_pick, type='cell')
+
+    def _on_mri_src_change(self):
+        if (not np.any(self.model.points)) or (not np.any(self.model.tris)):
+            self.mri_obj.clear()
+            return
+
+        self.mri_obj.points = self.model.points
+        self.mri_obj.tri = self.model.tris
+        self.mri_obj.plot()
diff --git a/mne/gui/_file_traits.py b/mne/gui/_file_traits.py
new file mode 100644
index 0000000..a0e43a1
--- /dev/null
+++ b/mne/gui/_file_traits.py
@@ -0,0 +1,427 @@
+"""File data sources for traits GUIs"""
+
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+
+import numpy as np
+
+# allow import without traits
+try:
+    from traits.api import (Any, HasTraits, HasPrivateTraits, cached_property,
+                            on_trait_change, Array, Bool, Button, DelegatesTo,
+                            Directory, Enum, Event, File, Instance, Int, List,
+                            Property, Str)
+    from traitsui.api import View, Item, VGroup
+    from pyface.api import (DirectoryDialog, OK, ProgressDialog, error,
+                            information)
+except:
+    from ..utils import trait_wraith
+    HasTraits = object
+    HasPrivateTraits = object
+    cached_property = trait_wraith
+    on_trait_change = trait_wraith
+    Any = trait_wraith
+    Array = trait_wraith
+    Bool = trait_wraith
+    Button = trait_wraith
+    DelegatesTo = trait_wraith
+    Directory = trait_wraith
+    Enum = trait_wraith
+    Event = trait_wraith
+    File = trait_wraith
+    Instance = trait_wraith
+    Int = trait_wraith
+    List = trait_wraith
+    Property = trait_wraith
+    Str = trait_wraith
+    View = trait_wraith
+    Item = trait_wraith
+    VGroup = trait_wraith
+
+from ..fiff import FIFF, Raw, read_fiducials
+from ..surface import read_bem_surfaces
+from ..coreg import _is_mri_subject, create_default_subject
+from ..utils import get_config
+
+
+fid_wildcard = "*.fif"
+trans_wildcard = "*.fif"
+# for wx backend:
+# fid_wildcard = "Fiducials FIFF file (*.fif)|*.fif"
+# trans_wildcard = "Trans File (*.fif)|*.fif"
+
+
+def _expand_path(p):
+    return os.path.abspath(os.path.expandvars(os.path.expanduser(p)))
+
+
+def assert_env_set(mne_root=True, fs_home=False):
+    """Make sure that environment variables are correctly set
+
+    Parameters
+    ----------
+    mne_root : bool
+        Make sure the MNE_ROOT environment variable is set correctly, and the
+        mne bin directory is in the PATH.
+    fs_home : bool
+        Make sure the FREESURFER_HOME environment variable is set correctly.
+
+    Returns
+    -------
+    success : bool
+        Whether the requested environment variables are successfully set or
+        not.
+
+    Notes
+    -----
+    Environment variables are added to ``os.environ`` to make sure that bash
+    tools can find them.
+    """
+    if fs_home:
+        fs_home = os.environ.get('FREESURFER_HOME', None)
+        test_dir = os.path.join('%s', 'subjects', 'fsaverage')
+        while (fs_home is None) or not os.path.exists(test_dir % fs_home):
+            msg = ("Please select the FREESURFER_HOME directory. This is the "
+                   "root directory of the freesurfer installation. In order "
+                   "to avoid this prompt in the future, set the "
+                   "FREESURFER_HOME environment variable. "
+                   "In Python, this can be done with:\n"
+                   ">>> os.environ['FREESURFER_HOME'] = path")
+            information(None, msg, "Select FREESURFER_HOME Directory")
+            msg = "Please select the FREESURFER_HOME Directory"
+            dlg = DirectoryDialog(message=msg, new_directory=False)
+            if dlg.open() == OK:
+                fs_home = dlg.path
+            else:
+                return False
+        os.environ['FREESURFER_HOME'] = fs_home
+
+    if mne_root:
+        mne_root = get_config('MNE_ROOT')
+        test_dir = os.path.join('%s', 'share', 'mne', 'mne_analyze')
+        while (mne_root is None) or not os.path.exists(test_dir % mne_root):
+            msg = ("Please select the MNE_ROOT directory. This is the root "
+                   "directory of the MNE installation. In order to "
+                   "avoid this prompt in the future, set the MNE_ROOT "
+                   "environment variable. "
+                   "In Python, this can be done with:\n"
+                   ">>> os.environ['MNE_ROOT'] = path")
+            information(None, msg, "Select MNE_ROOT Directory")
+            msg = "Please select the MNE_ROOT Directory"
+            dlg = DirectoryDialog(message=msg, new_directory=False)
+            if dlg.open() == OK:
+                mne_root = dlg.path
+            else:
+                return False
+        os.environ['MNE_ROOT'] = mne_root
+
+        # add mne bin directory to PATH
+        mne_bin = os.path.realpath(os.path.join(mne_root, 'bin'))
+        if mne_bin not in map(_expand_path, os.environ['PATH'].split(':')):
+            os.environ['PATH'] += ':' + mne_bin
+
+    return True
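+
+# A minimal sketch of the intended call pattern (the path is a placeholder;
+# normally the dialogs above are used instead of setting the variable here):
+#
+#     import os
+#     os.environ['MNE_ROOT'] = '/usr/local/mne'
+#     if assert_env_set(mne_root=True):
+#         pass  # the mne binaries are now on the PATH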
+
+
+class BemSource(HasTraits):
+    """Expose points and tris of a given BEM file
+
+    Parameters
+    ----------
+    file : File
+        Path to the BEM file (*.fif).
+
+    Attributes
+    ----------
+    points : Array, shape = (n_points, 3)
+        BEM file points.
+    tris : Array, shape = (n_tris, 3)
+        BEM file triangles.
+
+    Notes
+    -----
+    tris is always updated after points, so downstream objects that depend on
+    both should sync to a change in tris.
+    """
+    file = File(exists=True, filter=['*.fif'])
+    points = Array(shape=(None, 3), value=np.empty((0, 3)))
+    tris = Array(shape=(None, 3), value=np.empty((0, 3)))
+
+    @on_trait_change('file')
+    def read_file(self):
+        if os.path.exists(self.file):
+            bem = read_bem_surfaces(self.file)[0]
+            self.points = bem['rr']
+            self.tris = bem['tris']
+        else:
+            self.points = np.empty((0, 3))
+            self.tris = np.empty((0, 3))
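+
+    # Usage sketch (the path is a placeholder): assigning 'file' fires
+    # read_file via on_trait_change, which fills 'points' and 'tris':
+    #
+    #     bem = BemSource()
+    #     bem.file = '/path/to/subjects/sample/bem/sample-head.fif'
+    #     bem.points.shape, bem.tris.shape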
+
+
+class FiducialsSource(HasTraits):
+    """Expose points of a given fiducials fif file
+
+    Parameters
+    ----------
+    file : File
+        Path to a fif file with fiducials (*.fif).
+
+    Attributes
+    ----------
+    points : Array, shape = (n_points, 3)
+        Fiducials file points.
+    """
+    file = File(filter=[fid_wildcard])
+    fname = Property(depends_on='file')
+    points = Property(depends_on='file')
+
+    @cached_property
+    def _get_fname(self):
+        fname = os.path.basename(self.file)
+        return fname
+
+    @cached_property
+    def _get_points(self):
+        if not os.path.exists(self.file):
+            return None
+
+        points = np.zeros((3, 3))
+        fids, _ = read_fiducials(self.file)
+        for fid in fids:
+            ident = fid['ident']
+            if ident == FIFF.FIFFV_POINT_LPA:
+                points[0] = fid['r']
+            elif ident == FIFF.FIFFV_POINT_NASION:
+                points[1] = fid['r']
+            elif ident == FIFF.FIFFV_POINT_RPA:
+                points[2] = fid['r']
+        return points
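+
+    # Usage sketch (the path is a placeholder): 'points' is a (3, 3) array
+    # with rows ordered LPA, nasion, RPA, or None if the file does not exist:
+    #
+    #     fid = FiducialsSource(file='/path/to/sample-fiducials.fif')
+    #     lpa, nasion, rpa = fid.points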
+
+
+class RawSource(HasPrivateTraits):
+    """Expose measurement information from a raw file
+
+    Parameters
+    ----------
+    file : File
+        Path to the BEM file (*.fif).
+
+    Attributes
+    ----------
+    fid : Array, shape = (3, 3)
+        Each row contains the coordinates for one fiducial point, in the order
+        Nasion, RAP, LAP. If no file is set all values are 0.
+    """
+    file = File(exists=True, filter=['*.fif'])
+
+    raw_fname = Property(Str, depends_on='file')
+    raw_dir = Property(depends_on='file')
+    raw = Property(depends_on='file')
+
+    points_filter = Any(desc="Index to select a subset of the head shape "
+                        "points")
+    n_omitted = Property(Int, depends_on=['points_filter'])
+
+    # head shape
+    raw_points = Property(depends_on='raw', desc="Head shape points in the "
+                          "raw file (n x 3 array)")
+    points = Property(depends_on=['raw_points', 'points_filter'], desc="Head "
+                      "shape points selected by the filter (n x 3 array)")
+
+    # fiducials
+    fid_dig = Property(depends_on='raw', desc="Fiducial points (list of dict)")
+    fid_points = Property(depends_on='fid_dig', desc="Fiducial points {ident: "
+                          "point} dict")
+    lpa = Property(depends_on='fid_points', desc="LPA coordinates (1 x 3 "
+                   "array)")
+    nasion = Property(depends_on='fid_points', desc="Nasion coordinates (1 x "
+                      "3 array)")
+    rpa = Property(depends_on='fid_points', desc="RPA coordinates (1 x 3 "
+                   "array)")
+
+    view = View(VGroup(Item('file'),
+                       Item('raw_fname', show_label=False, style='readonly')))
+
+    @cached_property
+    def _get_n_omitted(self):
+        if self.points_filter is None:
+            return 0
+        else:
+            return np.sum(self.points_filter == False)
+
+    @cached_property
+    def _get_raw(self):
+        if self.file:
+            return Raw(self.file)
+
+    @cached_property
+    def _get_raw_dir(self):
+        return os.path.dirname(self.file)
+
+    @cached_property
+    def _get_raw_fname(self):
+        if self.file:
+            return os.path.basename(self.file)
+        else:
+            return '-'
+
+    @cached_property
+    def _get_raw_points(self):
+        if not self.raw:
+            return np.zeros((1, 3))
+
+        points = np.array([d['r'] for d in self.raw.info['dig']
+                           if d['kind'] == FIFF.FIFFV_POINT_EXTRA])
+        return points
+
+    @cached_property
+    def _get_points(self):
+        if self.points_filter is None:
+            return self.raw_points
+        else:
+            return self.raw_points[self.points_filter]
+
+    @cached_property
+    def _get_fid_dig(self):
+        """Fiducials for info['dig']"""
+        if not self.raw:
+            return []
+        dig = self.raw.info['dig']
+        dig = [d for d in dig if d['kind'] == FIFF.FIFFV_POINT_CARDINAL]
+        return dig
+
+    @cached_property
+    def _get_fid_points(self):
+        if not self.raw:
+            return {}
+        digs = {d['ident']: d for d in self.fid_dig}
+        return digs
+
+    @cached_property
+    def _get_nasion(self):
+        if self.fid_points:
+            return self.fid_points[FIFF.FIFFV_POINT_NASION]['r'][None, :]
+        else:
+            return np.zeros((1, 3))
+
+    @cached_property
+    def _get_lpa(self):
+        if self.fid_points:
+            return self.fid_points[FIFF.FIFFV_POINT_LPA]['r'][None, :]
+        else:
+            return np.zeros((1, 3))
+
+    @cached_property
+    def _get_rpa(self):
+        if self.fid_points:
+            return self.fid_points[FIFF.FIFFV_POINT_RPA]['r'][None, :]
+        else:
+            return np.zeros((1, 3))
+
+
+class MRISubjectSource(HasPrivateTraits):
+    """Find subjects in SUBJECTS_DIR and select one
+
+    Parameters
+    ----------
+    subjects_dir : directory
+        SUBJECTS_DIR.
+    subject : str
+        Subject, corresponding to a folder in SUBJECTS_DIR.
+    """
+    refresh = Event(desc="Refresh the subject list based on the directory "
+                    "structure of subjects_dir.")
+
+    # settings
+    subjects_dir = Directory(exists=True)
+    subjects = Property(List(Str), depends_on=['subjects_dir', 'refresh'])
+    subject = Enum(values='subjects')
+
+    # info
+    can_create_fsaverage = Property(Bool, depends_on=['subjects_dir',
+                                                      'subjects'])
+    mri_dir = Property(depends_on=['subjects_dir', 'subject'])
+    bem_pattern = Property(depends_on='mri_dir')
+
+    @cached_property
+    def _get_can_create_fsaverage(self):
+        if not os.path.exists(self.subjects_dir):
+            return False
+        if 'fsaverage' in self.subjects:
+            return False
+        return True
+
+    @cached_property
+    def _get_mri_dir(self):
+        if not self.subject:
+            return
+        elif not self.subjects_dir:
+            return
+        else:
+            return os.path.join(self.subjects_dir, self.subject)
+
+    @cached_property
+    def _get_subjects(self):
+        sdir = self.subjects_dir
+        is_dir = sdir and os.path.isdir(sdir)
+        if is_dir:
+            dir_content = os.listdir(sdir)
+            subjects = [s for s in dir_content if _is_mri_subject(s, sdir)]
+            if len(subjects) == 0:
+                subjects.append('')
+        else:
+            subjects = ['']
+
+        return subjects
+
+    def create_fsaverage(self):
+        if not self.subjects_dir:
+            err = ("No subjects directory is selected. Please specify "
+                   "subjects_dir first.")
+            raise RuntimeError(err)
+
+        if not assert_env_set(mne_root=True, fs_home=True):
+            err = ("Not all files required for creating the fsaverage brain "
+                   "were found. Both mne and freesurfer are required.")
+            raise RuntimeError(err)
+
+        create_default_subject(subjects_dir=self.subjects_dir)
+        self.refresh = True
+        self.subject = 'fsaverage'
+
+
+class SubjectSelectorPanel(HasPrivateTraits):
+    model = Instance(MRISubjectSource)
+
+    can_create_fsaverage = DelegatesTo('model')
+    subjects_dir = DelegatesTo('model')
+    subject = DelegatesTo('model')
+    subjects = DelegatesTo('model')
+
+    create_fsaverage = Button("Copy FsAverage to Subjects Folder",
+                              desc="Copy the files for the fsaverage subject "
+                              "to the subjects directory.")
+
+    view = View(VGroup(Item('subjects_dir', label='subjects_dir'),
+                       'subject',
+                       Item('create_fsaverage', show_label=False,
+                            enabled_when='can_create_fsaverage')))
+
+    def _create_fsaverage_fired(self):
+        # progress dialog with indefinite progress bar
+        title = "Creating FsAverage ..."
+        message = "Copying fsaverage files ..."
+        prog = ProgressDialog(title=title, message=message)
+        prog.open()
+        prog.update(0)
+
+        try:
+            self.model.create_fsaverage()
+        except Exception as err:
+            msg = str(err)
+            error(None, msg, "Error Creating FsAverage")
+            raise
+        finally:
+            prog.close()
diff --git a/mne/gui/_kit2fiff_gui.py b/mne/gui/_kit2fiff_gui.py
new file mode 100644
index 0000000..3b09a01
--- /dev/null
+++ b/mne/gui/_kit2fiff_gui.py
@@ -0,0 +1,518 @@
+"""Mayavi/traits GUI for converting data from KIT systems"""
+
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+from Queue import Queue
+from threading import Thread
+
+import numpy as np
+from scipy.linalg import inv
+
+# allow import without traits
+try:
+    from mayavi.core.ui.mayavi_scene import MayaviScene
+    from mayavi.tools.mlab_scene_model import MlabSceneModel
+    from pyface.api import confirm, error, FileDialog, OK, YES, information
+    from traits.api import (HasTraits, HasPrivateTraits, cached_property,
+                            Instance, Property, Bool, Button, Enum, File, Int,
+                            List, Str, DelegatesTo)
+    from traitsui.api import (View, Item, HGroup, VGroup, spring,
+                              CheckListEditor, EnumEditor, Handler)
+    from traitsui.menu import NoButtons
+    from tvtk.pyface.scene_editor import SceneEditor
+except:
+    from ..utils import trait_wraith
+    HasTraits = object
+    HasPrivateTraits = object
+    Handler = object
+    cached_property = trait_wraith
+    MayaviScene = trait_wraith
+    MlabSceneModel = trait_wraith
+    Bool = trait_wraith
+    Button = trait_wraith
+    DelegatesTo = trait_wraith
+    Enum = trait_wraith
+    File = trait_wraith
+    Instance = trait_wraith
+    Int = trait_wraith
+    List = trait_wraith
+    Property = trait_wraith
+    Str = trait_wraith
+    View = trait_wraith
+    Item = trait_wraith
+    HGroup = trait_wraith
+    VGroup = trait_wraith
+    EnumEditor = trait_wraith
+    NoButtons = trait_wraith
+    CheckListEditor = trait_wraith
+    SceneEditor = trait_wraith
+
+from ..fiff.kit.coreg import read_hsp, read_elp
+from ..fiff.kit.kit import RawKIT, KIT
+from ..transforms import apply_trans, als_ras_trans, als_ras_trans_mm
+from ..coreg import (_decimate_points, fit_matched_points,
+                     get_ras_to_neuromag_trans)
+from ._marker_gui import CombineMarkersPanel, CombineMarkersModel
+from ._viewer import HeadViewController, headview_item, PointObject
+
+
+use_editor = CheckListEditor(cols=5, values=[(i, str(i)) for i in xrange(5)])
+backend_is_wx = False  # is there a way to determine this?
+if backend_is_wx:
+    # wx backend allows labels for wildcards
+    hsp_points_wildcard = ['Head Shape Points (*.txt)|*.txt']
+    hsp_fid_wildcard = ['Head Shape Fiducials (*.txt)|*.txt']
+    kit_con_wildcard = ['Continuous KIT Files (*.sqd;*.con)|*.sqd;*.con']
+else:
+    hsp_points_wildcard = ['*.txt']
+    hsp_fid_wildcard = ['*.txt']
+    kit_con_wildcard = ['*.sqd;*.con']
+
+
+class Kit2FiffModel(HasPrivateTraits):
+    """Data Model for Kit2Fiff conversion
+
+     - Markers are transformed into RAS coordinate system (as are the sensor
+       coordinates).
+     - Head shape digitizer data is transformed into neuromag-like space.
+
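+    A minimal conversion sketch, assuming KIT data and digitizer files are
+    available (all paths below are hypothetical)::
+
+        model = Kit2FiffModel()
+        model.markers.mrk1.file = 'markers_pre.sqd'
+        model.markers.mrk2.file = 'markers_post.sqd'
+        model.sqd_file = 'data.sqd'
+        model.fid_file = 'fiducials.txt'
+        model.hsp_file = 'head_shape.txt'
+        raw = model.get_raw()
+        raw.save('data-raw.fif')
+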
+    """
+    # Input Traits
+    markers = Instance(CombineMarkersModel, ())
+    sqd_file = File(exists=True, filter=kit_con_wildcard)
+    hsp_file = File(exists=True, filter=hsp_points_wildcard, desc="Digitizer "
+                    "head shape")
+    fid_file = File(exists=True, filter=hsp_fid_wildcard, desc="Digitizer "
+                    "fiducials")
+    stim_chs = Enum(">", "<")
+    stim_slope = Enum("-", "+")
+    # Marker Points
+    use_mrk = List(range(5), desc="Which marker points to use for the device "
+                   "head coregistration.")
+
+    # Derived Traits
+    mrk = Property(depends_on=('markers.mrk3.points'))
+
+    # Polhemus Fiducials
+    elp_raw = Property(depends_on=['fid_file'])
+    hsp_raw = Property(depends_on=['hsp_file'])
+    polhemus_neuromag_trans = Property(depends_on=['elp_raw'])
+
+    # Polhemus data (in neuromag space)
+    elp = Property(depends_on=['elp_raw', 'polhemus_neuromag_trans'])
+    fid = Property(depends_on=['elp_raw', 'polhemus_neuromag_trans'])
+    hsp = Property(depends_on=['hsp_raw', 'polhemus_neuromag_trans'])
+
+    # trans
+    dev_head_trans = Property(depends_on=['elp', 'mrk', 'use_mrk'])
+    head_dev_trans = Property(depends_on=['dev_head_trans'])
+
+    # info
+    sqd_fname = Property(Str, depends_on='sqd_file')
+    hsp_fname = Property(Str, depends_on='hsp_file')
+    fid_fname = Property(Str, depends_on='fid_file')
+    can_save = Property(Bool, depends_on=['sqd_file', 'fid', 'elp', 'hsp',
+                                          'dev_head_trans'])
+
+    @cached_property
+    def _get_can_save(self):
+        "Only allow saving when either all or no head shape elements are set."
+        has_sqd = bool(self.sqd_file)
+        if not has_sqd:
+            return False
+
+        has_all_hsp = (np.any(self.dev_head_trans) and np.any(self.hsp)
+                       and np.any(self.elp) and np.any(self.fid))
+        if has_all_hsp:
+            return True
+
+        has_any_hsp = self.hsp_file or self.fid_file or np.any(self.mrk)
+        return not has_any_hsp
+
+    @cached_property
+    def _get_dev_head_trans(self):
+        if (self.mrk is None) or not np.any(self.fid):
+            return np.eye(4)
+
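+        # Estimate the device-to-head transform by matching the marker
+        # positions measured by the MEG sensors (self.mrk, device coordinates)
+        # to the same coils digitized with the Polhemus (self.elp, head
+        # coordinates).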
+        src_pts = self.mrk
+        dst_pts = self.elp
+
+        n_use = len(self.use_mrk)
+        if n_use < 3:
+            error(None, "Estimating the device head transform requires at "
+                  "least 3 marker points. Please adjust the markers used.",
+                  "Not Enough Marker Points")
+            return
+        elif n_use < 5:
+            src_pts = src_pts[self.use_mrk]
+            dst_pts = dst_pts[self.use_mrk]
+
+        trans = fit_matched_points(src_pts, dst_pts, out='trans')
+        return trans
+
+    @cached_property
+    def _get_elp(self):
+        if self.elp_raw is None:
+            return np.empty((0, 3))
+        pts = self.elp_raw[3:]
+        pts = apply_trans(self.polhemus_neuromag_trans, pts)
+        return pts
+
+    @cached_property
+    def _get_elp_raw(self):
+        if not self.fid_file:
+            return
+
+        try:
+            pts = read_elp(self.fid_file)
+        except Exception as err:
+            error(None, str(err), "Error Reading Fiducials")
+            self.reset_traits(['fid_file'])
+            raise
+        else:
+            return pts
+
+    @cached_property
+    def _get_fid(self):
+        if self.elp_raw is None:
+            return np.empty((0, 3))
+        pts = self.elp_raw[:3]
+        pts = apply_trans(self.polhemus_neuromag_trans, pts)
+        return pts
+
+    @cached_property
+    def _get_fid_fname(self):
+        if self.fid_file:
+            return os.path.basename(self.fid_file)
+        else:
+            return '-'
+
+    @cached_property
+    def _get_head_dev_trans(self):
+        return inv(self.dev_head_trans)
+
+    @cached_property
+    def _get_hsp(self):
+        if (self.hsp_raw is None) or not np.any(self.polhemus_neuromag_trans):
+            return np.empty((0, 3))
+        else:
+            pts = apply_trans(self.polhemus_neuromag_trans, self.hsp_raw)
+            return pts
+
+    @cached_property
+    def _get_hsp_fname(self):
+        if self.hsp_file:
+            return os.path.basename(self.hsp_file)
+        else:
+            return '-'
+
+    @cached_property
+    def _get_hsp_raw(self):
+        fname = self.hsp_file
+        if not fname:
+            return
+
+        try:
+            pts = read_hsp(fname)
+
+            n_pts = len(pts)
+            if n_pts > KIT.DIG_POINTS:
+                msg = ("The selected head shape contains {n_in} points, "
+                       "which is more than the recommended maximum ({n_rec}). "
+                       "The file will be automatically downsampled, which "
+                       "might take a while. A better way to downsample is "
+                       "using FastScan.")
+                msg = msg.format(n_in=n_pts, n_rec=KIT.DIG_POINTS)
+                information(None, msg, "Too Many Head Shape Points")
+                pts = _decimate_points(pts, 5)
+
+        except Exception as err:
+            error(None, str(err), "Error Reading Head Shape")
+            self.reset_traits(['hsp_file'])
+            raise
+        else:
+            return pts
+
+    @cached_property
+    def _get_mrk(self):
+        return apply_trans(als_ras_trans, self.markers.mrk3.points)
+
+    @cached_property
+    def _get_polhemus_neuromag_trans(self):
+        if self.elp_raw is None:
+            return
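+        # Build the transform from the raw Polhemus (ALS) frame to the
+        # neuromag head frame: convert the three cardinal points to RAS,
+        # derive the neuromag transform from nasion/LPA/RPA, and compose it
+        # with the ALS-to-RAS conversion so it applies to raw Polhemus points.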
+        pts = apply_trans(als_ras_trans_mm, self.elp_raw[:3])
+        nasion, lpa, rpa = pts
+        trans = get_ras_to_neuromag_trans(nasion, lpa, rpa)
+        trans = np.dot(trans, als_ras_trans_mm)
+        return trans
+
+    @cached_property
+    def _get_sqd_fname(self):
+        if self.sqd_file:
+            return os.path.basename(self.sqd_file)
+        else:
+            return '-'
+
+    def clear_all(self):
+        """Clear all specified input parameters"""
+        self.markers.mrk1.clear = True
+        self.markers.mrk2.clear = True
+        self.reset_traits(['sqd_file', 'hsp_file', 'fid_file'])
+
+    def get_event_info(self):
+        """
+        Return a string with the number of events found for each trigger value
+        """
+        if len(self.events) == 0:
+            return "No events found."
+
+        count = ["Events found:"]
+        events = np.array(self.events)
+        for i in np.unique(events):
+            n = np.sum(events == i)
+            count.append('%3i: %i' % (i, n))
+
+        return os.linesep.join(count)
+
+    def get_raw(self, preload=False):
+        """Create a raw object based on the current model settings
+        """
+        if not self.sqd_file:
+            raise ValueError("sqd file not set")
+
+        raw = RawKIT(self.sqd_file, preload=preload)
+        raw._set_stimchannels(self.stim_chs, self.stim_slope)
+
+        if np.any(self.fid):
+            raw._set_dig_neuromag(self.fid, self.elp, self.hsp,
+                                 self.dev_head_trans)
+        return raw
+
+
+class Kit2FiffFrameHandler(Handler):
+    """Handler that checks for unfinished processes before closing its window
+    """
+    def close(self, info, is_ok):
+        if info.object.kit2fiff_panel.queue.unfinished_tasks:
+            msg = ("Can not close the window while saving is still in "
+                   "progress. Please wait until all files are processed.")
+            title = "Saving Still in Progress"
+            information(None, msg, title)
+            return False
+        else:
+            return True
+
+
+class Kit2FiffPanel(HasPrivateTraits):
+    """Control panel for kit2fiff conversion"""
+    model = Instance(Kit2FiffModel)
+
+    # model copies for view
+    use_mrk = DelegatesTo('model')
+    sqd_file = DelegatesTo('model')
+    hsp_file = DelegatesTo('model')
+    fid_file = DelegatesTo('model')
+    stim_chs = DelegatesTo('model')
+    stim_slope = DelegatesTo('model')
+
+    # info
+    can_save = DelegatesTo('model')
+    sqd_fname = DelegatesTo('model')
+    hsp_fname = DelegatesTo('model')
+    fid_fname = DelegatesTo('model')
+
+    # Source Files
+    reset_dig = Button
+
+    # Visualization
+    scene = Instance(MlabSceneModel)
+    fid_obj = Instance(PointObject)
+    elp_obj = Instance(PointObject)
+    hsp_obj = Instance(PointObject)
+
+    # Output
+    save_as = Button(label='Save FIFF...')
+    clear_all = Button(label='Clear All')
+    queue = Instance(Queue, ())
+    queue_feedback = Str('')
+    queue_current = Str('')
+    queue_len = Int(0)
+    queue_len_str = Property(Str, depends_on=['queue_len'])
+    error = Str('')
+
+    view = View(VGroup(VGroup(Item('sqd_file', label="Data"),
+                              Item('sqd_fname', show_label=False,
+                                   style='readonly'),
+                              Item('hsp_file', label='Dig Head Shape'),
+                              Item('hsp_fname', show_label=False,
+                                   style='readonly'),
+                              Item('fid_file', label='Dig Points'),
+                              Item('fid_fname', show_label=False,
+                                   style='readonly'),
+                              Item('reset_dig', label='Clear Digitizer Files',
+                                   show_label=False),
+                              Item('use_mrk', editor=use_editor,
+                                   style='custom'),
+                              label="Sources", show_border=True),
+                    VGroup(Item('stim_chs', label="Binary Coding",
+                                style='custom',
+                                editor=EnumEditor(values={'>': '1:1 ... 128',
+                                                          '<': '2:128 ... 1',
+                                                          },
+                                                  cols=2),
+                                help="Specifies the bit order in event "
+                                "channels. Assign the first bit (1) to the "
+                                "first or the last trigger channel."),
+                           Item('stim_slope', label="Event Onset",
+                                style='custom',
+                                editor=EnumEditor(
+                                           values={'+': '2:Peak (0 to 5 V)',
+                                                   '-': '1:Trough (5 to 0 V)'},
+                                           cols=2),
+                                help="Whether events are marked by a decrease "
+                                "(trough) or an increase (peak) in trigger "
+                                "channel values"),
+                           label='Events', show_border=True),
+                       HGroup(Item('save_as', enabled_when='can_save'), spring,
+                              'clear_all', show_labels=False),
+                       Item('queue_feedback', show_label=False,
+                            style='readonly'),
+                       Item('queue_current', show_label=False,
+                            style='readonly'),
+                       Item('queue_len_str', show_label=False,
+                            style='readonly'),
+                       ))
+
+    def __init__(self, *args, **kwargs):
+        super(Kit2FiffPanel, self).__init__(*args, **kwargs)
+
+        # setup save worker
+        def worker():
+            while True:
+                raw, fname = self.queue.get()
+                basename = os.path.basename(fname)
+                self.queue_len -= 1
+                self.queue_current = 'Processing: %s' % basename
+
+                # task
+                try:
+                    raw.save(fname, overwrite=True)
+                except Exception as err:
+                    self.error = str(err)
+                    res = "Error saving: %s"
+                else:
+                    res = "Saved: %s"
+
+                # finalize
+                self.queue_current = ''
+                self.queue_feedback = res % basename
+                self.queue.task_done()
+
+        t = Thread(target=worker)
+        t.daemon = True
+        t.start()
+
+        # setup mayavi visualization
+        m = self.model
+        self.fid_obj = PointObject(scene=self.scene, color=(25, 225, 25),
+                                   point_scale=5e-3)
+        m.sync_trait('fid', self.fid_obj, 'points', mutual=False)
+        m.sync_trait('head_dev_trans', self.fid_obj, 'trans', mutual=False)
+
+        self.elp_obj = PointObject(scene=self.scene, color=(50, 50, 220),
+                                   point_scale=1e-2, opacity=.2)
+        m.sync_trait('elp', self.elp_obj, 'points', mutual=False)
+        m.sync_trait('head_dev_trans', self.elp_obj, 'trans', mutual=False)
+
+        self.hsp_obj = PointObject(scene=self.scene, color=(200, 200, 200),
+                                   point_scale=2e-3)
+        m.sync_trait('hsp', self.hsp_obj, 'points', mutual=False)
+        m.sync_trait('head_dev_trans', self.hsp_obj, 'trans', mutual=False)
+
+        self.scene.camera.parallel_scale = 0.15
+        self.scene.mlab.view(0, 0, .15)
+
+    def _clear_all_fired(self):
+        self.model.clear_all()
+
+    @cached_property
+    def _get_queue_len_str(self):
+        if self.queue_len:
+            return "Queue length: %i" % self.queue_len
+        else:
+            return ''
+
+    def _reset_dig_fired(self):
+        self.reset_traits(['hsp_file', 'fid_file'])
+
+    def _save_as_fired(self):
+        # create raw
+        try:
+            raw = self.model.get_raw()
+        except Exception as err:
+            error(None, str(err), "Error Creating KIT Raw")
+            raise
+
+        # find default path
+        stem, _ = os.path.splitext(self.sqd_file)
+        if not stem.endswith('raw'):
+            stem += '-raw'
+        default_path = stem + '.fif'
+
+        # save as dialog
+        dlg = FileDialog(action="save as",
+                         wildcard="fiff raw file (*.fif)|*.fif",
+                         default_path=default_path)
+        dlg.open()
+        if dlg.return_code != OK:
+            return
+
+        fname = dlg.path
+        if not fname.endswith('.fif'):
+            fname += '.fif'
+            if os.path.exists(fname):
+                answer = confirm(None, "The file %r already exists. Should it "
+                                 "be replaced?" % fname, "Overwrite File?")
+                if answer != YES:
+                    return
+
+        self.queue.put((raw, fname))
+        self.queue_len += 1
+
+
+class Kit2FiffFrame(HasTraits):
+    """GUI for interpolating between two KIT marker files"""
+    model = Instance(Kit2FiffModel, ())
+    scene = Instance(MlabSceneModel, ())
+    headview = Instance(HeadViewController)
+    marker_panel = Instance(CombineMarkersPanel)
+    kit2fiff_panel = Instance(Kit2FiffPanel)
+
+    view = View(HGroup(VGroup(Item('marker_panel', style='custom'),
+                              show_labels=False),
+                       VGroup(Item('scene',
+                                   editor=SceneEditor(scene_class=MayaviScene),
+                                   dock='vertical', show_label=False),
+                              VGroup(headview_item, show_labels=False),
+                              ),
+                       VGroup(Item('kit2fiff_panel', style='custom'),
+                              show_labels=False),
+                       show_labels=False,
+                      ),
+                handler=Kit2FiffFrameHandler(),
+                height=700, resizable=True, buttons=NoButtons)
+
+    def _headview_default(self):
+        return HeadViewController(scene=self.scene, scale=160, system='RAS')
+
+    def _kit2fiff_panel_default(self):
+        return Kit2FiffPanel(scene=self.scene, model=self.model)
+
+    def _marker_panel_default(self):
+        return CombineMarkersPanel(scene=self.scene, model=self.model.markers,
+                                   trans=als_ras_trans)
diff --git a/mne/gui/_marker_gui.py b/mne/gui/_marker_gui.py
new file mode 100644
index 0000000..fb14b04
--- /dev/null
+++ b/mne/gui/_marker_gui.py
@@ -0,0 +1,447 @@
+"""Mayavi/traits GUI for averaging two sets of KIT marker points"""
+
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+
+import numpy as np
+
+# allow import without traits
+try:
+    from mayavi.core.ui.mayavi_scene import MayaviScene
+    from mayavi.tools.mlab_scene_model import MlabSceneModel
+    from pyface.api import confirm, error, FileDialog, OK, YES
+    from traits.api import (HasTraits, HasPrivateTraits, on_trait_change,
+                            cached_property, Instance, Property, Array, Bool,
+                            Button, Enum, File, Float, List, Str)
+    from traitsui.api import View, Item, HGroup, VGroup, CheckListEditor
+    from traitsui.menu import NoButtons
+    from tvtk.pyface.scene_editor import SceneEditor
+except:
+    from ..utils import trait_wraith
+    HasTraits = object
+    HasPrivateTraits = object
+    cached_property = trait_wraith
+    on_trait_change = trait_wraith
+    MayaviScene = trait_wraith
+    MlabSceneModel = trait_wraith
+    Array = trait_wraith
+    Bool = trait_wraith
+    Button = trait_wraith
+    Enum = trait_wraith
+    File = trait_wraith
+    Float = trait_wraith
+    Instance = trait_wraith
+    Int = trait_wraith
+    List = trait_wraith
+    Property = trait_wraith
+    Str = trait_wraith
+    View = trait_wraith
+    Item = trait_wraith
+    HGroup = trait_wraith
+    VGroup = trait_wraith
+    CheckListEditor = trait_wraith
+    NoButtons = trait_wraith
+    SceneEditor = trait_wraith
+
+from ..transforms import apply_trans, rotation, translation
+from ..coreg import fit_matched_points
+from ..fiff.kit import read_mrk, write_mrk
+from ._viewer import HeadViewController, headview_borders, PointObject
+
+
+backend_is_wx = False  # is there a way to determine this?
+if backend_is_wx:
+    mrk_wildcard = ['Supported Files (*.sqd, *.mrk, *.txt, *.pickled)|'
+                    '*.sqd;*.mrk;*.txt;*.pickled',
+                    'Sqd marker file (*.sqd;*.mrk)|*.sqd;*.mrk',
+                    'Text marker file (*.txt)|*.txt',
+                    'Pickled markers (*.pickled)|*.pickled']
+    mrk_out_wildcard = ["Tab separated values file (*.txt)|*.txt",
+                        "Pickled KIT parameters (*.pickled)|*.pickled"]
+else:
+    mrk_wildcard = ["*.sqd;*.mrk;*.txt;*.pickled"]
+    mrk_out_wildcard = ["*.txt;*.pickled"]
+out_ext = ['.txt', '.pickled']
+
+
+use_editor_v = CheckListEditor(cols=1, values=[(i, str(i)) for i in xrange(5)])
+use_editor_h = CheckListEditor(cols=5, values=[(i, str(i)) for i in xrange(5)])
+
+mrk_view_editable = View(
+        VGroup('file',
+               Item('name', show_label=False, style='readonly'),
+               HGroup(
+                      Item('use', editor=use_editor_v, enabled_when="enabled",
+                           style='custom'),
+                      'points',
+                      ),
+               HGroup(Item('clear', enabled_when="can_save", show_label=False),
+                      Item('save_as', enabled_when="can_save",
+                           show_label=False)),
+                  ))
+
+mrk_view_basic = View(
+        VGroup('file',
+               Item('name', show_label=False, style='readonly'),
+               Item('use', editor=use_editor_h, enabled_when="enabled",
+                    style='custom'),
+               HGroup(Item('clear', enabled_when="can_save", show_label=False),
+                      Item('edit', show_label=False),
+                      Item('save_as', enabled_when="can_save",
+                           show_label=False)),
+                  ))
+
+mrk_view_edit = View(VGroup('points'))
+
+
+class MarkerPoints(HasPrivateTraits):
+    """Represent 5 marker points"""
+    points = Array(float, (5, 3))
+
+    can_save = Property(depends_on='points')
+    save_as = Button()
+
+    view = View(VGroup('points',
+                       Item('save_as', enabled_when='can_save')))
+
+    @cached_property
+    def _get_can_save(self):
+        return np.any(self.points)
+
+    def _save_as_fired(self):
+        dlg = FileDialog(action="save as", wildcard=mrk_out_wildcard,
+                         default_filename=self.name,
+                         default_directory=self.dir)
+        dlg.open()
+        if dlg.return_code != OK:
+            return
+
+        ext = out_ext[dlg.wildcard_index]
+        path = dlg.path
+        if not path.endswith(ext):
+            path = path + ext
+            if os.path.exists(path):
+                answer = confirm(None, "The file %r already exists. Should it "
+                                 "be replaced?" % path, "Overwrite File?")
+                if answer != YES:
+                    return
+        self.save(path)
+
+    def save(self, path):
+        """Save the marker points
+
+        Parameters
+        ----------
+        path : str
+            Path to the file to write. The kind of file to write is determined
+            based on the extension: '.txt' for tab separated text file,
+            '.pickled' for pickled file.
+        """
+        write_mrk(path, self.points)
+
+
+class MarkerPointSource(MarkerPoints):
+    """MarkerPoints subclass for source files"""
+    file = File(filter=mrk_wildcard, exists=True)
+    name = Property(Str, depends_on='file')
+    dir = Property(Str, depends_on='file')
+
+    use = List(range(5), desc="Which points to use for the interpolated "
+               "marker.")
+    enabled = Property(Bool, depends_on=['points', 'use'])
+    clear = Button(desc="Clear the current marker data")
+    edit = Button(desc="Edit the marker coordinates manually")
+
+    view = mrk_view_basic
+
+    @cached_property
+    def _get_enabled(self):
+        return np.any(self.points)
+
+    @cached_property
+    def _get_dir(self):
+        if self.file:
+            return os.path.dirname(self.file)
+
+    @cached_property
+    def _get_name(self):
+        if self.file:
+            return os.path.basename(self.file)
+
+    @on_trait_change('file')
+    def load(self, fname):
+        if not fname:
+            self.reset_traits(['points'])
+            return
+
+        try:
+            pts = read_mrk(fname)
+        except Exception as err:
+            error(None, str(err), "Error Reading mrk")
+            self.reset_traits(['points'])
+        else:
+            self.points = pts
+
+    def _clear_fired(self):
+        self.reset_traits(['file', 'points'])
+
+    def _edit_fired(self):
+        self.edit_traits(view=mrk_view_edit)
+
+
+class MarkerPointDest(MarkerPoints):
+    """MarkerPoints subclass that serves for derived points"""
+    src1 = Instance(MarkerPointSource)
+    src2 = Instance(MarkerPointSource)
+
+    name = Property(Str, depends_on='src1.name,src2.name')
+    dir = Property(Str, depends_on='src1.dir,src2.dir')
+
+    points = Property(Array(float, (5, 3)),
+                      depends_on=['method', 'src1.points', 'src1.use',
+                                  'src2.points', 'src2.use'])
+    enabled = Property(Bool, depends_on=['points'])
+
+    method = Enum('Transform', 'Average', desc="Transform: estimate a rotation"
+                  "/translation from mrk1 to mrk2; Average: use the average "
+                  "of the mrk1 and mrk2 coordinates for each point.")
+
+    view = View(VGroup(Item('method', style='custom'),
+                       Item('save_as', enabled_when='can_save',
+                            show_label=False)))
+
+    @cached_property
+    def _get_dir(self):
+        return self.src1.dir
+
+    @cached_property
+    def _get_name(self):
+        n1 = self.src1.name
+        n2 = self.src2.name
+
+        if not n1:
+            if n2:
+                return n2
+            else:
+                return ''
+        elif not n2:
+            return n1
+
+        if n1 == n2:
+            return n1
+
+        # otherwise, use the longest common prefix of the two names
+        i = 0
+        l1 = len(n1) - 1
+        l2 = len(n2) - 1
+        while n1[i] == n2[i]:
+            if i == l1:
+                return n1
+            elif i == l2:
+                return n2
+
+            i += 1
+
+        return n1[:i]
+
+    @cached_property
+    def _get_enabled(self):
+        return np.any(self.points)
+
+    @cached_property
+    def _get_points(self):
+        # in case only one or no source is enabled
+        if not (self.src1 and self.src1.enabled):
+            if (self.src2 and self.src2.enabled):
+                return self.src2.points
+            else:
+                return np.zeros((5, 3))
+        elif not (self.src2 and self.src2.enabled):
+            return self.src1.points
+
+        # Average method
+        if self.method == 'Average':
+            if len(np.union1d(self.src1.use, self.src2.use)) < 5:
+                error(None, "Need at least one source for each point.",
+                      "Marker Average Error")
+                return np.zeros((5, 3))
+
+            pts = (self.src1.points + self.src2.points) / 2.
+            for i in np.setdiff1d(self.src1.use, self.src2.use):
+                pts[i] = self.src1.points[i]
+            for i in np.setdiff1d(self.src2.use, self.src1.use):
+                pts[i] = self.src2.points[i]
+
+            return pts
+
+        # Transform method
+        idx = np.intersect1d(self.src1.use, self.src2.use, assume_unique=True)
+        if len(idx) < 3:
+            error(None, "Need at least three shared points for trans"
+                  "formation.", "Marker Interpolation Error")
+            return np.zeros((5, 3))
+
+        src_pts = self.src1.points[idx]
+        tgt_pts = self.src2.points[idx]
+        est = fit_matched_points(src_pts, tgt_pts, out='params')
+        rot = np.array(est[:3]) / 2.
+        tra = np.array(est[3:]) / 2.
+
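+        # Halving the estimated rotation and translation parameters gives an
+        # (approximate) transform to the midpoint, so the derived markers lie
+        # halfway between the two measurements.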
+        if len(self.src1.use) == 5:
+            trans = np.dot(translation(*tra), rotation(*rot))
+            pts = apply_trans(trans, self.src1.points)
+        elif len(self.src2.use) == 5:
+            trans = np.dot(translation(*-tra), rotation(*-rot))
+            pts = apply_trans(trans, self.src2.points)
+        else:
+            trans1 = np.dot(translation(*tra), rotation(*rot))
+            pts = apply_trans(trans1, self.src1.points)
+            trans2 = np.dot(translation(*-tra), rotation(*-rot))
+            for i in np.setdiff1d(self.src2.use, self.src1.use):
+                pts[i] = apply_trans(trans2, self.src2.points[i])
+
+        return pts
+
+
+class CombineMarkersModel(HasPrivateTraits):
+    mrk1_file = Instance(File)
+    mrk2_file = Instance(File)
+    mrk1 = Instance(MarkerPointSource)
+    mrk2 = Instance(MarkerPointSource)
+    mrk3 = Instance(MarkerPointDest)
+
+    # stats
+    distance = Property(Str, depends_on=['mrk1.points', 'mrk2.points'])
+
+    def _mrk1_default(self):
+        mrk = MarkerPointSource()
+        return mrk
+
+    def _mrk1_file_default(self):
+        return self.mrk1.trait('file')
+
+    def _mrk2_default(self):
+        mrk = MarkerPointSource()
+        return mrk
+
+    def _mrk2_file_default(self):
+        return self.mrk2.trait('file')
+
+    def _mrk3_default(self):
+        mrk = MarkerPointDest(src1=self.mrk1, src2=self.mrk2)
+        return mrk
+
+    @cached_property
+    def _get_distance(self):
+        if (self.mrk1 is None or self.mrk2 is None
+            or (not np.any(self.mrk1.points))
+            or (not np.any(self.mrk2.points))):
+            return ""
+
+        ds = np.sqrt(np.sum((self.mrk1.points - self.mrk2.points) ** 2, 1))
+        desc = '\t'.join('%.1f mm' % (d * 1000) for d in ds)
+        return desc
+
+
+class CombineMarkersPanel(HasTraits):
+    """Has two marker points sources and interpolates to a third one"""
+    model = Instance(CombineMarkersModel, ())
+
+    # model references for UI
+    mrk1 = Instance(MarkerPointSource)
+    mrk2 = Instance(MarkerPointSource)
+    mrk3 = Instance(MarkerPointDest)
+    distance = Str
+
+    # Visualization
+    scene = Instance(MlabSceneModel)
+    scale = Float(5e-3)
+    mrk1_obj = Instance(PointObject)
+    mrk2_obj = Instance(PointObject)
+    mrk3_obj = Instance(PointObject)
+    trans = Array()
+
+    view = View(VGroup(VGroup(Item('mrk1', style='custom'),
+                              Item('mrk1_obj', style='custom'),
+                              show_labels=False,
+                              label="Source Marker 1", show_border=True),
+                       VGroup(Item('mrk2', style='custom'),
+                              Item('mrk2_obj', style='custom'),
+                              show_labels=False,
+                              label="Source Marker 2", show_border=True),
+                       VGroup(Item('distance', style='readonly'),
+                              label='Stats', show_border=True),
+                       VGroup(Item('mrk3', style='custom'),
+                              Item('mrk3_obj', style='custom'),
+                              show_labels=False,
+                              label="New Marker", show_border=True),
+                       ))
+
+    def _mrk1_default(self):
+        return self.model.mrk1
+
+    def _mrk2_default(self):
+        return self.model.mrk2
+
+    def _mrk3_default(self):
+        return self.model.mrk3
+
+    def __init__(self, *args, **kwargs):
+        super(CombineMarkersPanel, self).__init__(*args, **kwargs)
+
+        m = self.model
+        m.sync_trait('distance', self, 'distance', mutual=False)
+
+        self.mrk1_obj = PointObject(scene=self.scene, color=(155, 55, 55),
+                                    point_scale=self.scale)
+        self.sync_trait('trans', self.mrk1_obj, mutual=False)
+        m.mrk1.sync_trait('points', self.mrk1_obj, 'points', mutual=False)
+        m.mrk1.sync_trait('enabled', self.mrk1_obj, 'visible',
+                                   mutual=False)
+
+        self.mrk2_obj = PointObject(scene=self.scene, color=(55, 155, 55),
+                                    point_scale=self.scale)
+        self.sync_trait('trans', self.mrk2_obj, mutual=False)
+        m.mrk2.sync_trait('points', self.mrk2_obj, 'points', mutual=False)
+        m.mrk2.sync_trait('enabled', self.mrk2_obj, 'visible',
+                                   mutual=False)
+
+        self.mrk3_obj = PointObject(scene=self.scene, color=(150, 200, 255),
+                                    point_scale=self.scale)
+        self.sync_trait('trans', self.mrk3_obj, mutual=False)
+        m.mrk3.sync_trait('points', self.mrk3_obj, 'points', mutual=False)
+        m.mrk3.sync_trait('enabled', self.mrk3_obj, 'visible', mutual=False)
+
+
+class CombineMarkersFrame(HasTraits):
+    """GUI for interpolating between two KIT marker files
+
+    Parameters
+    ----------
+    mrk1, mrk2 : str
+        Path to pre- and post measurement marker files (*.sqd) or empty string.
+    """
+    model = Instance(CombineMarkersModel, ())
+    scene = Instance(MlabSceneModel, ())
+    headview = Instance(HeadViewController)
+    panel = Instance(CombineMarkersPanel)
+
+    def _headview_default(self):
+        return HeadViewController(scene=self.scene, system='ALS')
+
+    def _panel_default(self):
+        return CombineMarkersPanel(model=self.model, scene=self.scene)
+
+    view = View(HGroup(Item('scene',
+                            editor=SceneEditor(scene_class=MayaviScene),
+                            dock='vertical'),
+                       VGroup(headview_borders,
+                              Item('panel', style="custom"),
+                              show_labels=False),
+                       show_labels=False,
+                      ),
+                width=1100, resizable=True,
+                buttons=NoButtons)
+
diff --git a/mne/gui/_viewer.py b/mne/gui/_viewer.py
new file mode 100644
index 0000000..b132da6
--- /dev/null
+++ b/mne/gui/_viewer.py
@@ -0,0 +1,340 @@
+"""Mayavi/traits GUI visualization elements"""
+
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import numpy as np
+
+# allow import without traits
+try:
+    from mayavi.mlab import pipeline, text3d
+    from mayavi.modules.glyph import Glyph
+    from mayavi.modules.surface import Surface
+    from mayavi.sources.vtk_data_source import VTKDataSource
+    from mayavi.tools.mlab_scene_model import MlabSceneModel
+    from pyface.api import error
+    from traits.api import (HasTraits, HasPrivateTraits, on_trait_change,
+                            cached_property, Instance, Property, Array, Bool,
+                            Button, Color, Enum, Float, Int, List, Range, Str)
+    from traitsui.api import View, Item, Group, HGroup, VGrid, VGroup
+except:
+    from ..utils import trait_wraith
+    HasTraits = object
+    HasPrivateTraits = object
+    cached_property = trait_wraith
+    on_trait_change = trait_wraith
+    MlabSceneModel = trait_wraith
+    Array = trait_wraith
+    Bool = trait_wraith
+    Button = trait_wraith
+    Color = trait_wraith
+    Enum = trait_wraith
+    Float = trait_wraith
+    Instance = trait_wraith
+    Int = trait_wraith
+    List = trait_wraith
+    Property = trait_wraith
+    Range = trait_wraith
+    Str = trait_wraith
+    View = trait_wraith
+    Item = trait_wraith
+    Group = trait_wraith
+    HGroup = trait_wraith
+    VGroup = trait_wraith
+    Glyph = trait_wraith
+    Surface = trait_wraith
+    VTKDataSource = trait_wraith
+
+from ..transforms import apply_trans
+
+
+headview_item = Item('headview', style='custom', show_label=False)
+headview_borders = VGroup(Item('headview', style='custom', show_label=False),
+                          show_border=True, label='View')
+defaults = {'mri_fid_scale': 1e-2, 'hsp_fid_scale': 3e-2,
+            'hsp_fid_opacity': 0.3, 'hsp_points_scale': 4e-3,
+            'mri_color': (252, 227, 191), 'hsp_point_color': (255, 255, 255),
+            'lpa_color': (255, 0, 0), 'nasion_color': (0, 255, 0),
+            'rpa_color': (0, 0, 255)}
+
+
+class HeadViewController(HasTraits):
+    """
+    Set head views for a given coordinate system
+
+    Parameters
+    ----------
+    system : 'RAS' | 'ALS' | 'ARI'
+        Coordinate system described as initials for directions associated with
+        the x, y, and z axes. Relevant terms are: Anterior, Right, Left,
+        Superior, Inferior.
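+        For example, 'RAS' means +x right, +y anterior, +z superior, while
+        'ALS' means +x anterior, +y left, +z superior.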
+    """
+    system = Enum("RAS", "ALS", "ARI", desc="Coordinate system: directions of "
+                  "the x, y, and z axis.")
+
+    right = Button()
+    front = Button()
+    left = Button()
+    top = Button()
+
+    scale = Float(0.16)
+
+    scene = Instance(MlabSceneModel)
+
+    view = View(VGrid('0', 'top', '0', Item('scale', label='Scale',
+                                            show_label=True),
+                      'right', 'front', 'left', show_labels=False, columns=4))
+
+    @on_trait_change('scene.activated')
+    def _init_view(self):
+        self.scene.parallel_projection = True
+
+        # apparently scene.activated fires several times
+        if self.scene.renderer:
+            self.sync_trait('scale', self.scene.camera, 'parallel_scale')
+            # and apparently this does not happen by default:
+            self.on_trait_change(self.scene.render, 'scale')
+
+    @on_trait_change('top,left,right,front')
+    def on_set_view(self, view, _):
+        if self.scene is None:
+            return
+
+        system = self.system
+        kwargs = None
+
+        if system == 'ALS':
+            if view == 'front':
+                kwargs = dict(azimuth=0, elevation=90, roll=-90)
+            elif view == 'left':
+                kwargs = dict(azimuth=90, elevation=90, roll=180)
+            elif view == 'right':
+                kwargs = dict(azimuth=-90, elevation=90, roll=0)
+            elif view == 'top':
+                kwargs = dict(azimuth=0, elevation=0, roll=-90)
+        elif system == 'RAS':
+            if view == 'front':
+                kwargs = dict(azimuth=90, elevation=90, roll=180)
+            elif view == 'left':
+                kwargs = dict(azimuth=180, elevation=90, roll=90)
+            elif view == 'right':
+                kwargs = dict(azimuth=0, elevation=90, roll=270)
+            elif view == 'top':
+                kwargs = dict(azimuth=90, elevation=0, roll=180)
+        elif system == 'ARI':
+            if view == 'front':
+                kwargs = dict(azimuth=0, elevation=90, roll=90)
+            elif view == 'left':
+                kwargs = dict(azimuth=-90, elevation=90, roll=180)
+            elif view == 'right':
+                kwargs = dict(azimuth=90, elevation=90, roll=0)
+            elif view == 'top':
+                kwargs = dict(azimuth=0, elevation=180, roll=90)
+        else:
+            raise ValueError("Invalid system: %r" % system)
+
+        if kwargs is None:
+            raise ValueError("Invalid view: %r" % view)
+
+        self.scene.mlab.view(distance=None, reset_roll=True,
+                             figure=self.scene.mayavi_scene, **kwargs)
+
+
+class Object(HasPrivateTraits):
+    """Represents a 3d object in a mayavi scene"""
+    points = Array(float, shape=(None, 3))
+    trans = Array()
+    name = Str
+
+    scene = Instance(MlabSceneModel, ())
+    src = Instance(VTKDataSource)
+
+    color = Color()
+    rgbcolor = Property(depends_on='color')
+    point_scale = Float(10, label='Point Scale')
+    opacity = Range(low=0., high=1., value=1.)
+    visible = Bool(True)
+
+    @cached_property
+    def _get_rgbcolor(self):
+        if hasattr(self.color, 'Get'):  # wx
+            color = tuple(v / 255. for v in self.color.Get())
+        else:
+            color = self.color.getRgbF()[:3]
+        return color
+
+    @on_trait_change('trans')
+    def _update_points(self):
+        """Update the location of the plotted points"""
+        if not hasattr(self.src, 'data'):
+            return
+
+        trans = self.trans
+        if np.any(trans):
+            if trans.ndim == 0 or trans.shape == (3,) or trans.shape == (1, 3):
+                pts = self.points * trans
+            elif trans.shape == (3, 3):
+                pts = np.dot(self.points, trans.T)
+            elif trans.shape == (4, 4):
+                pts = apply_trans(trans, self.points)
+            else:
+                err = ("trans must be a scalar, a length 3 sequence, or an "
+                       "array of shape (1,3), (3, 3) or (4, 4). "
+                       "Got %s" % str(trans))
+                error(None, err, "Display Error")
+                raise ValueError(err)
+        else:
+            pts = self.points
+
+        self.src.data.points = pts
+
+
+class PointObject(Object):
+    """Represents a group of individual points in a mayavi scene"""
+    label = Bool(False, enabled_when='visible')
+    text3d = List
+
+    glyph = Instance(Glyph)
+    resolution = Int(8)
+
+    def __init__(self, view='points', *args, **kwargs):
+        """
+        Parameters
+        ----------
+        view : 'points' | 'cloud'
+            Whether the view options should be tailored to individual points
+            or a point cloud.
+        """
+        self._view = view
+        super(PointObject, self).__init__(*args, **kwargs)
+
+    def default_traits_view(self):
+        color = Item('color', show_label=False)
+        scale = Item('point_scale', label='Size')
+        if self._view == 'points':
+            visible = Item('visible', label='Show', show_label=True)
+            view = View(HGroup(visible, color, scale, 'label'))
+        elif self._view == 'cloud':
+            visible = Item('visible', show_label=False)
+            view = View(HGroup(visible, color, scale))
+        else:
+            raise ValueError("PointObject(view = %r)" % self._view)
+        return view
+
+    @on_trait_change('label')
+    def _show_labels(self, show):
+        self.scene.disable_render = True
+        while self.text3d:
+            text = self.text3d.pop()
+            text.remove()
+
+        if show:
+            fig = self.scene.mayavi_scene
+            for i, pt in enumerate(np.array(self.src.data.points)):
+                x, y, z = pt
+                t = text3d(x, y, z, ' %i' % i, scale=.01, color=self.rgbcolor,
+                           figure=fig)
+                self.text3d.append(t)
+
+        self.scene.disable_render = False
+
+    @on_trait_change('visible')
+    def _on_hide(self):
+        if not self.visible:
+            self.label = False
+
+    @on_trait_change('scene.activated')
+    def _plot_points(self):
+        """Add the points to the mayavi pipeline"""
+#         _scale = self.scene.camera.parallel_scale
+
+        if hasattr(self.glyph, 'remove'):
+            self.glyph.remove()
+        if hasattr(self.src, 'remove'):
+            self.src.remove()
+
+        fig = self.scene.mayavi_scene
+
+        x, y, z = self.points.T
+        scatter = pipeline.scalar_scatter(x, y, z)
+        glyph = pipeline.glyph(scatter, color=self.rgbcolor, figure=fig,
+                               scale_factor=self.point_scale, opacity=1.,
+                               resolution=self.resolution)
+        self.src = scatter
+        self.glyph = glyph
+
+        self.sync_trait('point_scale', self.glyph.glyph.glyph, 'scale_factor')
+        self.sync_trait('rgbcolor', self.glyph.actor.property, 'color',
+                        mutual=False)
+        self.sync_trait('visible', self.glyph)
+        self.sync_trait('opacity', self.glyph.actor.property)
+        self.on_trait_change(self._update_points, 'points')
+
+#         self.scene.camera.parallel_scale = _scale
+
+    def _resolution_changed(self, new):
+        if not self.glyph:
+            return
+
+        self.glyph.glyph.glyph_source.glyph_source.phi_resolution = new
+        self.glyph.glyph.glyph_source.glyph_source.theta_resolution = new
+
+
+class SurfaceObject(Object):
+    """Represents a solid object in a mayavi scene
+
+    Notes
+    -----
+    Doesn't automatically update the plot because an update requires both
+    :attr:`points` and :attr:`tri`. Call :meth:`plot` after updating both
+    attributes.
+
+    """
+    rep = Enum("Surface", "Wireframe")
+    tri = Array(int, shape=(None, 3))
+
+    surf = Instance(Surface)
+
+    view = View(HGroup(Item('visible', show_label=False),
+                       Item('color', show_label=False), Item('opacity')))
+
+    def clear(self):
+        if hasattr(self.src, 'remove'):
+            self.src.remove()
+        if hasattr(self.surf, 'remove'):
+            self.surf.remove()
+        self.reset_traits(['src', 'surf'])
+
+    @on_trait_change('scene.activated')
+    def plot(self):
+        """Add the points to the mayavi pipeline"""
+        _scale = self.scene.camera.parallel_scale
+        self.clear()
+
+        if not np.any(self.tri):
+            return
+
+        fig = self.scene.mayavi_scene
+
+        x, y, z = self.points.T
+
+        if self.rep == 'Wireframe':
+            rep = 'wireframe'
+        else:
+            rep = 'surface'
+
+        src = pipeline.triangular_mesh_source(x, y, z, self.tri, figure=fig)
+        surf = pipeline.surface(src, figure=fig, color=self.rgbcolor,
+                                opacity=self.opacity,
+                                representation=rep, line_width=1)
+
+        self.src = src
+        self.surf = surf
+
+        self.sync_trait('visible', self.surf, 'visible')
+        self.sync_trait('rgbcolor', self.surf.actor.property, 'color',
+                        mutual=False)
+        self.sync_trait('opacity', self.surf.actor.property, 'opacity')
+
+        self.scene.camera.parallel_scale = _scale
diff --git a/mne/fiff/kit/tests/__init__.py b/mne/gui/tests/__init__.py
similarity index 100%
copy from mne/fiff/kit/tests/__init__.py
copy to mne/gui/tests/__init__.py
diff --git a/mne/gui/tests/test_coreg_gui.py b/mne/gui/tests/test_coreg_gui.py
new file mode 100644
index 0000000..f7608df
--- /dev/null
+++ b/mne/gui/tests/test_coreg_gui.py
@@ -0,0 +1,157 @@
+# Author: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+
+import numpy as np
+from numpy.testing import assert_allclose
+from nose.tools import (assert_equal, assert_almost_equal, assert_false,
+                        assert_is_instance, assert_less, assert_raises,
+                        assert_true)
+
+import mne
+from mne.datasets import sample
+from mne.utils import _TempDir, requires_traits, requires_mne_fs_in_env
+
+
+data_path = sample.data_path(download=False)
+raw_path = os.path.join(data_path, 'MEG', 'sample', 'sample_audvis_raw.fif')
+subjects_dir = os.path.join(data_path, 'subjects')
+
+tempdir = _TempDir()
+
+trans_dst = os.path.join(tempdir, 'test-trans.fif')
+
+
+@sample.requires_sample_data
+@requires_traits
+def test_coreg_model():
+    """Test CoregModel"""
+    from mne.gui._coreg_gui import CoregModel
+
+    model = CoregModel()
+    assert_raises(RuntimeError, model.save_trans, 'blah.fif')
+
+    model.mri.subjects_dir = subjects_dir
+    model.mri.subject = 'sample'
+
+    assert_false(model.mri.fid_ok)
+    model.mri.lpa = [[-0.06, 0, 0]]
+    model.mri.nasion = [[0, 0.05, 0]]
+    model.mri.rpa = [[0.08, 0, 0]]
+    assert_true(model.mri.fid_ok)
+
+    model.hsp.file = raw_path
+    assert_allclose(model.hsp.lpa, [[-7.137e-2, 0, 5.122e-9]], 1e-4)
+    assert_allclose(model.hsp.rpa, [[ 7.527e-2, 0, 5.588e-9]], 1e-4)
+    assert_allclose(model.hsp.nasion, [[ 3.725e-9, 1.026e-1, 4.191e-9]], 1e-4)
+    assert_true(model.has_fid_data)
+
+    lpa_distance = model.lpa_distance
+    nasion_distance = model.nasion_distance
+    rpa_distance = model.rpa_distance
+    avg_point_distance = np.mean(model.point_distance)
+
+    model.fit_auricular_points()
+    old_x = lpa_distance ** 2 + rpa_distance ** 2
+    new_x = model.lpa_distance ** 2 + model.rpa_distance ** 2
+    assert_less(new_x, old_x)
+
+    model.fit_fiducials()
+    old_x = lpa_distance ** 2 + rpa_distance ** 2 + nasion_distance ** 2
+    new_x = (model.lpa_distance ** 2 + model.rpa_distance ** 2
+             + model.nasion_distance ** 2)
+    assert_less(new_x, old_x)
+
+    model.fit_hsp_points()
+    assert_less(np.mean(model.point_distance), avg_point_distance)
+
+    model.save_trans(trans_dst)
+    trans = mne.read_trans(trans_dst)
+    assert_allclose(trans['trans'], model.head_mri_trans)
+
+    # test restoring trans
+    x, y, z, rot_x, rot_y, rot_z = .1, .2, .05, 1.5, 0.1, -1.2
+    model.trans_x = x
+    model.trans_y = y
+    model.trans_z = z
+    model.rot_x = rot_x
+    model.rot_y = rot_y
+    model.rot_z = rot_z
+    trans = model.head_mri_trans
+    model.reset_traits(["trans_x", "trans_y", "trans_z", "rot_x", "rot_y",
+                        "rot_z"])
+    assert_equal(model.trans_x, 0)
+    model.set_trans(trans)
+    assert_almost_equal(model.trans_x, x)
+    assert_almost_equal(model.trans_y, y)
+    assert_almost_equal(model.trans_z, z)
+    assert_almost_equal(model.rot_x, rot_x)
+    assert_almost_equal(model.rot_y, rot_y)
+    assert_almost_equal(model.rot_z, rot_z)
+
+    # info
+    assert_is_instance(model.fid_eval_str, basestring)
+    assert_is_instance(model.points_eval_str, basestring)
+
+
+@sample.requires_sample_data
+@requires_traits
+@requires_mne_fs_in_env
+def test_coreg_model_with_fsaverage():
+    """Test CoregModel"""
+    from mne.gui._coreg_gui import CoregModel
+
+    mne.create_default_subject(subjects_dir=tempdir)
+
+    model = CoregModel()
+    model.mri.subjects_dir = tempdir
+    model.mri.subject = 'fsaverage'
+    assert_true(model.mri.fid_ok)
+
+    model.hsp.file = raw_path
+    lpa_distance = model.lpa_distance
+    nasion_distance = model.nasion_distance
+    rpa_distance = model.rpa_distance
+    avg_point_distance = np.mean(model.point_distance)
+
+    # test hsp point omission
+    model.trans_y = -0.008
+    model.fit_auricular_points()
+    model.omit_hsp_points(0.02)
+    assert_equal(model.hsp.n_omitted, 1)
+    model.omit_hsp_points(reset=True)
+    assert_equal(model.hsp.n_omitted, 0)
+    model.omit_hsp_points(0.02, reset=True)
+    assert_equal(model.hsp.n_omitted, 1)
+
+    # scale with 1 parameter
+    model.n_scale_params = 1
+
+    model.fit_scale_auricular_points()
+    old_x = lpa_distance ** 2 + rpa_distance ** 2
+    new_x = model.lpa_distance ** 2 + model.rpa_distance ** 2
+    assert_less(new_x, old_x)
+
+    model.fit_scale_fiducials()
+    old_x = lpa_distance ** 2 + rpa_distance ** 2 + nasion_distance ** 2
+    new_x = (model.lpa_distance ** 2 + model.rpa_distance ** 2
+             + model.nasion_distance ** 2)
+    assert_less(new_x, old_x)
+
+    model.fit_scale_hsp_points()
+    avg_point_distance_1param = np.mean(model.point_distance)
+    assert_less(avg_point_distance_1param, avg_point_distance)
+
+    desc, func, args, kwargs = model.get_scaling_job('test')
+    assert_true(isinstance(desc, basestring))
+    assert_equal(args[0], 'fsaverage')
+    assert_equal(args[1], 'test')
+    assert_allclose(args[2], model.scale)
+    assert_equal(kwargs['subjects_dir'], tempdir)
+
+    # scale with 3 parameters
+    model.n_scale_params = 3
+    model.fit_scale_hsp_points()
+    assert_less(np.mean(model.point_distance), avg_point_distance_1param)
diff --git a/mne/gui/tests/test_fiducials_gui.py b/mne/gui/tests/test_fiducials_gui.py
new file mode 100644
index 0000000..dfe1413
--- /dev/null
+++ b/mne/gui/tests/test_fiducials_gui.py
@@ -0,0 +1,68 @@
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+
+from numpy.testing import assert_array_equal
+from nose.tools import assert_true, assert_false, assert_equal
+
+from mne.datasets import sample
+from mne.utils import _TempDir, requires_traits
+
+sample_path = sample.data_path(download=False)
+subjects_dir = os.path.join(sample_path, 'subjects')
+
+tempdir = _TempDir()
+tgt_fname = os.path.join(tempdir, 'test-fiducials.fif')
+
+
+@sample.requires_sample_data
+@requires_traits
+def test_mri_model():
+    """Test MRIHeadWithFiducialsModel Traits Model"""
+    from mne.gui._fiducials_gui import MRIHeadWithFiducialsModel
+
+    model = MRIHeadWithFiducialsModel(subjects_dir=subjects_dir)
+    model.subject = 'sample'
+    assert_equal(model.default_fid_fname[-20:], "sample-fiducials.fif")
+    assert_false(model.can_reset)
+    assert_false(model.can_save)
+    model.lpa = [[-1, 0, 0]]
+    model.nasion = [[ 0, 1, 0]]
+    model.rpa = [[ 1, 0, 0]]
+    assert_false(model.can_reset)
+    assert_true(model.can_save)
+
+    bem_fname = os.path.basename(model.bem.file)
+    assert_false(model.can_reset)
+    assert_equal(bem_fname, 'sample-head.fif')
+
+    model.save(tgt_fname)
+    assert_equal(model.fid_file, tgt_fname)
+
+    # resetting the file should not affect the model's fiducials
+    model.fid_file = ''
+    assert_array_equal(model.lpa, [[-1, 0, 0]])
+    assert_array_equal(model.nasion, [[ 0, 1, 0]])
+    assert_array_equal(model.rpa, [[ 1, 0, 0]])
+
+    # reset model
+    model.lpa = [[0, 0, 0]]
+    model.nasion = [[0, 0, 0]]
+    model.rpa = [[0, 0, 0]]
+    assert_array_equal(model.lpa, [[0, 0, 0]])
+    assert_array_equal(model.nasion, [[0, 0, 0]])
+    assert_array_equal(model.rpa, [[0, 0, 0]])
+
+    # loading the file should assign the model's fiducials
+    model.fid_file = tgt_fname
+    assert_array_equal(model.lpa, [[-1, 0, 0]])
+    assert_array_equal(model.nasion, [[ 0, 1, 0]])
+    assert_array_equal(model.rpa, [[ 1, 0, 0]])
+
+    # after changing from file model should be able to reset
+    model.nasion = [[1, 1, 1]]
+    assert_true(model.can_reset)
+    model.reset = True
+    assert_array_equal(model.nasion, [[ 0, 1, 0]])
diff --git a/mne/gui/tests/test_file_traits.py b/mne/gui/tests/test_file_traits.py
new file mode 100644
index 0000000..953203c
--- /dev/null
+++ b/mne/gui/tests/test_file_traits.py
@@ -0,0 +1,110 @@
+# Author: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+
+from numpy import array
+from numpy.testing import assert_allclose
+from nose.tools import assert_equal, assert_false, assert_raises, assert_true
+
+from mne.datasets import sample
+from mne.fiff.tests import data_dir as fiff_data_dir
+from mne.utils import _TempDir, requires_mne_fs_in_env, requires_traits
+
+data_path = sample.data_path(download=False)
+subjects_dir = os.path.join(data_path, 'subjects')
+bem_path = os.path.join(subjects_dir, 'sample', 'bem', 'sample-5120-bem.fif')
+raw_path = os.path.join(data_path, 'MEG', 'sample', 'sample_audvis_raw.fif')
+fid_path = os.path.join(fiff_data_dir, 'fsaverage-fiducials.fif')
+tempdir = _TempDir()
+
+
+@sample.requires_sample_data
+@requires_traits
+def test_bem_source():
+    """Test BemSource"""
+    from mne.gui._file_traits import BemSource
+
+    bem = BemSource()
+    assert_equal(bem.points.shape, (0, 3))
+    assert_equal(bem.tris.shape, (0, 3))
+
+    bem.file = bem_path
+    assert_equal(bem.points.shape, (2562, 3))
+    assert_equal(bem.tris.shape, (5120, 3))
+
+
+@requires_traits
+@requires_mne_fs_in_env
+def test_assert_env_set():
+    """Test environment variable detection"""
+    from mne.gui._file_traits import assert_env_set
+
+    assert_true(assert_env_set(True, True))
+
+
+@sample.requires_sample_data
+@requires_traits
+def test_fiducials_source():
+    """Test FiducialsSource"""
+    from mne.gui._file_traits import FiducialsSource
+
+    fid = FiducialsSource()
+    fid.file = fid_path
+
+    points = array([[-0.08061612, -0.02908875, -0.04131077],
+                    [ 0.00146763, 0.08506715, -0.03483611],
+                    [ 0.08436285, -0.02850276, -0.04127743]])
+    assert_allclose(fid.points, points, 1e-6)
+
+    fid.file = ''
+    assert_equal(fid.points, None)
+
+
+@sample.requires_sample_data
+@requires_traits
+def test_raw_source():
+    """Test RawSource"""
+    from mne.gui._file_traits import RawSource
+
+    raw = RawSource()
+    assert_equal(raw.raw_fname, '-')
+
+    raw.file = raw_path
+    assert_equal(raw.raw_dir, os.path.dirname(raw_path))
+
+    lpa = array([[ -7.13766068e-02, 0.00000000e+00, 5.12227416e-09]])
+    nasion = array([[  3.72529030e-09, 1.02605611e-01, 4.19095159e-09]])
+    rpa = array([[  7.52676800e-02, 0.00000000e+00, 5.58793545e-09]])
+    assert_allclose(raw.lpa, lpa)
+    assert_allclose(raw.nasion, nasion)
+    assert_allclose(raw.rpa, rpa)
+
+
+@sample.requires_sample_data
+@requires_traits
+def test_subject_source():
+    """Test SubjectSelector"""
+    from mne.gui._file_traits import MRISubjectSource
+
+    mri = MRISubjectSource()
+    mri.subjects_dir = subjects_dir
+    assert_true('sample' in mri.subjects)
+    mri.subject = 'sample'
+
+
+@sample.requires_sample_data
+@requires_traits
+@requires_mne_fs_in_env
+def test_subject_source_with_fsaverage():
+    """Test SubjectSelector"""
+    from mne.gui._file_traits import MRISubjectSource
+
+    mri = MRISubjectSource()
+    assert_false(mri.can_create_fsaverage)
+    assert_raises(RuntimeError, mri.create_fsaverage)
+
+    mri.subjects_dir = tempdir
+    assert_true(mri.can_create_fsaverage)
+    mri.create_fsaverage()
diff --git a/mne/gui/tests/test_kit2fiff_gui.py b/mne/gui/tests/test_kit2fiff_gui.py
new file mode 100644
index 0000000..33631bd
--- /dev/null
+++ b/mne/gui/tests/test_kit2fiff_gui.py
@@ -0,0 +1,65 @@
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+
+import numpy as np
+from numpy.testing import assert_allclose
+from nose.tools import assert_true, assert_false, assert_equal
+
+from mne.fiff.kit.tests import data_dir as kit_data_dir
+from mne.fiff import Raw
+from mne.utils import _TempDir, requires_traits
+
+mrk_pre_path = os.path.join(kit_data_dir, 'test_mrk_pre.sqd')
+mrk_post_path = os.path.join(kit_data_dir, 'test_mrk_post.sqd')
+sqd_path = os.path.join(kit_data_dir, 'test.sqd')
+hsp_path = os.path.join(kit_data_dir, 'test_hsp.txt')
+fid_path = os.path.join(kit_data_dir, 'test_elp.txt')
+fif_path = os.path.join(kit_data_dir, 'test_bin.fif')
+
+tempdir = _TempDir()
+tgt_fname = os.path.join(tempdir, 'test-raw.fif')
+std_fname = os.path.join(tempdir, 'test_std-raw.fif')
+
+
+@requires_traits
+def test_kit2fiff_model():
+    """Test CombineMarkersModel Traits Model"""
+    from mne.gui._kit2fiff_gui import Kit2FiffModel
+
+    model = Kit2FiffModel()
+    assert_false(model.can_save)
+    model.markers.mrk1.file = mrk_pre_path
+    model.markers.mrk2.file = mrk_post_path
+    model.sqd_file = sqd_path
+    model.hsp_file = hsp_path
+    assert_false(model.can_save)
+    model.fid_file = fid_path
+
+    # export raw
+    assert_true(model.can_save)
+    raw_out = model.get_raw()
+    raw_out.save(tgt_fname)
+    raw = Raw(tgt_fname)
+
+    # Compare exported raw with the original binary conversion
+    raw_bin = Raw(fif_path)
+    trans_bin = raw.info['dev_head_t']['trans']
+    assert_equal(raw_bin.info.keys(), raw.info.keys())
+    trans_transform = raw_bin.info['dev_head_t']['trans']
+    assert_allclose(trans_transform, trans_bin, 0.1)
+
+    # Averaging markers
+    model.markers.mrk3.method = "Average"
+    trans_avg = model.dev_head_trans
+    assert_false(np.all(trans_avg == trans_transform))
+    assert_allclose(trans_avg, trans_bin, 0.1)
+
+    # Test exclusion of one marker
+    model.markers.mrk3.method = "Transform"
+    model.use_mrk = [1, 2, 3, 4]
+    assert_false(np.all(model.dev_head_trans == trans_transform))
+    assert_false(np.all(model.dev_head_trans == trans_avg))
+    assert_false(np.all(model.dev_head_trans == np.eye(4)))
diff --git a/mne/gui/tests/test_marker_gui.py b/mne/gui/tests/test_marker_gui.py
new file mode 100644
index 0000000..e9feb00
--- /dev/null
+++ b/mne/gui/tests/test_marker_gui.py
@@ -0,0 +1,47 @@
+# Authors: Christian Brodbeck <christianbrodbeck at nyu.edu>
+#
+# License: BSD (3-clause)
+
+import os
+
+import numpy as np
+from numpy.testing import assert_array_equal
+from nose.tools import assert_true, assert_false
+
+from mne.fiff.kit.tests import data_dir as kit_data_dir
+from mne.fiff.kit import read_mrk
+from mne.utils import _TempDir, requires_traits
+
+mrk_pre_path = os.path.join(kit_data_dir, 'test_mrk_pre.sqd')
+mrk_post_path = os.path.join(kit_data_dir, 'test_mrk_post.sqd')
+mrk_avg_path = os.path.join(kit_data_dir, 'test_mrk.sqd')
+
+tempdir = _TempDir()
+tgt_fname = os.path.join(tempdir, 'test.txt')
+
+
+@requires_traits
+def test_combine_markers_model():
+    """Test CombineMarkersModel Traits Model"""
+    from mne.gui._marker_gui import CombineMarkersModel
+
+    model = CombineMarkersModel()
+    assert_false(model.mrk3.can_save)
+    model.mrk1.file = mrk_pre_path
+    assert_true(model.mrk3.can_save)
+    assert_array_equal(model.mrk1.points, model.mrk3.points)
+
+    model.mrk2.file = mrk_pre_path
+    assert_array_equal(model.mrk1.points, model.mrk3.points)
+
+    model.mrk2._clear_fired()
+    model.mrk2.file = mrk_post_path
+    assert_true(np.any(model.mrk3.points))
+
+    model.mrk3.method = 'Average'
+    mrk_avg = read_mrk(mrk_avg_path)
+    assert_array_equal(model.mrk3.points, mrk_avg)
+
+    model.mrk3.save(tgt_fname)
+    mrk_io = read_mrk(tgt_fname)
+    assert_array_equal(mrk_io, model.mrk3.points)
diff --git a/mne/layouts/__init__.py b/mne/layouts/__init__.py
index e612652..58cbdf4 100644
--- a/mne/layouts/__init__.py
+++ b/mne/layouts/__init__.py
@@ -1 +1,2 @@
-from .layout import Layout, make_eeg_layout, make_grid_layout, read_layout
+from .layout import (Layout, make_eeg_layout, make_grid_layout, read_layout,
+                     find_layout)
diff --git a/mne/layouts/layout.py b/mne/layouts/layout.py
index d1a2b52..67eff81 100644
--- a/mne/layouts/layout.py
+++ b/mne/layouts/layout.py
@@ -1,9 +1,18 @@
+# Authors: Alexandre Gramfort <gramfort at nmr.mgh.harvard.edu>
+#          Denis Engemann <d.engemann at fz-juelich.de>
+#          Martin Luessi <mluessi at nmr.mgh.harvard.edu>
+#          Eric Larson <larson.eric.d at gmail.com>
+#
+# License: Simplified BSD
+
+import warnings
 from collections import defaultdict
 import os.path as op
 import numpy as np
 from scipy.optimize import leastsq
 from ..preprocessing.maxfilter import fit_sphere_to_headshape
 from ..fiff import FIFF, pick_types
+from ..utils import _clean_names
 
 
 class Layout(object):
@@ -11,8 +20,10 @@ class Layout(object):
 
     Parameters
     ----------
-    kind : 'Vectorview-all' | 'CTF-275' | 'Vectorview-grad' | 'Vectorview-mag'
-        Type of layout (can also be custom for EEG)
+    kind : str
+        Type of layout (can also be custom for EEG). Valid layouts are
+        {'Vectorview-all', 'Vectorview-grad', 'Vectorview-mag',  'CTF-275',
+         'magnesWH3600'}
     path : string
         Path to folder where to find the layout file.
 
@@ -64,6 +75,10 @@ class Layout(object):
         f.write(out_str)
         f.close()
 
+    def __repr__(self):
+        return '<Layout | %s - Channels: %s ...>' % (self.kind,
+                                                     ', '.join(self.names[:3]))
+
 
 def _read_lout(fname):
     """Aux function"""
@@ -176,6 +191,11 @@ def make_eeg_layout(info, radius=20, width=5, height=4):
     layout : Layout
         The generated Layout
     """
+    if info['dig'] in [[], None]:
+        raise RuntimeError('Did not find any digitization points in the info. '
+                           'Cannot generate layout based on the subject\'s '
+                           'head shape')
+
     radius_head, origin_head, origin_device = fit_sphere_to_headshape(info)
     inds = pick_types(info, meg=False, eeg=True, ref_meg=False,
                       exclude='bads')
@@ -185,7 +205,7 @@ def make_eeg_layout(info, radius=20, width=5, height=4):
         raise ValueError('No EEG digitization points found')
 
     if not len(hsp) == len(names):
-        raise ValueError('Channel names don\'t match digitization values')
+        raise ValueError("Channel names don't match digitization values")
     hsp = np.array(hsp)
 
     # Move points to origin
@@ -197,7 +217,7 @@ def make_eeg_layout(info, radius=20, width=5, height=4):
     phi = np.arctan2(hsp[:, 1], hsp[:, 0])
 
     # Mark the points that might have caused bad angle estimates
-    iffy = np.nonzero(np.sum(hsp[:, :2] ** 2, axis=-1) ** (1. / 2)
+    iffy = np.nonzero(np.sqrt(np.sum(hsp[:, :2] ** 2, axis=-1))
                       < np.finfo(np.float).eps * 10)
     theta[iffy] = 0
     phi[iffy] = 0
@@ -274,39 +294,105 @@ def make_grid_layout(info, picks=None):
     return layout
 
 
-def find_layout(chs):
-    """Choose a layout based on the channels in the chs parameter
+def find_layout(info=None, ch_type=None, chs=None):
+    """Choose a layout based on the channels in the info 'chs' field
 
     Parameters
     ----------
-    chs : list
-        A list of channels as contained in the info['chs'] entry.
+    info : instance of mne.fiff.meas_info.Info | None
+        The measurement info.
+    ch_type : {'mag', 'grad', 'meg', 'eeg'} | None
+        The channel type for selecting single channel layouts.
+        Defaults to None. Note that this argument is only considered for
+        VectorView-type layouts. Use `meg` to force using the full layout
+        in situations where the info only contains one sensor type.
+    chs : list | None
+        A list of channels as contained in the info['chs'] field. Defaults
+        to None. This keyword is deprecated and will be removed in
+        MNE-Python 0.9. Use `info` instead.
 
     Returns
     -------
     layout : Layout instance | None
         None if layout not found.
     """
+    msg = ("The 'chs' argument is deprecated and will be "
+           "removed in MNE-Python 0.9 Please use "
+           "'info' instead to pass the measurement info")
+    if chs is not None:
+        warnings.warn(msg, DeprecationWarning)
+    elif isinstance(info, list):
+        warnings.warn(msg, DeprecationWarning)
+        chs = info
+    else:
+        chs = info.get('chs')
+    if not chs:
+        raise ValueError('Could not find any channels. The info structure '
+                         'is not valid.')
+
+    our_types = ' or '.join(['`None`', '`mag`', '`grad`', '`meg`', '`eeg`'])
+    if ch_type not in (None, 'meg', 'mag', 'grad', 'eeg'):
+        raise ValueError('Invalid channel type (%s) requested; '
+                         '`ch_type` must be %s' % (ch_type, our_types))
+
+    coil_types = set([ch['coil_type'] for ch in chs])
+    channel_types = set([ch['kind'] for ch in chs])
 
-    coil_types = np.unique([ch['coil_type'] for ch in chs])
     has_vv_mag = FIFF.FIFFV_COIL_VV_MAG_T3 in coil_types
     has_vv_grad = FIFF.FIFFV_COIL_VV_PLANAR_T1 in coil_types
+    has_vv_meg = has_vv_mag and has_vv_grad
+    has_vv_only_mag = has_vv_mag and not has_vv_grad
+    has_vv_only_grad = has_vv_grad and not has_vv_mag
+    is_old_vv = ' ' in chs[0]['ch_name']
+
     has_4D_mag = FIFF.FIFFV_COIL_MAGNES_MAG in coil_types
-    has_CTF_grad = FIFF.FIFFV_COIL_CTF_GRAD in coil_types
-    if has_vv_mag and has_vv_grad:
+    ctf_other_types = (FIFF.FIFFV_COIL_CTF_REF_MAG,
+                       FIFF.FIFFV_COIL_CTF_REF_GRAD,
+                       FIFF.FIFFV_COIL_CTF_OFFDIAG_REF_GRAD)
+    has_CTF_grad = (FIFF.FIFFV_COIL_CTF_GRAD in coil_types or
+                    (FIFF.FIFFV_MEG_CH in channel_types and
+                     any([k in ctf_other_types for k in coil_types])))
+                    # hack due to MNE-C bug in IO of CTF
+
+    has_any_meg = any([has_vv_mag, has_vv_grad, has_4D_mag, has_CTF_grad])
+    has_eeg_coils = (FIFF.FIFFV_COIL_EEG in coil_types and
+                     FIFF.FIFFV_EEG_CH in channel_types)
+    has_eeg_coils_and_meg = has_eeg_coils and has_any_meg
+    has_eeg_coils_only = has_eeg_coils and not has_any_meg
+
+    if ch_type == "meg" and not has_any_meg:
+        raise RuntimeError('No MEG channels present. Cannot find MEG layout.')
+
+    if ch_type == "eeg" and not has_eeg_coils:
+        raise RuntimeError('No EEG channels present. Cannot find EEG layout.')
+
+    if ((has_vv_meg and ch_type is None) or
+        (any([has_vv_mag, has_vv_grad]) and ch_type == 'meg')):
         layout_name = 'Vectorview-all'
-    elif has_vv_mag:
+    elif has_vv_only_mag or (has_vv_meg and ch_type == 'mag'):
         layout_name = 'Vectorview-mag'
-    elif has_vv_grad:
+    elif has_vv_only_grad or (has_vv_meg and ch_type == 'grad'):
         layout_name = 'Vectorview-grad'
+    elif ((has_eeg_coils_only and ch_type in [None, 'eeg']) or
+          (has_eeg_coils_and_meg and ch_type == 'eeg')):
+        if not isinstance(info, dict):
+            raise RuntimeError('Cannot make EEG layout, no measurement info '
+                               'was passed to `find_layout`')
+        return make_eeg_layout(info)
     elif has_4D_mag:
         layout_name = 'magnesWH3600'
     elif has_CTF_grad:
         layout_name = 'CTF-275'
     else:
         return None
-    
-    return read_layout(layout_name)
+
+    layout = read_layout(layout_name)
+    if not is_old_vv:
+        layout.names = _clean_names(layout.names, remove_whitespace=True)
+    if has_CTF_grad:
+        layout.names = _clean_names(layout.names, before_dash=True)
+
+    return layout
+
 
 def _find_topomap_coords(chs, layout=None):
     """Try to guess the MEG system and return appropriate topomap coordinates
@@ -425,7 +511,8 @@ def _pair_grad_sensors(info, layout=None, topomap_coords=True, exclude='bads'):
 
     if topomap_coords:
         shape = (len(pairs), 2, -1)
-        coords = _find_topomap_coords(grad_chs, layout).reshape(shape).mean(axis=1)
+        coords = (_find_topomap_coords(grad_chs, layout)
+                                      .reshape(shape).mean(axis=1))
         return picks, coords
     else:
         return picks
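
The rewritten `find_layout` above now takes the measurement info (plus an optional
`ch_type`) instead of the bare channel list. A minimal usage sketch of the new
signature, assuming a recording with both magnetometers and gradiometers (the file
name is hypothetical):

    from mne.fiff import Raw
    from mne.layouts import find_layout

    info = Raw('sample_audvis_raw.fif').info    # hypothetical file name
    find_layout(info).kind                      # 'Vectorview-all' (mags + grads)
    find_layout(info, ch_type='mag').kind       # 'Vectorview-mag'
    find_layout(info, ch_type='grad').kind      # 'Vectorview-grad'
    # find_layout(info['chs']) still works but now emits a DeprecationWarning
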
diff --git a/mne/layouts/tests/test_layout.py b/mne/layouts/tests/test_layout.py
index d6c7598..a7b909e 100644
--- a/mne/layouts/tests/test_layout.py
+++ b/mne/layouts/tests/test_layout.py
@@ -1,10 +1,22 @@
+# Authors: Alexandre Gramfort <gramfort at nmr.mgh.harvard.edu>
+#          Denis Engemann <d.engemann at fz-juelich.de>
+#          Martin Luessi <mluessi at nmr.mgh.harvard.edu>
+#          Eric Larson <larson.eric.d at gmail.com>
+#
+# License: Simplified BSD
+
+import copy
 import os.path as op
+import warnings
+
 import numpy as np
-from nose.tools import assert_true
 from numpy.testing import assert_array_almost_equal, assert_array_equal
+from nose.tools import assert_true, assert_raises
 
-from mne.layouts import make_eeg_layout, make_grid_layout, read_layout
-from mne.fiff import Raw
+from mne.layouts import (make_eeg_layout, make_grid_layout, read_layout, 
+                         find_layout)
+
+from mne.fiff import Raw, pick_types, pick_info
 from mne.utils import _TempDir
 
 fif_fname = op.join(op.dirname(__file__), '..', '..', 'fiff',
@@ -13,6 +25,13 @@ fif_fname = op.join(op.dirname(__file__), '..', '..', 'fiff',
 lout_path = op.join(op.dirname(__file__), '..', '..', 'fiff',
                     'tests', 'data')
 
+bti_dir = op.join(op.dirname(__file__), '..', '..', 'fiff', 'bti',
+                  'tests', 'data')
+
+fname_ctf_raw = op.join(op.dirname(__file__), '..', '..', 'fiff', 'tests',
+                        'data', 'test_ctf_comp_raw.fif')
+
+
 test_info = {'ch_names': ['ICA 001', 'ICA 002', 'EOG 061'],
  'chs': [{'cal': 1,
    'ch_name': 'ICA 001',
@@ -70,6 +89,8 @@ def test_io_layout_lout():
     assert_array_almost_equal(layout.pos, layout_read.pos, decimal=2)
     assert_true(layout.names, layout_read.names)
 
+    print layout  # test repr
+
 
 def test_io_layout_lay():
     """Test IO with .lay files"""
@@ -105,3 +126,81 @@ def test_make_grid_layout():
     assert_array_equal(lout_new.kind, tmp_name)
     assert_array_equal(lout_orig.pos, lout_new.pos)
     assert_array_equal(lout_orig.names, lout_new.names)
+
+
+def test_find_layout():
+    """Test finding layout"""
+    with warnings.catch_warnings(True) as w:
+        find_layout(chs=test_info['chs'])
+        assert_true(w[0].category == DeprecationWarning)
+    with warnings.catch_warnings(True) as w:
+        find_layout(test_info['chs'])
+        assert_true(w[0].category == DeprecationWarning)
+    assert_raises(ValueError, find_layout, dict())
+    assert_raises(ValueError, find_layout, test_info, ch_type='meep')
+        
+    sample_info = Raw(fif_fname).info
+    grads = pick_types(sample_info, meg='grad')
+    sample_info2 = pick_info(sample_info, grads)
+    
+    mags = pick_types(sample_info, meg='mag')
+    sample_info3 = pick_info(sample_info, mags)
+    
+    # mock new convention
+    sample_info4 = copy.deepcopy(sample_info)
+    for ii, name in enumerate(sample_info4['ch_names']):
+        new = name.replace(' ', '')
+        sample_info4['ch_names'][ii] = new
+        sample_info4['chs'][ii]['ch_name'] = new 
+
+    mags = pick_types(sample_info, meg=False, eeg=True)
+    sample_info5 = pick_info(sample_info, mags)
+
+    lout = find_layout(sample_info, ch_type=None)
+    assert_true(lout.kind == 'Vectorview-all')
+    assert_true(all(' ' in k for k in lout.names))
+
+    lout = find_layout(sample_info2, ch_type='meg')
+    assert_true(lout.kind == 'Vectorview-all')
+    
+    # test new vector-view
+    lout = find_layout(sample_info4, ch_type=None)
+    assert_true(lout.kind == 'Vectorview-all')
+    assert_true(all(not ' ' in k for k in lout.names))
+    
+    lout = find_layout(sample_info, ch_type='grad')
+    assert_true(lout.kind == 'Vectorview-grad')
+    lout = find_layout(sample_info2)
+    assert_true(lout.kind == 'Vectorview-grad')
+    lout = find_layout(sample_info2, ch_type='grad')
+    assert_true(lout.kind == 'Vectorview-grad')
+    lout = find_layout(sample_info2, ch_type='meg')
+    assert_true(lout.kind == 'Vectorview-all')
+
+    
+    lout = find_layout(sample_info, ch_type='mag')
+    assert_true(lout.kind == 'Vectorview-mag')
+    lout = find_layout(sample_info3)
+    assert_true(lout.kind == 'Vectorview-mag')
+    lout = find_layout(sample_info3, ch_type='mag')
+    assert_true(lout.kind == 'Vectorview-mag')
+    lout = find_layout(sample_info3, ch_type='meg')
+    assert_true(lout.kind == 'Vectorview-all')
+    # 
+    lout = find_layout(sample_info, ch_type='eeg')
+    assert_true(lout.kind == 'EEG')
+    lout = find_layout(sample_info5)
+    assert_true(lout.kind == 'EEG')
+    lout = find_layout(sample_info5, ch_type='eeg')
+    assert_true(lout.kind == 'EEG')
+    # no common layout, 'meg' option not supported
+
+    fname_bti_raw = op.join(bti_dir, 'exported4D_linux.fif')
+    lout = find_layout(Raw(fname_bti_raw).info)
+    assert_true(lout.kind == 'magnesWH3600')
+    
+    lout = find_layout(Raw(fname_ctf_raw).info)
+    assert_true(lout.kind == 'CTF-275')
+    
+    sample_info5['dig'] = []
+    assert_raises(RuntimeError, find_layout, sample_info5)
diff --git a/mne/minimum_norm/inverse.py b/mne/minimum_norm/inverse.py
index 6c8a79b..08d838e 100644
--- a/mne/minimum_norm/inverse.py
+++ b/mne/minimum_norm/inverse.py
@@ -1163,7 +1163,7 @@ def make_inverse_operator(info, forward, noise_cov, loose=0.2, depth=0.8,
         | | Fixed constraint, | None      | 0.8       | True      | False           | True         |
         | | Depth weighted    |           |           |           |                 |              |
         +---------------------+-----------+-----------+-----------+-----------------+--------------+
-        | | Fixed constraint  | None      | None      | True      | True            | False        |
+        | | Fixed constraint  | None      | None      | True      | True            | True         |
         +---------------------+-----------+-----------+-----------+-----------------+--------------+
 
     Also note that, if the source space (as stored in the forward solution)
@@ -1189,10 +1189,10 @@ def make_inverse_operator(info, forward, noise_cov, loose=0.2, depth=0.8,
                                  'with depth weighting, the forward solution '
                                  'must be free-orientation and in surface '
                                  'orientation')
-        elif forward['surf_ori'] is True:
+        elif forward['surf_ori'] is False:
             raise ValueError('For a fixed orientation inverse solution '
                              'without depth weighting, the forward solution '
-                             'must not be in surface orientation')
+                             'must be in surface orientation')
 
     # depth=None can use fixed fwd, depth=0<x<1 must use free ori
     if depth is not None:
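
The corrected checks above mean that a fixed-orientation inverse without depth
weighting now requires a surface-oriented forward solution (previously this
combination was rejected), matching the updated table row. A hedged sketch of the
accepted combination, assuming `info` and `noise_cov` are already available and the
forward file name is hypothetical:

    from mne import read_forward_solution
    from mne.minimum_norm import make_inverse_operator

    fwd = read_forward_solution('sample-meg-fwd.fif', surf_ori=True)  # hypothetical file
    inv = make_inverse_operator(info, fwd, noise_cov,
                                loose=None, depth=None, fixed=True)
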
diff --git a/mne/minimum_norm/tests/test_inverse.py b/mne/minimum_norm/tests/test_inverse.py
index 5ae84b3..bbd6dbc 100644
--- a/mne/minimum_norm/tests/test_inverse.py
+++ b/mne/minimum_norm/tests/test_inverse.py
@@ -218,10 +218,6 @@ def test_make_inverse_operator_fixed():
     # can't make fixed inv with depth weighting without free ori fwd
     assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_2,
                   noise_cov, depth=0.8, loose=None, fixed=True)
-    # can't make non-depth-weighted fixed inv with surf_ori fwd
-    # (otherwise the average normal could be employed)
-    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_op,
-                  noise_cov, depth=None, loose=None, fixed=True)
 
     # compare to C solution w/fixed
     inv_op = make_inverse_operator(evoked.info, fwd_op, noise_cov, depth=0.8,
diff --git a/mne/preprocessing/ica.py b/mne/preprocessing/ica.py
index 8f904a6..35d94b6 100644
--- a/mne/preprocessing/ica.py
+++ b/mne/preprocessing/ica.py
@@ -1198,7 +1198,7 @@ class ICA(object):
         sources = fast_dot(self.unmixing_matrix_, pca_data[:n_components])
 
         if include not in (None, []):
-            mask = np.ones(len(data), dtype=np.bool)
+            mask = np.ones(len(sources), dtype=np.bool)
             mask[np.unique(include)] = False
             sources[mask] = 0.
         elif exclude not in (None, []):
@@ -1621,7 +1621,7 @@ def run_ica(raw, n_components, max_pca_components=100,
     - It is highly recommended to bandpass filter ECG and EOG
     data and pass them instead of the channel names as ecg_ch and eog_ch
     arguments.
-    - please check your results. Detection by kurtosis and variance
+    - Please check your results. Detection by kurtosis and variance
     can be powerful but misclassification of brain signals as
     noise cannot be precluded. If you are not sure set those to None.
     - Consider using shorter times for start_find and stop_find than
@@ -1630,7 +1630,7 @@ def run_ica(raw, n_components, max_pca_components=100,
     Example invocation (taking advantage of defaults):
 
     ica = run_ica(raw, n_components=.9, start_find=10000, stop_find=12000,
-                  ecg_channel='MEG 1531', eog_channel='EOG 061')
+                  ecg_ch='MEG 1531', eog_ch='EOG 061')
 
     Parameters
     ----------
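
The first ICA hunk fixes the inclusion mask so that its length follows the number of
estimated sources rather than the number of data channels. A standalone sketch of the
corrected masking logic using toy arrays (not the library code itself):

    import numpy as np

    sources = np.random.randn(5, 1000)        # 5 ICA sources, 1000 samples
    include = [1]
    mask = np.ones(len(sources), dtype=bool)  # one entry per source
    mask[np.unique(include)] = False
    sources[mask] = 0.                        # zero everything except component 1
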
diff --git a/mne/preprocessing/tests/test_ica.py b/mne/preprocessing/tests/test_ica.py
index 44f4cc0..48e353e 100644
--- a/mne/preprocessing/tests/test_ica.py
+++ b/mne/preprocessing/tests/test_ica.py
@@ -261,6 +261,11 @@ def test_ica_additional():
             ica.exclude += [1]
             ica.pick_sources_raw(raw, exclude=[0, 1])
             assert_true(ica.exclude == [0, 1])
+            
+            # test basic include
+            ica.exclude = []
+            ica.pick_sources_raw(raw, include=[1])
+            
 
             ica_raw = ica.sources_as_raw(raw)
             assert_true(ica.exclude == [ica_raw.ch_names.index(e) for e in
diff --git a/mne/source_estimate.py b/mne/source_estimate.py
index 3e2a631..2500c93 100644
--- a/mne/source_estimate.py
+++ b/mne/source_estimate.py
@@ -20,7 +20,7 @@ from .utils import (get_subjects_dir, _check_subject,
                     _check_pandas_index_arguments, _check_pandas_installed,
                     logger, verbose)
 from .viz import plot_source_estimates
-from . fixes import in1d
+from .fixes import in1d
 
 
 def _read_stc(filename):
@@ -690,8 +690,7 @@ class _BaseSourceEstimate(object):
                         tmin=tmin, tstep=width, subject=self.subject)
         return stc
 
-    def transform_data(self, transform_fun, fun_args=None,
-                       idx=None, tmin_idx=None, tmax_idx=None, **kwargs):
+    def transform_data(self, func, idx=None, tmin_idx=None, tmax_idx=None):
         """Get data after a linear (time) transform has been applied
 
         The transform is applied to each source time course independently.
@@ -699,14 +698,13 @@ class _BaseSourceEstimate(object):
 
         Parameters
         ----------
-        transform_fun : callable
-            The transform to be applied. The first parameter of the function
-            is the input data. The first return value is the transformed
-            data, remaining outputs are ignored. The first dimension of the
+        func : callable
+            The transform to be applied, including parameters (see, e.g.,
+            mne.fixes.partial). The first parameter of the function is the
+            input data. The first return value is the transformed data,
+            remaining outputs are ignored. The first dimension of the
             transformed data has to be the same as the first dimension of the
             input data.
-        fun_args : tuple | None
-            Additional parameters to be passed to transform_fun.
         idx : array | None
             Indices of source time courses for which to compute transform.
             If None, all time courses are used.
@@ -716,8 +714,6 @@ class _BaseSourceEstimate(object):
         tmax_idx : int | None
             Index of the first time point not to include. If None, time points
             up to (and including) the last time point are included.
-        **kwargs : dict
-            Keyword arguments to be passed to transform_fun.
 
         Returns
         -------
@@ -736,25 +732,20 @@ class _BaseSourceEstimate(object):
             # use all time courses by default
             idx = slice(None, None)
 
-        if fun_args is None:
-            fun_args = tuple()
-
         if self._kernel is None and self._sens_data is None:
             if self._kernel_removed:
                 warnings.warn('Performance can be improved by not accessing '
                               'the data attribute before calling this method.')
 
             # transform source space data directly
-            data_t = transform_fun(self.data[idx, tmin_idx:tmax_idx],
-                                   *fun_args, **kwargs)
+            data_t = func(self.data[idx, tmin_idx:tmax_idx])
 
             if isinstance(data_t, tuple):
                 # use only first return value
                 data_t = data_t[0]
         else:
             # apply transform in sensor space
-            sens_data_t = transform_fun(self._sens_data[:, tmin_idx:tmax_idx],
-                                        *fun_args, **kwargs)
+            sens_data_t = func(self._sens_data[:, tmin_idx:tmax_idx])
 
             if isinstance(sens_data_t, tuple):
                 # use only first return value
@@ -775,8 +766,7 @@ class _BaseSourceEstimate(object):
 
         return data_t
 
-    def transform(self, func, func_args=None,
-                  idx=None, tmin=None, tmax=None, copy=False, **kwargs):
+    def transform(self, func, idx=None, tmin=None, tmax=None, copy=False):
         """Apply linear transform
 
         The transform is applied to each source time course independently.
@@ -784,20 +774,19 @@ class _BaseSourceEstimate(object):
         Parameters
         ----------
         func : callable
-            The transform to be applied. The first parameter of the function
-            is the input data. The first two dimensions of the transformed
-            data should be (i) vertices and (ii) time.  Transforms which yield
-            3D output (e.g. time-frequency transforms) are valid, so long as
-            the first two dimensions are vertices and time.  In this case, the
-            copy parameter must be True and a list of SourceEstimates, rather
-            than a single SourceEstimate, will be returned, one for each index
-            of the 3rd dimension of the transformed data.  In the case of
-            transforms yielding 2D output (e.g. filtering), the user has the
-            option of modifying the input inplace (copy = False) or returning
-            a new instance of SourceEstimate (copy = True) with the
-            transformed data.
-        func_args : tuple | None
-            Additional parameters to be passed to func.
+            The transform to be applied, including parameters (see, e.g.,
+            mne.fixes.partial). The first parameter of the function is the
+            input data. The first two dimensions of the transformed data
+            should be (i) vertices and (ii) time.  Transforms which yield 3D
+            output (e.g. time-frequency transforms) are valid, so long as the
+            first two dimensions are vertices and time.  In this case, the
+            copy parameter (see below) must be True and a list of
+            SourceEstimates, rather than a single instance of SourceEstimate,
+            will be returned, one for each index of the 3rd dimension of the
+            transformed data.  In the case of transforms yielding 2D output
+            (e.g. filtering), the user has the option of modifying the input
+            inplace (copy = False) or returning a new instance of
+            SourceEstimate (copy = True) with the transformed data.
         idx : array | None
             Indices of source time courses for which to compute transform.
             If None, all time courses are used.
@@ -808,8 +797,6 @@ class _BaseSourceEstimate(object):
         copy : bool
             If True, return a new instance of SourceEstimate instead of
             modifying the input inplace.
-        **kwargs : dict
-            Keyword arguments to be passed to func.
 
         Returns
         -------
@@ -842,9 +829,8 @@ class _BaseSourceEstimate(object):
             tmax = float(tmax)
             tmax_idx = np.where(times <= tmax)[0][-1]
 
-        data_t = self.transform_data(func, fun_args=func_args, idx=idx,
-                                     tmin_idx=tmin_idx, tmax_idx=tmax_idx,
-                                     **kwargs)
+        data_t = self.transform_data(func, idx=idx, tmin_idx=tmin_idx,
+                                     tmax_idx=tmax_idx)
 
         # account for change in n_vertices
         if idx is not None:
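
With `fun_args`/`**kwargs` removed from `transform` and `transform_data`, any extra
parameters are now bound into the callable itself, e.g. via a partial. A short sketch
assuming `stc` is an existing SourceEstimate:

    from functools import partial

    def scale(data, factor=1.0):
        return factor * data

    stc_scaled = stc.transform(partial(scale, factor=10.0), copy=True)
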
diff --git a/mne/tests/test_viz.py b/mne/tests/test_viz.py
index 0dbbc24..6b74c38 100644
--- a/mne/tests/test_viz.py
+++ b/mne/tests/test_viz.py
@@ -123,17 +123,19 @@ def test_plot_topo():
     # Show topography
     evoked = _get_epochs().average()
     plot_topo(evoked, layout)
+    warnings.simplefilter('always', UserWarning)
     picked_evoked = pick_channels_evoked(evoked, evoked.ch_names[:3])
 
     # test scaling
-    for ylim in [dict(mag=[-600, 600]), None]:
-        plot_topo([picked_evoked] * 2, layout, ylim=ylim)
+    with warnings.catch_warnings(record=True):
+        for ylim in [dict(mag=[-600, 600]), None]:
+            plot_topo([picked_evoked] * 2, layout, ylim=ylim)
 
-    for evo in [evoked, [evoked, picked_evoked]]:
-        assert_raises(ValueError, plot_topo, evo, layout, color=['y', 'b'])
+        for evo in [evoked, [evoked, picked_evoked]]:
+            assert_raises(ValueError, plot_topo, evo, layout, color=['y', 'b'])
 
-    evoked_delayed_ssp = _get_epochs_delayed_ssp().average()
-    plot_topo(evoked_delayed_ssp, layout, proj='interactive')
+        evoked_delayed_ssp = _get_epochs_delayed_ssp().average()
+        plot_topo(evoked_delayed_ssp, layout, proj='interactive')
 
 
 def test_plot_topo_tfr():
@@ -182,23 +184,23 @@ def test_plot_evoked():
     """Test plotting of evoked
     """
     evoked = _get_epochs().average()
-    evoked.plot(proj=True, hline=[1])
-
-    # plot with bad channels excluded
-    evoked.plot(exclude='bads')
-    evoked.plot(exclude=evoked.info['bads'])  # does the same thing
-
-    # test selective updating of dict keys is working.
-    evoked.plot(hline=[1], units=dict(mag='femto foo'))
-    evoked_delayed_ssp = _get_epochs_delayed_ssp().average()
-    evoked_delayed_ssp.plot(proj='interactive')
-    evoked_delayed_ssp.apply_proj()
-    assert_raises(RuntimeError, evoked_delayed_ssp.plot, proj='interactive')
-    evoked_delayed_ssp.info['projs'] = []
-    assert_raises(RuntimeError, evoked_delayed_ssp.plot, proj='interactive')
-    assert_raises(RuntimeError, evoked_delayed_ssp.plot, proj='interactive',
-                  axes='foo')
-    plt.close('all')
+    with warnings.catch_warnings(record=True):
+        evoked.plot(proj=True, hline=[1])
+        # plot with bad channels excluded
+        evoked.plot(exclude='bads')
+        evoked.plot(exclude=evoked.info['bads'])  # does the same thing
+
+        # test selective updating of dict keys is working.
+        evoked.plot(hline=[1], units=dict(mag='femto foo'))
+        evoked_delayed_ssp = _get_epochs_delayed_ssp().average()
+        evoked_delayed_ssp.plot(proj='interactive')
+        evoked_delayed_ssp.apply_proj()
+        assert_raises(RuntimeError, evoked_delayed_ssp.plot, proj='interactive')
+        evoked_delayed_ssp.info['projs'] = []
+        assert_raises(RuntimeError, evoked_delayed_ssp.plot, proj='interactive')
+        assert_raises(RuntimeError, evoked_delayed_ssp.plot, proj='interactive',
+                      axes='foo')
+        plt.close('all')
 
 
 def test_plot_epochs():
@@ -375,36 +377,38 @@ def test_plot_raw():
     raw = _get_raw()
     events = _get_events()
     plt.close('all')  # ensure all are closed
-    fig = raw.plot(events=events, show_options=True)
-    # test mouse clicks
-    x = fig.get_axes()[0].lines[1].get_xdata().mean()
-    y = fig.get_axes()[0].lines[1].get_ydata().mean()
-    data_ax = fig.get_axes()[0]
-    _fake_click(fig, data_ax, [x, y], xform='data')  # mark a bad channel
-    _fake_click(fig, data_ax, [x, y], xform='data')  # unmark a bad channel
-    _fake_click(fig, data_ax, [0.5, 0.999])  # click elsewhere in first axes
-    _fake_click(fig, fig.get_axes()[1], [0.5, 0.5])  # change time
-    _fake_click(fig, fig.get_axes()[2], [0.5, 0.5])  # change channels
-    _fake_click(fig, fig.get_axes()[3], [0.5, 0.5])  # open SSP window
-    fig.canvas.button_press_event(1, 1, 1)  # outside any axes
-    # sadly these fail when no renderer is used (i.e., when using Agg):
-    #ssp_fig = set(plt.get_fignums()) - set([fig.number])
-    #assert_equal(len(ssp_fig), 1)
-    #ssp_fig = plt.figure(list(ssp_fig)[0])
-    #ax = ssp_fig.get_axes()[0]  # only one axis is used
-    #t = [c for c in ax.get_children() if isinstance(c, matplotlib.text.Text)]
-    #pos = np.array(t[0].get_position()) + 0.01
-    #_fake_click(ssp_fig, ssp_fig.get_axes()[0], pos, xform='data')  # off
-    #_fake_click(ssp_fig, ssp_fig.get_axes()[0], pos, xform='data')  # on
-    # test keypresses
-    fig.canvas.key_press_event('escape')
-    fig.canvas.key_press_event('down')
-    fig.canvas.key_press_event('up')
-    fig.canvas.key_press_event('right')
-    fig.canvas.key_press_event('left')
-    fig.canvas.key_press_event('o')
-    fig.canvas.key_press_event('escape')
-    plt.close('all')
+    with warnings.catch_warnings(record=True):
+        fig = raw.plot(events=events, show_options=True)
+        # test mouse clicks
+        x = fig.get_axes()[0].lines[1].get_xdata().mean()
+        y = fig.get_axes()[0].lines[1].get_ydata().mean()
+        data_ax = fig.get_axes()[0]
+        _fake_click(fig, data_ax, [x, y], xform='data')  # mark a bad channel
+        _fake_click(fig, data_ax, [x, y], xform='data')  # unmark a bad channel
+        _fake_click(fig, data_ax, [0.5, 0.999])  # click elsewhere in first axes
+        _fake_click(fig, fig.get_axes()[1], [0.5, 0.5])  # change time
+        _fake_click(fig, fig.get_axes()[2], [0.5, 0.5])  # change channels
+        _fake_click(fig, fig.get_axes()[3], [0.5, 0.5])  # open SSP window
+        fig.canvas.button_press_event(1, 1, 1)  # outside any axes
+        # sadly these fail when no renderer is used (i.e., when using Agg):
+        #ssp_fig = set(plt.get_fignums()) - set([fig.number])
+        #assert_equal(len(ssp_fig), 1)
+        #ssp_fig = plt.figure(list(ssp_fig)[0])
+        #ax = ssp_fig.get_axes()[0]  # only one axis is used
+        #t = [c for c in ax.get_children() if isinstance(c,
+        #     matplotlib.text.Text)]
+        #pos = np.array(t[0].get_position()) + 0.01
+        #_fake_click(ssp_fig, ssp_fig.get_axes()[0], pos, xform='data')  # off
+        #_fake_click(ssp_fig, ssp_fig.get_axes()[0], pos, xform='data')  # on
+        # test keypresses
+        fig.canvas.key_press_event('escape')
+        fig.canvas.key_press_event('down')
+        fig.canvas.key_press_event('up')
+        fig.canvas.key_press_event('right')
+        fig.canvas.key_press_event('left')
+        fig.canvas.key_press_event('o')
+        fig.canvas.key_press_event('escape')
+        plt.close('all')
 
 
 def test_plot_raw_psds():
@@ -429,28 +433,39 @@ def test_plot_topomap():
     """Test topomap plotting
     """
     # evoked
-    evoked = fiff.read_evoked(evoked_fname, 'Left Auditory',
-                              baseline=(None, 0))
-    evoked.plot_topomap(0.1, 'mag', layout=layout)
-    plot_evoked_topomap(evoked, None, ch_type='mag')
-    times = [0.1, 0.2]
-    plot_evoked_topomap(evoked, times, ch_type='eeg')
-    plot_evoked_topomap(evoked, times, ch_type='grad')
-    plot_evoked_topomap(evoked, times, ch_type='planar1')
-    plot_evoked_topomap(evoked, times, ch_type='planar2')
-    with warnings.catch_warnings(True):  # delaunay triangulation warning
-        plot_evoked_topomap(evoked, times, ch_type='mag', layout='auto')
-    assert_raises(RuntimeError, plot_evoked_topomap, evoked, 0.1, 'mag',
-                  proj='interactive')  # projs have already been applied
-    evoked.proj = False  # let's fake it like they haven't been applied
-    plot_evoked_topomap(evoked, 0.1, 'mag', proj='interactive')
-    assert_raises(RuntimeError, plot_evoked_topomap, evoked, np.repeat(.1, 50))
-    assert_raises(ValueError, plot_evoked_topomap, evoked, [-3e12, 15e6])
-
-    projs = read_proj(ecg_fname)
-    projs = [p for p in projs if p['desc'].lower().find('eeg') < 0]
-    plot_projs_topomap(projs)
-    plt.close('all')
+    warnings.simplefilter('always', UserWarning)
+    with warnings.catch_warnings(record=True):
+        evoked = fiff.read_evoked(evoked_fname, 'Left Auditory',
+                                  baseline=(None, 0))
+        evoked.plot_topomap(0.1, 'mag', layout=layout)
+        plot_evoked_topomap(evoked, None, ch_type='mag')
+        times = [0.1, 0.2]
+        plot_evoked_topomap(evoked, times, ch_type='eeg')
+        plot_evoked_topomap(evoked, times, ch_type='grad')
+        plot_evoked_topomap(evoked, times, ch_type='planar1')
+        plot_evoked_topomap(evoked, times, ch_type='planar2')
+        with warnings.catch_warnings(True):  # delaunay triangulation warning
+            plot_evoked_topomap(evoked, times, ch_type='mag', layout='auto')
+        assert_raises(RuntimeError, plot_evoked_topomap, evoked, 0.1, 'mag',
+                      proj='interactive')  # projs have already been applied
+        evoked.proj = False  # let's fake it like they haven't been applied
+        plot_evoked_topomap(evoked, 0.1, 'mag', proj='interactive')
+        assert_raises(RuntimeError, plot_evoked_topomap, evoked,
+                      np.repeat(.1, 50))
+        assert_raises(ValueError, plot_evoked_topomap, evoked, [-3e12, 15e6])
+
+        projs = read_proj(ecg_fname)
+        projs = [p for p in projs if p['desc'].lower().find('eeg') < 0]
+        plot_projs_topomap(projs)
+        plt.close('all')
+        for ch in evoked.info['chs']:
+            if ch['coil_type'] == fiff.FIFF.FIFFV_COIL_EEG:
+                if ch['eeg_loc'] is not None:
+                    ch['eeg_loc'].fill(0)
+                ch['loc'].fill(0)
+        assert_raises(RuntimeError, plot_evoked_topomap, evoked,
+                      times, ch_type='eeg')
+        
 
 
 def test_compare_fiff():
@@ -470,8 +485,10 @@ def test_plot_ica_topomap():
     ica_picks = fiff.pick_types(raw.info, meg=True, eeg=False, stim=False,
                                 ecg=False, eog=False, exclude='bads')
     ica.decompose_raw(raw, picks=ica_picks)
-    for components in [0, [0], [0, 1], [0, 1] * 7]:
-        ica.plot_topomap(components)
+    warnings.simplefilter('always', UserWarning)
+    with warnings.catch_warnings(record=True):
+        for components in [0, [0], [0, 1], [0, 1] * 7]:
+            ica.plot_topomap(components)
     ica.info = None
     assert_raises(RuntimeError, ica.plot_topomap, 1)
     plt.close('all')
diff --git a/mne/utils.py b/mne/utils.py
index 003421d..b54e19d 100644
--- a/mne/utils.py
+++ b/mne/utils.py
@@ -240,6 +240,12 @@ def pformat(temp, **fmt):
     return formatter.vformat(temp, (), mapping)
 
 
+def trait_wraith(*args, **kwargs):
+    # Stand in for traits to allow importing traits based modules when the
+    # traits library is not installed
+    return lambda x: x
+
+
 ###############################################################################
 # DECORATORS
 
@@ -540,6 +546,7 @@ def make_skipper_dec(module, skip_str):
 
 requires_sklearn = make_skipper_dec('sklearn', 'scikit-learn not installed')
 requires_nitime = make_skipper_dec('nitime', 'nitime not installed')
+requires_traits = make_skipper_dec('traits', 'traits not installed')
 
 
 def _mne_fs_not_in_env():
@@ -1224,3 +1231,31 @@ def _check_pandas_index_arguments(index, defaults):
         options = [', '.join(e) for e in [invalid_choices, defaults]]
         raise ValueError('[%s] is not a valid option. Valid index '
                          'values are \'None\' or %s' % tuple(options))
+
+
+def _clean_names(names, remove_whitespace=False, before_dash=True):
+    """ Remove white-space on topo matching
+
+    This function handles different naming
+    conventions for old VS new VectorView systems (`remove_whitespace`).
+    Also it allows to remove system specific parts in CTF channel names
+    (`before_dash`).
+
+    Usage
+    -----
+    # for new VectorView (only inside layout) 
+    ch_names = _clean_names(epochs.ch_names, remove_whitespace=True)
+
+    # for CTF
+    ch_names = _clean_names(epochs.ch_names, before_dash=True)
+
+    """
+    cleaned = []
+    for name in names:
+        if ' ' in name and remove_whitespace:
+            name = name.replace(' ', '')
+        if '-' in name and before_dash:
+            name = name.split('-')[0]
+        cleaned.append(name)
+
+    return cleaned
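
The relocated `_clean_names` lets callers choose which cleanup is applied, instead of
always doing both as the removed `mne.viz` version did. A quick sketch with made-up
channel names:

    from mne.utils import _clean_names

    _clean_names(['MEG 0113', 'MEG 0112'], remove_whitespace=True)  # ['MEG0113', 'MEG0112']
    _clean_names(['MLC11-2622'], before_dash=True)                  # ['MLC11']
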
diff --git a/mne/viz.py b/mne/viz.py
index 9d41570..758ad06 100644
--- a/mne/viz.py
+++ b/mne/viz.py
@@ -38,7 +38,7 @@ from .fiff import show_fiff, FIFF
 from .fiff.pick import channel_type, pick_types
 from .fiff.proj import make_projector, setup_proj
 from .fixes import normalize_colors
-from .utils import create_chunks
+from .utils import create_chunks, _clean_names
 from .time_frequency import compute_raw_psd
 
 COLORS = ['b', 'g', 'r', 'c', 'm', 'y', 'k', '#473C8B', '#458B74',
@@ -79,35 +79,6 @@ def _mutable_defaults(*mappings):
     return out
 
 
-def _clean_names(names):
-    """ Remove white-space on topo matching
-
-    Over the years, Neuromag systems employed inconsistent handling of
-    white-space in layout names. This function handles different naming
-    conventions and hence should be used in each topography-plot to
-    warrant compatibility across systems.
-
-    Usage
-    -----
-    Wrap this function around channel and layout names:
-    ch_names = _clean_names(epochs.ch_names)
-
-    for n in _clean_names(layout.names):
-        if n in ch_names:
-            # prepare plot
-
-    """
-    cleaned = []
-    for name in names:
-        if ' ' in name:
-            name = name.replace(' ', '')
-        if '-' in name:
-            name = name.split('-')[0]
-        cleaned.append(name)
-
-    return cleaned
-
-
 def _check_delayed_ssp(container):
     """ Aux function to be used for interactive SSP selection
     """
@@ -133,18 +104,19 @@ def tight_layout(pad=1.2, h_pad=None, w_pad=None):
         Defaults to `pad_inches`.
     """
     import matplotlib.pyplot as plt
-    if plt.get_backend().lower() != 'agg':
-        try:
-            plt.tight_layout(pad=pad, h_pad=h_pad, w_pad=w_pad)
-        except:
-            msg = ('Matplotlib function \'tight_layout\'%s.'
-                   ' Skipping subpplot adjusment.')
-            if not hasattr(plt, 'tight_layout'):
-                case = ' is not available'
-            else:
-                case = (' is not supported by your backend: `%s`'
-                        % plt.get_backend())
-            warn(msg % case)
+    try:
+        fig = plt.gcf()
+        fig.tight_layout(pad=pad, h_pad=h_pad, w_pad=w_pad)
+        fig.canvas.draw()
+    except:
+        msg = ('Matplotlib function \'tight_layout\'%s.'
+               ' Skipping subplot adjustment.')
+        if not hasattr(plt, 'tight_layout'):
+            case = ' is not available'
+        else:
+            case = (' is not supported by your backend: `%s`'
+                    % plt.get_backend())
+        warn(msg % case)
 
 
 def _plot_topo(info=None, times=None, show_func=None, layout=None,
@@ -174,7 +146,7 @@ def _plot_topo(info=None, times=None, show_func=None, layout=None,
             cb_yticks = plt.getp(cb.ax.axes, 'yticklabels')
             plt.setp(cb_yticks, color='w')
         plt.rcParams['axes.edgecolor'] = border
-        for idx, name in enumerate(_clean_names(layout.names)):
+        for idx, name in enumerate(layout.names):
             if name in ch_names:
                 ax = plt.axes(pos[idx], axisbg='k')
                 ch_idx = ch_names.index(name)
@@ -358,11 +330,9 @@ def plot_topo(evoked, layout=None, layout_scale=0.945, color=None,
 
     if layout is None:
         from .layouts.layout import find_layout
-        layout = find_layout(info['chs'])
+        layout = find_layout(info)
 
     # XXX. at the moment we are committed to 1- / 2-sensor-types layouts
-    layout = copy.deepcopy(layout)
-    layout.names = _clean_names(layout.names)
     chs_in_layout = set(layout.names) & set(ch_names)
     types_used = set(channel_type(info, ch_names.index(ch))
                      for ch in chs_in_layout)
@@ -494,9 +464,7 @@ def plot_topo_tfr(epochs, tfr, freq, layout=None, colorbar=True, vmin=None,
 
     if layout is None:
         from .layouts.layout import find_layout
-        layout = find_layout(epochs.info['chs'])
-    layout = copy.deepcopy(layout)
-    layout.names = _clean_names(layout.names)
+        layout = find_layout(epochs.info)
 
     tfr_imshow = partial(_imshow_tfr, tfr=tfr.copy(), freq=freq)
 
@@ -579,9 +547,7 @@ def plot_topo_power(epochs, power, freq, layout=None, baseline=None,
         vmax = power.max()
     if layout is None:
         from .layouts.layout import find_layout
-        layout = find_layout(epochs.info['chs'])
-    layout = copy.deepcopy(layout)
-    layout.names = _clean_names(layout.names)
+        layout = find_layout(epochs.info)
 
     power_imshow = partial(_imshow_tfr, tfr=power.copy(), freq=freq)
 
@@ -662,9 +628,7 @@ def plot_topo_phase_lock(epochs, phase, freq, layout=None, baseline=None,
         vmax = phase.max()
     if layout is None:
         from .layouts.layout import find_layout
-        layout = find_layout(epochs.info['chs'])
-    layout = copy.deepcopy(layout)
-    layout.names = _clean_names(layout.names)
+        layout = find_layout(epochs.info)
 
     phase_imshow = partial(_imshow_tfr, tfr=phase.copy(), freq=freq)
 
@@ -741,7 +705,7 @@ def plot_topo_image_epochs(epochs, layout=None, sigma=0.3, vmin=None,
 
     Returns
     -------
-    fig : instacne fo matplotlib figure
+    fig : instance of matplotlib figure
         Figure distributing one image per channel across sensor topography.
     """
     scalings = _mutable_defaults(('scalings', scalings))[0]
@@ -752,9 +716,7 @@ def plot_topo_image_epochs(epochs, layout=None, sigma=0.3, vmin=None,
         vmax = data.max()
     if layout is None:
         from .layouts.layout import find_layout
-        layout = find_layout(epochs.info['chs'])
-    layout = copy.deepcopy(layout)
-    layout.names = _clean_names(layout.names)
+        layout = find_layout(epochs.info)
 
     erf_imshow = partial(_erfimage_imshow, scalings=scalings, order=order,
                          data=data, epochs=epochs, sigma=sigma)
@@ -959,6 +921,11 @@ def plot_projs_topomap(projs, layout=None, cmap='RdBu_r', sensors='k,',
         multiple topomaps at a time).
     show : bool
         Show figures if True
+
+    Returns
+    -------
+    fig : instance of matplotlib figure
+        Figure distributing one image per channel across sensor topography.
     """
     import matplotlib.pyplot as plt
 
@@ -973,7 +940,8 @@ def plot_projs_topomap(projs, layout=None, cmap='RdBu_r', sensors='k,',
     nrows = math.floor(math.sqrt(n_projs))
     ncols = math.ceil(n_projs / nrows)
 
-    plt.clf()
+    fig = plt.gcf()
+    fig.clear()
     for k, proj in enumerate(projs):
 
         ch_names = _clean_names(proj['data']['col_names'])
@@ -981,8 +949,6 @@ def plot_projs_topomap(projs, layout=None, cmap='RdBu_r', sensors='k,',
 
         idx = []
         for l in layout:
-            l = copy.deepcopy(l)
-            l.names = _clean_names(l.names)
             is_vv = l.kind.startswith('Vectorview')
             if is_vv:
                 from .layouts.layout import _pair_grad_sensors_from_ch_names
@@ -1013,9 +979,11 @@ def plot_projs_topomap(projs, layout=None, cmap='RdBu_r', sensors='k,',
         else:
             raise RuntimeError('Cannot find a proper layout for projection %s'
                                % proj['desc'])
-
-    if show:
-        plt.show()
+    fig = ax.get_figure()
+    if show and plt.get_backend() != 'agg':
+        fig.show()
+
+    return fig
 
 
 def plot_topomap(data, pos, vmax=None, cmap='RdBu_r', sensors='k,', res=100,
@@ -1071,6 +1039,10 @@ def plot_topomap(data, pos, vmax=None, cmap='RdBu_r', sensors='k,', res=100,
 
     xmin, xmax = pos_x.min(), pos_x.max()
     ymin, ymax = pos_y.min(), pos_y.max()
+    if not pos_x.any() or not pos_y.any():
+        raise RuntimeError('No position information found, cannot compute '
+                           'geometries for topomap.')
+
     triang = delaunay.Triangulation(pos_x, pos_y)
     interp = triang.linear_interpolator(data)
     x = np.linspace(xmin, xmax, res)
@@ -1171,7 +1143,6 @@ def plot_evoked(evoked, picks=None, exclude='bads', unit=True, show=True,
     fig = None
     if axes is None:
         fig, axes = plt.subplots(n_channel_types, 1)
-
     if isinstance(axes, plt.Axes):
         axes = [axes]
     elif isinstance(axes, np.ndarray):
@@ -1184,7 +1155,6 @@ def plot_evoked(evoked, picks=None, exclude='bads', unit=True, show=True,
         raise ValueError('Number of axes (%g) must match number of channel '
                          'types (%g)' % (len(axes), n_channel_types))
 
-
     # instead of projecting during each iteration let's use the mixin here.
     if proj is True and evoked.proj is not True:
         evoked = evoked.copy()
@@ -1241,6 +1211,7 @@ def plot_evoked(evoked, picks=None, exclude='bads', unit=True, show=True,
 
     if show and plt.get_backend() != 'agg':
         fig.show()
+        fig.canvas.draw()  # redraw so user-supplied axes are updated too
 
     return fig
 
@@ -1300,7 +1271,7 @@ def _draw_proj_checkbox(event, params, draw_current_state=True):
     params['proj_checks'] = proj_checks
     # this should work for non-test cases
     try:
-        fig_proj.canvas.show()
+        fig_proj.show()
     except Exception:
         pass
 
@@ -1910,7 +1881,7 @@ def _prepare_topo_plot(obj, ch_type, layout):
     info = copy.deepcopy(obj.info)
     if layout is None and ch_type is not 'eeg':
         from .layouts.layout import find_layout
-        layout = find_layout(info['chs'])
+        layout = find_layout(info)
     elif layout == 'auto':
         layout = None
 
@@ -1918,10 +1889,6 @@ def _prepare_topo_plot(obj, ch_type, layout):
     for ii, this_ch in enumerate(info['chs']):
         this_ch['ch_name'] = info['ch_names'][ii]
 
-    if layout is not None:
-        layout = copy.deepcopy(layout)
-        layout.names = _clean_names(layout.names)
-
     # special case for merging grad channels
     if (ch_type == 'grad' and FIFF.FIFFV_COIL_VV_PLANAR_T1 in
                     np.unique([ch['coil_type'] for ch in info['chs']])):
@@ -3246,6 +3213,8 @@ def _epochs_axes_onclick(event, params):
     """Aux function"""
     reject_color = (0.8, 0.8, 0.8)
     ax = event.inaxes
+    if event.inaxes is None:
+        return
     p = params
     here = vars(ax)[p['axes_handler'][0]]
     if here.get('reject', None) is False:
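
The viz hunks above repeatedly replace find_layout(epochs.info['chs']) with
find_layout(epochs.info), so the layout lookup now receives the full
measurement info rather than just the channel list. A minimal usage sketch
under that assumption (the epochs object is presumed to exist already and is
not constructed here):

    # Hedged sketch of the new call pattern; 'epochs' is assumed to be an
    # existing mne.Epochs instance loaded elsewhere.
    from mne.layouts.layout import find_layout

    layout = find_layout(epochs.info)      # pass the info dict, not info['chs']
    print(layout.kind, len(layout.names))  # Layout exposes 'kind' and 'names'
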
diff --git a/setup.py b/setup.py
index 72ad6db..ae05de2 100755
--- a/setup.py
+++ b/setup.py
@@ -70,6 +70,7 @@ if __name__ == "__main__":
                     'mne.fiff.edf', 'mne.fiff.edf.tests',
                     'mne.fiff.brainvision', 'mne.fiff.brainvision.tests',
                     'mne.forward', 'mne.forward.tests',
+                    'mne.gui', 'mne.gui.tests',
                     'mne.layouts', 'mne.layouts.tests',
                     'mne.minimum_norm', 'mne.minimum_norm.tests',
                     'mne.mixed_norm',
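
Beyond the layout change, plot_projs_topomap now clears the current figure,
returns it instead of returning nothing, and only calls show() when the
backend is not 'agg'. A hedged sketch of how a caller might use the returned
figure, assuming an SSP projector file readable with mne.read_proj and the
function's default layouts when no layout argument is passed (the file names
below are placeholders, not part of this commit):

    # Sketch only: file names are hypothetical.
    import mne
    from mne.viz import plot_projs_topomap

    projs = mne.read_proj('sample_ecg-proj.fif')   # hypothetical SSP file
    fig = plot_projs_topomap(projs, show=False)
    fig.savefig('ssp_projs_topomap.png')           # returned figure can be saved
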

-- 
Alioth's /git/debian-med/git-commit-notice on /srv/git.debian.org/git/debian-med/mne-python.git


