[med-svn] [python-mne] 129/376: fix patch_info, commenting untested code, increase coverage
Yaroslav Halchenko
debian at onerussian.com
Fri Nov 27 17:22:20 UTC 2015
This is an automated email from the git hooks/post-receive script.
yoh pushed a commit to annotated tag v0.1
in repository python-mne.
commit 814702447e1459f18946ac3d9972476ce133260a
Author: Alexandre Gramfort <alexandre.gramfort at inria.fr>
Date: Sat Mar 12 16:33:56 2011 -0500
fix patch_info, commenting untested code, increase coverage
---
doc/source/manual/utilities.rst | 2 +-
examples/plot_read_source_space.py | 3 +-
mne/fiff/compensator.py | 312 ++++++++++++++++++-------------------
mne/fiff/ctf.py | 210 +++++++++++++------------
mne/fiff/meas_info.py | 38 +----
mne/forward.py | 4 +-
mne/inverse.py | 4 +-
mne/layouts/layout.py | 36 ++---
mne/preprocessing.py | 109 ++++++-------
mne/source_space.py | 91 +++++------
mne/tests/test_source_space.py | 2 +-
11 files changed, 388 insertions(+), 423 deletions(-)
diff --git a/doc/source/manual/utilities.rst b/doc/source/manual/utilities.rst
index 890453d..cffa0c5 100644
--- a/doc/source/manual/utilities.rst
+++ b/doc/source/manual/utilities.rst
@@ -624,7 +624,7 @@ with the patch information included. In addition to the patch information, mne_a
optionally calculate distances, along the cortical surface, between
the vertices selected to the source space.
-.. note:: Depending on the speed of your computer and the options selected, mne_add_patch_info takes 5 - 30 minutes to run.
+.. note:: Depending on the speed of your computer and the options selected, mne_add_patch_info takes 5 - 30 minutes to run.
.. _CJAGCDCC:
diff --git a/examples/plot_read_source_space.py b/examples/plot_read_source_space.py
index eb216ef..5c13713 100644
--- a/examples/plot_read_source_space.py
+++ b/examples/plot_read_source_space.py
@@ -15,7 +15,8 @@ import mne
from mne.datasets import sample
data_path = sample.data_path('.')
-fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-eeg-oct-6-fwd.fif')
+# fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-eeg-oct-6-fwd.fif')
+fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-eeg-oct-6p-fwd.fif')
add_geom = True # include high resolution source space
src = mne.read_source_spaces(fname, add_geom=add_geom)
diff --git a/mne/fiff/compensator.py b/mne/fiff/compensator.py
index 8290f73..b64dfbc 100644
--- a/mne/fiff/compensator.py
+++ b/mne/fiff/compensator.py
@@ -20,159 +20,159 @@ def get_current_comp(info):
return comp
-def findall(L, value, start=0):
- """Returns indices of all occurence of value in list L starting from start
- """
- c = L.count(value)
- if c == 0:
- return list()
- else:
- ind = list()
- i = start-1
- for _ in range(c):
- i = L.index(value, i+1)
- ind.append(i)
- return ind
-
-
-def _make_compensator(info, kind):
- """Auxiliary function for make_compensator
- """
- for k in range(len(info['comps'])):
- if info['comps'][k]['kind'] == kind:
- this_data = info['comps'][k]['data']
-
- # Create the preselector
- presel = np.zeros((this_data['ncol'], info['nchan']))
- for col, col_name in enumerate(this_data['col_names']):
- ind = findall(info['ch_names'], col_name)
- if len(ind) == 0:
- raise ValueError, 'Channel %s is not available in data' % \
- col_name
- elif len(ind) > 1:
- raise ValueError, 'Ambiguous channel %s' % col_name
- presel[col, ind] = 1.0
-
- # Create the postselector
- postsel = np.zeros((info['nchan'], this_data['nrow']))
- for c, ch_name in enumerate(info['ch_names']):
- ind = findall(this_data['row_names'], ch_name)
- if len(ind) > 1:
- raise ValueError, 'Ambiguous channel %s' % ch_name
- elif len(ind) == 1:
- postsel[c, ind] = 1.0
-
- this_comp = np.dot(postsel, np.dot(this_data['data'], presel))
- return this_comp
-
- return []
-
-
-def make_compensator(info, from_, to, exclude_comp_chs=False):
- """ XXX : bug !!! 2 make_compensator functions
- %
- % [comp] = mne_make_compensator(info,from,to,exclude_comp_chs)
- %
- % info - measurement info as returned by the fif reading routines
- % from - compensation in the input data
- % to - desired compensation in the output
- % exclude_comp_chs - exclude compensation channels from the output (optional)
- %
-
- %
- % Create a compensation matrix to bring the data from one compensation
- % state to another
- %
- """
-
- if from_ == to:
- comp = np.zeros((info['nchan'], info['nchan']))
- return comp
-
- if from_ == 0:
- C1 = np.zeros((info['nchan'], info['nchan']))
- else:
- try:
- C1 = _make_compensator(info, from_)
- except Exception as inst:
- raise ValueError, 'Cannot create compensator C1 (%s)' % inst
-
- if len(C1) == 0:
- raise ValueError, ('Desired compensation matrix (kind = %d) not'
- ' found' % from_)
-
-
- if to == 0:
- C2 = np.zeros((info['nchan'], info['nchan']))
- else:
- try:
- C2 = _make_compensator(info, to)
- except Exception as inst:
- raise ValueError, 'Cannot create compensator C2 (%s)' % inst
-
- if len(C2) == 0:
- raise ValueError, ('Desired compensation matrix (kind = %d) not '
- 'found' % to)
-
-
- # s_orig = s_from + C1*s_from = (I + C1)*s_from
- # s_to = s_orig - C2*s_orig = (I - C2)*s_orig
- # s_to = (I - C2)*(I + C1)*s_from = (I + C1 - C2 - C2*C1)*s_from
- comp = np.eye(info['nchan']) + C1 - C2 - C2*C1
-
- if exclude_comp_chs:
- pick = np.zeros((info['nchan'], info['nchan']))
- npick = 0
- for k, chan in info['chs']:
- if chan['kind'] != FIFF.FIFFV_REF_MEG_CH:
- npick += 1
- pick[npick] = k
-
- if npick == 0:
- raise ValueError, ('Nothing remains after excluding the '
- 'compensation channels')
-
- comp = comp[pick[1:npick], :]
-
- return comp
-
-
-def compensate_to(data, to):
- """
- %
- % [newdata] = mne_compensate_to(data,to)
- %
- % Apply compensation to the data as desired
- %
- """
-
- newdata = data.copy()
- now = get_current_comp(newdata['info'])
-
- # Are we there already?
- if now == to:
- print 'Data are already compensated as desired'
-
- # Make the compensator and apply it to all data sets
- comp = make_compensator(newdata['info'], now, to)
- for k in range(len(newdata['evoked'])):
- newdata['evoked'][k]['epochs'] = np.dot(comp,
- newdata['evoked'][k]['epochs'])
-
- # Update the compensation info in the channel descriptors
- newdata['info']['chs'] = set_current_comp(newdata['info']['chs'], to)
- return newdata
-
-
-def set_current_comp(chs, value):
- """Set the current compensation value in the channel info structures
- """
- new_chs = chs
-
- lower_half = int('FFFF', 16) # hex2dec('FFFF')
- for k in range(len(chs)):
- if chs[k].kind == FIFF.FIFFV_MEG_CH:
- coil_type = float(chs[k]['coil_type']) & lower_half
- new_chs[k]['coil_type'] = int(coil_type | (value << 16))
-
- return new_chs
+# def findall(L, value, start=0):
+# """Returns indices of all occurrences of value in list L starting from start
+# """
+# c = L.count(value)
+# if c == 0:
+# return list()
+# else:
+# ind = list()
+# i = start-1
+# for _ in range(c):
+# i = L.index(value, i+1)
+# ind.append(i)
+# return ind
+
+
+# def _make_compensator(info, kind):
+# """Auxiliary function for make_compensator
+# """
+# for k in range(len(info['comps'])):
+# if info['comps'][k]['kind'] == kind:
+# this_data = info['comps'][k]['data']
+#
+# # Create the preselector
+# presel = np.zeros((this_data['ncol'], info['nchan']))
+# for col, col_name in enumerate(this_data['col_names']):
+# ind = findall(info['ch_names'], col_name)
+# if len(ind) == 0:
+# raise ValueError, 'Channel %s is not available in data' % \
+# col_name
+# elif len(ind) > 1:
+# raise ValueError, 'Ambiguous channel %s' % col_name
+# presel[col, ind] = 1.0
+#
+# # Create the postselector
+# postsel = np.zeros((info['nchan'], this_data['nrow']))
+# for c, ch_name in enumerate(info['ch_names']):
+# ind = findall(this_data['row_names'], ch_name)
+# if len(ind) > 1:
+# raise ValueError, 'Ambiguous channel %s' % ch_name
+# elif len(ind) == 1:
+# postsel[c, ind] = 1.0
+#
+# this_comp = np.dot(postsel, np.dot(this_data['data'], presel))
+# return this_comp
+#
+# return []
+
+
+# def make_compensator(info, from_, to, exclude_comp_chs=False):
+# """ XXX : bug !!! 2 make_compensator functions
+# %
+# % [comp] = mne_make_compensator(info,from,to,exclude_comp_chs)
+# %
+# % info - measurement info as returned by the fif reading routines
+# % from - compensation in the input data
+# % to - desired compensation in the output
+# % exclude_comp_chs - exclude compensation channels from the output (optional)
+# %
+#
+# %
+# % Create a compensation matrix to bring the data from one compensation
+# % state to another
+# %
+# """
+#
+# if from_ == to:
+# comp = np.zeros((info['nchan'], info['nchan']))
+# return comp
+#
+# if from_ == 0:
+# C1 = np.zeros((info['nchan'], info['nchan']))
+# else:
+# try:
+# C1 = _make_compensator(info, from_)
+# except Exception as inst:
+# raise ValueError, 'Cannot create compensator C1 (%s)' % inst
+#
+# if len(C1) == 0:
+# raise ValueError, ('Desired compensation matrix (kind = %d) not'
+# ' found' % from_)
+#
+#
+# if to == 0:
+# C2 = np.zeros((info['nchan'], info['nchan']))
+# else:
+# try:
+# C2 = _make_compensator(info, to)
+# except Exception as inst:
+# raise ValueError, 'Cannot create compensator C2 (%s)' % inst
+#
+# if len(C2) == 0:
+# raise ValueError, ('Desired compensation matrix (kind = %d) not '
+# 'found' % to)
+#
+#
+# # s_orig = s_from + C1*s_from = (I + C1)*s_from
+# # s_to = s_orig - C2*s_orig = (I - C2)*s_orig
+# # s_to = (I - C2)*(I + C1)*s_from = (I + C1 - C2 - C2*C1)*s_from
+# comp = np.eye(info['nchan']) + C1 - C2 - C2*C1
+#
+# if exclude_comp_chs:
+# pick = np.zeros((info['nchan'], info['nchan']))
+# npick = 0
+# for k, chan in info['chs']:
+# if chan['kind'] != FIFF.FIFFV_REF_MEG_CH:
+# npick += 1
+# pick[npick] = k
+#
+# if npick == 0:
+# raise ValueError, ('Nothing remains after excluding the '
+# 'compensation channels')
+#
+# comp = comp[pick[1:npick], :]
+#
+# return comp
+
+
+# def compensate_to(data, to):
+# """
+# %
+# % [newdata] = mne_compensate_to(data,to)
+# %
+# % Apply compensation to the data as desired
+# %
+# """
+#
+# newdata = data.copy()
+# now = get_current_comp(newdata['info'])
+#
+# # Are we there already?
+# if now == to:
+# print 'Data are already compensated as desired'
+#
+# # Make the compensator and apply it to all data sets
+# comp = make_compensator(newdata['info'], now, to)
+# for k in range(len(newdata['evoked'])):
+# newdata['evoked'][k]['epochs'] = np.dot(comp,
+# newdata['evoked'][k]['epochs'])
+#
+# # Update the compensation info in the channel descriptors
+# newdata['info']['chs'] = set_current_comp(newdata['info']['chs'], to)
+# return newdata
+
+
+# def set_current_comp(chs, value):
+# """Set the current compensation value in the channel info structures
+# """
+# new_chs = chs
+#
+# lower_half = int('FFFF', 16) # hex2dec('FFFF')
+# for k in range(len(chs)):
+# if chs[k].kind == FIFF.FIFFV_MEG_CH:
+# coil_type = float(chs[k]['coil_type']) & lower_half
+# new_chs[k]['coil_type'] = int(coil_type | (value << 16))
+#
+# return new_chs
diff --git a/mne/fiff/ctf.py b/mne/fiff/ctf.py
index dcf358c..6386520 100644
--- a/mne/fiff/ctf.py
+++ b/mne/fiff/ctf.py
@@ -109,88 +109,91 @@ def read_ctf_comp(fid, node, chs):
comps = dir_tree_find(node, FIFF.FIFFB_MNE_CTF_COMP_DATA)
for node in comps:
-
- # Read the data we need
- mat = _read_named_matrix(fid, node, FIFF.FIFF_MNE_CTF_COMP_DATA)
- for p in range(node.nent):
- kind = node.dir[p].kind
- pos = node.dir[p].pos
- if kind == FIFF.FIFF_MNE_CTF_COMP_KIND:
- tag = read_tag(fid, pos)
- break
- else:
- raise ValueError, 'Compensation type not found'
-
- # Get the compensation kind and map it to a simple number
- one = dict(ctfkind=tag.data, kind=-1)
- del tag
-
- if one.ctfkind == int('47314252', 16): # hex2dec('47314252'):
- one.kind = 1
- elif one.ctfkind == int('47324252', 16): # hex2dec('47324252'):
- one.kind = 2
- elif one.ctfkind == int('47334252', 16): # hex2dec('47334252'):
- one.kind = 3
- else:
- one.kind = one.ctfkind
-
- for p in range(node.nent):
- kind = node.dir[p].kind
- pos = node.dir[p].pos
- if kind == FIFF.FIFF_MNE_CTF_COMP_CALIBRATED:
- tag = read_tag(fid, pos)
- calibrated = tag.data
- break
- else:
- calibrated = False
-
- one['save_calibrated'] = calibrated
- one['rowcals'] = np.ones(1, mat.shape[0])
- one['colcals'] = np.ones(1, mat.shape[1])
- if not calibrated:
- #
- # Calibrate...
- #
- # Do the columns first
- #
- ch_names = []
- for p in range(len(chs)):
- ch_names.append(chs[p].ch_name)
-
- col_cals = np.zeros(mat.data.shape[1])
- for col in range(mat.data.shape[1]):
- p = ch_names.count(mat.col_names[col])
- if p == 0:
- raise ValueError, 'Channel %s is not available in data' \
- % mat.col_names[col]
- elif p > 1:
- raise ValueError, 'Ambiguous channel %s' % \
- mat.col_names[col]
-
- col_cals[col] = 1.0 / (chs[p].range * chs[p].cal)
-
- # Then the rows
- row_cals = np.zeros(mat.data.shape[0])
- for row in range(mat.data.shape[0]):
- p = ch_names.count(mat.row_names[row])
- if p == 0:
- raise ValueError, 'Channel %s is not available in data' \
- % mat.row_names[row]
- elif p > 1:
- raise ValueError, 'Ambiguous channel %s' % \
- mat.row_names[row]
-
- row_cals[row] = chs[p].range * chs[p].cal
-
- mat.data = np.dot(np.diag(row_cals), np.dot(mat.data,
- np.diag(col_cals)))
- one.rowcals = row_cals
- one.colcals = col_cals
-
- one.data = mat
- compdata.append(one)
- del row_cals
- del col_cals
+ # XXX
+ raise NotImplementedError, "CTF data processing is not supported yet"
+
+ #
+ # # Read the data we need
+ # mat = _read_named_matrix(fid, node, FIFF.FIFF_MNE_CTF_COMP_DATA)
+ # for p in range(node.nent):
+ # kind = node.dir[p].kind
+ # pos = node.dir[p].pos
+ # if kind == FIFF.FIFF_MNE_CTF_COMP_KIND:
+ # tag = read_tag(fid, pos)
+ # break
+ # else:
+ # raise ValueError, 'Compensation type not found'
+ #
+ # # Get the compensation kind and map it to a simple number
+ # one = dict(ctfkind=tag.data, kind=-1)
+ # del tag
+ #
+ # if one.ctfkind == int('47314252', 16): # hex2dec('47314252'):
+ # one.kind = 1
+ # elif one.ctfkind == int('47324252', 16): # hex2dec('47324252'):
+ # one.kind = 2
+ # elif one.ctfkind == int('47334252', 16): # hex2dec('47334252'):
+ # one.kind = 3
+ # else:
+ # one.kind = one.ctfkind
+ #
+ # for p in range(node.nent):
+ # kind = node.dir[p].kind
+ # pos = node.dir[p].pos
+ # if kind == FIFF.FIFF_MNE_CTF_COMP_CALIBRATED:
+ # tag = read_tag(fid, pos)
+ # calibrated = tag.data
+ # break
+ # else:
+ # calibrated = False
+ #
+ # one['save_calibrated'] = calibrated
+ # one['rowcals'] = np.ones(1, mat.shape[0])
+ # one['colcals'] = np.ones(1, mat.shape[1])
+ # if not calibrated:
+ # #
+ # # Calibrate...
+ # #
+ # # Do the columns first
+ # #
+ # ch_names = []
+ # for p in range(len(chs)):
+ # ch_names.append(chs[p].ch_name)
+ #
+ # col_cals = np.zeros(mat.data.shape[1])
+ # for col in range(mat.data.shape[1]):
+ # p = ch_names.count(mat.col_names[col])
+ # if p == 0:
+ # raise ValueError, 'Channel %s is not available in data' \
+ # % mat.col_names[col]
+ # elif p > 1:
+ # raise ValueError, 'Ambiguous channel %s' % \
+ # mat.col_names[col]
+ #
+ # col_cals[col] = 1.0 / (chs[p].range * chs[p].cal)
+ #
+ # # Then the rows
+ # row_cals = np.zeros(mat.data.shape[0])
+ # for row in range(mat.data.shape[0]):
+ # p = ch_names.count(mat.row_names[row])
+ # if p == 0:
+ # raise ValueError, 'Channel %s is not available in data' \
+ # % mat.row_names[row]
+ # elif p > 1:
+ # raise ValueError, 'Ambiguous channel %s' % \
+ # mat.row_names[row]
+ #
+ # row_cals[row] = chs[p].range * chs[p].cal
+ #
+ # mat.data = np.dot(np.diag(row_cals), np.dot(mat.data,
+ # np.diag(col_cals)))
+ # one.rowcals = row_cals
+ # one.colcals = col_cals
+ #
+ # one.data = mat
+ # compdata.append(one)
+ # del row_cals
+ # del col_cals
if len(compdata) > 0:
print '\tRead %d compensation matrices' % len(compdata)
@@ -219,23 +222,26 @@ def write_ctf_comp(fid, comps):
if len(comps) <= 0:
return
- # This is very simple in fact
- start_block(fid, FIFF.FIFFB_MNE_CTF_COMP)
- for comp in comps:
- start_block(fid, FIFF.FIFFB_MNE_CTF_COMP_DATA)
- # Write the compensation kind
- write_int(fid, FIFF.FIFF_MNE_CTF_COMP_KIND, comp['ctfkind'])
- write_int(fid, FIFF.FIFF_MNE_CTF_COMP_CALIBRATED,
- comp['save_calibrated'])
-
- # Write an uncalibrated or calibrated matrix
- import pdb; pdb.set_trace()
-
- comp['data']['data'] = linalg.inv(
- np.dot(np.diag(comp['rowcals'].ravel())),
- np.dot(comp.data.data,
- linalg.inv(np.diag(comp.colcals.ravel()))))
- write_named_matrix(fid, FIFF.FIFF_MNE_CTF_COMP_DATA, comp['data'])
- end_block(fid, FIFF.FIFFB_MNE_CTF_COMP_DATA)
-
- end_block(fid, FIFF.FIFFB_MNE_CTF_COMP)
+ # XXX
+ raise NotImplementedError, "CTF data processing is not supported yet"
+
+ # # This is very simple in fact
+ # start_block(fid, FIFF.FIFFB_MNE_CTF_COMP)
+ # for comp in comps:
+ # start_block(fid, FIFF.FIFFB_MNE_CTF_COMP_DATA)
+ # # Write the compensation kind
+ # write_int(fid, FIFF.FIFF_MNE_CTF_COMP_KIND, comp['ctfkind'])
+ # write_int(fid, FIFF.FIFF_MNE_CTF_COMP_CALIBRATED,
+ # comp['save_calibrated'])
+ #
+ # # Write an uncalibrated or calibrated matrix
+ # import pdb; pdb.set_trace()
+ #
+ # comp['data']['data'] = linalg.inv(
+ # np.dot(np.diag(comp['rowcals'].ravel())),
+ # np.dot(comp.data.data,
+ # linalg.inv(np.diag(comp.colcals.ravel()))))
+ # write_named_matrix(fid, FIFF.FIFF_MNE_CTF_COMP_DATA, comp['data'])
+ # end_block(fid, FIFF.FIFFB_MNE_CTF_COMP_DATA)
+ #
+ # end_block(fid, FIFF.FIFFB_MNE_CTF_COMP)
diff --git a/mne/fiff/meas_info.py b/mne/fiff/meas_info.py
index 8e1f936..4aaed58 100644
--- a/mne/fiff/meas_info.py
+++ b/mne/fiff/meas_info.py
@@ -6,7 +6,6 @@
import numpy as np
from .tree import dir_tree_find
-from .open import fiff_open
from .constants import FIFF
from .tag import read_tag
from .proj import read_proj
@@ -14,14 +13,13 @@ from .ctf import read_ctf_comp
from .channels import _read_bad_channels
-def read_meas_info(source, tree=None):
+def read_meas_info(fid, tree):
"""Read the measurement info
Parameters
----------
- source: string or file
- If string it is the file name otherwise it's the file descriptor.
- If tree is missing, the meas output argument is None
+ fid: file
+ Open file descriptor
tree: tree
FIF tree structure
@@ -35,33 +33,18 @@ def read_meas_info(source, tree=None):
Node in tree that contains the info.
"""
- if tree is None:
- fid, tree, _ = fiff_open(source)
- open_here = True
- else:
- fid = source
- open_here = False
-
# Find the desired blocks
meas = dir_tree_find(tree, FIFF.FIFFB_MEAS)
if len(meas) == 0:
- if open_here:
- fid.close()
raise ValueError, 'Could not find measurement data'
if len(meas) > 1:
- if open_here:
- fid.close()
raise ValueError, 'Cannot read more that 1 measurement data'
meas = meas[0]
meas_info = dir_tree_find(meas, FIFF.FIFFB_MEAS_INFO)
if len(meas_info) == 0:
- if open_here:
- fid.close()
raise ValueError, 'Could not find measurement info'
if len(meas_info) > 1:
- if open_here:
- fid.close()
raise ValueError, 'Cannot read more that 1 measurement info'
meas_info = meas_info[0]
@@ -109,23 +92,15 @@ def read_meas_info(source, tree=None):
# Check that we have everything we need
if nchan is None:
- if open_here:
- fid.close()
raise ValueError, 'Number of channels in not defined'
if sfreq is None:
- if open_here:
- fid.close()
raise ValueError, 'Sampling frequency is not defined'
if len(chs) == 0:
- if open_here:
- fid.close()
raise ValueError, 'Channel information not defined'
if len(chs) != nchan:
- if open_here:
- fid.close()
raise ValueError, 'Incorrect number of channel definitions found'
if dev_head_t is None or ctf_head_t is None:
@@ -151,12 +126,8 @@ def read_meas_info(source, tree=None):
isotrak = isotrak[0]
else:
if len(isotrak) == 0:
- if open_here:
- fid.close()
raise ValueError, 'Isotrak not found'
if len(isotrak) > 1:
- if open_here:
- fid.close()
raise ValueError, 'Multiple Isotrak found'
dig = []
@@ -252,8 +223,5 @@ def read_meas_info(source, tree=None):
info['acq_pars'] = acq_pars
info['acq_stim'] = acq_stim
- if open_here:
- fid.close()
-
return info, meas
diff --git a/mne/forward.py b/mne/forward.py
index 1e7c28a..2389fc4 100644
--- a/mne/forward.py
+++ b/mne/forward.py
@@ -13,7 +13,7 @@ from .fiff.tag import find_tag, read_tag
from .fiff.matrix import _read_named_matrix, _transpose_named_matrix
from .fiff.pick import pick_channels_forward
-from .source_space import read_source_spaces, find_source_space_hemi
+from .source_space import read_source_spaces_from_tree, find_source_space_hemi
from .transforms import transform_source_space_to, invert_transform
@@ -209,7 +209,7 @@ def read_forward_solution(fname, force_fixed=False, surf_ori=False,
chs.append(tag.data)
try:
- src = read_source_spaces(fid, False, tree)
+ src = read_source_spaces_from_tree(fid, tree, add_geom=False)
except Exception as inst:
fid.close()
raise ValueError, 'Could not read the source spaces (%s)' % inst
diff --git a/mne/inverse.py b/mne/inverse.py
index 35014d9..532fd51 100644
--- a/mne/inverse.py
+++ b/mne/inverse.py
@@ -15,7 +15,7 @@ from .fiff.tree import dir_tree_find
from .fiff.pick import pick_channels_evoked
from .cov import read_cov
-from .source_space import read_source_spaces, find_source_space_hemi
+from .source_space import read_source_spaces_from_tree, find_source_space_hemi
from .forward import _block_diag
from .transforms import invert_transform, transform_source_space_to
@@ -179,7 +179,7 @@ def read_inverse_operator(fname):
# Read the source spaces
#
try:
- inv['src'] = read_source_spaces(fid, False, tree)
+ inv['src'] = read_source_spaces_from_tree(fid, tree, add_geom=False)
except Exception as inst:
fid.close()
raise ValueError, 'Could not read the source spaces (%s)' % inst
diff --git a/mne/layouts/layout.py b/mne/layouts/layout.py
index c9b33ca..6c55fd8 100644
--- a/mne/layouts/layout.py
+++ b/mne/layouts/layout.py
@@ -51,21 +51,21 @@ class Layout(object):
self.pos = pos
self.names = names
-if __name__ == '__main__':
-
- layout = Layout()
-
- import pylab as pl
- pl.rcParams['axes.edgecolor'] = 'w'
- pl.close('all')
- pl.figure(facecolor='k', )
-
- for i in range(5):
- # for i in range(len(pos)):
- ax = pl.axes(layout.pos[i], axisbg='k')
- ax.plot(np.random.randn(3), 'w')
- pl.xticks([], ())
- pl.yticks([], ())
- pl.gca().grid(color='w')
-
- pl.show()
+# if __name__ == '__main__':
+#
+# layout = Layout()
+#
+# import pylab as pl
+# pl.rcParams['axes.edgecolor'] = 'w'
+# pl.close('all')
+# pl.figure(facecolor='k', )
+#
+# for i in range(5):
+# # for i in range(len(pos)):
+# ax = pl.axes(layout.pos[i], axisbg='k')
+# ax.plot(np.random.randn(3), 'w')
+# pl.xticks([], ())
+# pl.yticks([], ())
+# pl.gca().grid(color='w')
+#
+# pl.show()
diff --git a/mne/preprocessing.py b/mne/preprocessing.py
index 7ceee98..d7d31e5 100644
--- a/mne/preprocessing.py
+++ b/mne/preprocessing.py
@@ -1,57 +1,58 @@
import numpy as np
from .fiff.proj import make_projector_info
-from .fiff.compensator import get_current_comp, compensate_to, make_compensator
-
-
-def cancel_noise(data, dest_comp=0):
- """Do projection and compensation as needed
-
- Return the appropriate operators
-
- [res,proj,comp] = mne_ex_cancel_noise(data,dest_comp)
-
- res - Data after noise cancellation
- proj - The projection operator applied
- comp - The compensator which brings uncompensated data to the
- desired compensation grade (will be useful in forward
- calculations)
-
- """
- #
- # Compensate the data and make a compensator for forward modelling
- #
- comp = []
- proj = []
- comp_now = get_current_comp(data['info'])
- if comp_now == dest_comp:
- res = data
- else:
- res = compensate_to(data, dest_comp)
- print 'The data are now compensated to grade %d.' % dest_comp
-
- if dest_comp > 0:
- comp = make_compensator(res['info'], 0, dest_comp)
- print 'Appropriate forward operator compensator created.'
- else:
- print 'No forward operator compensator needed.'
-
- # Do the projection
- if data['info']['projs'] is None:
- print 'No projector included with these data.'
- else:
- # Activate the projection items
- for k in range(len(res['info']['projs'])):
- res['info']['projs'][k]['active'] = True;
-
- # Create the projector
- proj, nproj = make_projector_info(res['info'])
- if nproj == 0:
- print 'The projection vectors do not apply to these channels'
- proj = []
- else:
- print 'Created an SSP operator (subspace dimension = %d)' % nproj
- res['evoked']['epochs'] = np.dot(proj, res['evoked']['epochs'])
- print 'Projector applied to the data'
-
- return res, proj, comp
+from .fiff.compensator import get_current_comp #, compensate_to, make_compensator
+
+# XXX
+
+# def cancel_noise(data, dest_comp=0):
+# """Do projection and compensation as needed
+#
+# Return the appropriate operators
+#
+# [res,proj,comp] = mne_ex_cancel_noise(data,dest_comp)
+#
+# res - Data after noise cancellation
+# proj - The projection operator applied
+# comp - The compensator which brings uncompensated data to the
+# desired compensation grade (will be useful in forward
+# calculations)
+#
+# """
+# #
+# # Compensate the data and make a compensator for forward modelling
+# #
+# comp = []
+# proj = []
+# comp_now = get_current_comp(data['info'])
+# if comp_now == dest_comp:
+# res = data
+# else:
+# res = compensate_to(data, dest_comp)
+# print 'The data are now compensated to grade %d.' % dest_comp
+#
+# if dest_comp > 0:
+# comp = make_compensator(res['info'], 0, dest_comp)
+# print 'Appropriate forward operator compensator created.'
+# else:
+# print 'No forward operator compensator needed.'
+#
+# # Do the projection
+# if data['info']['projs'] is None:
+# print 'No projector included with these data.'
+# else:
+# # Activate the projection items
+# for k in range(len(res['info']['projs'])):
+# res['info']['projs'][k]['active'] = True;
+#
+# # Create the projector
+# proj, nproj = make_projector_info(res['info'])
+# if nproj == 0:
+# print 'The projection vectors do not apply to these channels'
+# proj = []
+# else:
+# print 'Created an SSP operator (subspace dimension = %d)' % nproj
+# res['evoked']['epochs'] = np.dot(proj, res['evoked']['epochs'])
+# print 'Projector applied to the data'
+#
+# return res, proj, comp
diff --git a/mne/source_space.py b/mne/source_space.py
index fceb24a..a2cb303 100644
--- a/mne/source_space.py
+++ b/mne/source_space.py
@@ -16,17 +16,19 @@ def patch_info(nearest):
"""Patch information in a source space
Generate the patch information from the 'nearest' vector in
- a source space
+ a source space. For each vertex in the source space it provides
+ the list of neighboring vertices in the high resolution
+ triangulation.
Parameters
----------
nearest: array
- XXX ?
+ For each vertex gives the index of its closest neighbor.
Returns
-------
pinfo: list
- XXX ?
+ List of neighboring vertices
"""
if nearest is None:
pinfo = None
@@ -35,55 +37,45 @@ def patch_info(nearest):
indn = np.argsort(nearest)
nearest_sorted = nearest[indn]
- uniq, firsti = np.unique(nearest_sorted, return_index=True)
- uniq, lasti = np.unique(nearest_sorted[::-1], return_index=True)
- lasti = nearest.size - lasti
+ steps = np.where(nearest_sorted[1:] != nearest_sorted[:-1])[0] + 1
+ starti = np.r_[[0], steps]
+ stopi = np.r_[steps, [len(nearest)]]
pinfo = list()
- for k in range(len(uniq)):
- pinfo.append(indn[firsti[k]:lasti[k]])
+ for start, stop in zip(starti, stopi):
+ pinfo.append(np.sort(indn[start:stop]))
return pinfo
-def read_source_spaces(source, add_geom=False, tree=None):
+def read_source_spaces_from_tree(fid, tree, add_geom=False):
"""Read the source spaces from a FIF file
Parameters
----------
- source: string or file
- The name of the file or an open file descriptor
-
- add_geom: bool, optional (default False)
- Add geometry information to the surfaces
+ fid: file descriptor
+ An open file descriptor
tree: dict
The FIF tree structure if source is a file id.
+ add_geom: bool, optional (default False)
+ Add geometry information to the surfaces
+
Returns
-------
src: list
The list of source spaces
"""
- # Open the file, create directory
- if isinstance(source, str):
- fid, tree, _ = fiff_open(source)
- open_here = True
- else:
- fid = source
- open_here = False
-
# Find all source spaces
spaces = dir_tree_find(tree, FIFF.FIFFB_MNE_SOURCE_SPACE)
if len(spaces) == 0:
- if open_here:
- fid.close()
raise ValueError, 'No source spaces found'
src = list()
for s in spaces:
print '\tReading a source space...',
- this = _read_one_source_space(fid, s, open_here)
+ this = _read_one_source_space(fid, s)
print '[done]'
if add_geom:
complete_source_space_info(this)
@@ -92,13 +84,30 @@ def read_source_spaces(source, add_geom=False, tree=None):
print '\t%d source spaces read' % len(spaces)
- if open_here:
- fid.close()
-
return src
-def _read_one_source_space(fid, this, open_here):
+def read_source_spaces(fname, add_geom=False):
+ """Read the source spaces from a FIF file
+
+ Parameters
+ ----------
+ fname: string
+ The name of the file
+
+ add_geom: bool, optional (default False)
+ Add geometry information to the surfaces
+
+ Returns
+ -------
+ src: list
+ The list of source spaces
+ """
+ fid, tree, _ = fiff_open(fname)
+ return read_source_spaces_from_tree(fid, tree, add_geom=add_geom)
+
+
+def _read_one_source_space(fid, this):
"""Read one source space
"""
FIFF_BEM_SURF_NTRI = 3104
@@ -114,9 +123,7 @@ def _read_one_source_space(fid, this, open_here):
tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NPOINTS)
if tag is None:
- if open_here:
- fid.close()
- raise ValueError, 'Number of vertices not found'
+ raise ValueError, 'Number of vertices not found'
res['np'] = tag.data
@@ -132,35 +139,25 @@ def _read_one_source_space(fid, this, open_here):
tag = find_tag(fid, this, FIFF.FIFF_MNE_COORD_FRAME)
if tag is None:
- if open_here:
- fid.close()
- raise ValueError, 'Coordinate frame information not found'
+ raise ValueError, 'Coordinate frame information not found'
res['coord_frame'] = tag.data
# Vertices, normals, and triangles
tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_POINTS)
if tag is None:
- if open_here:
- fid.close()
raise ValueError, 'Vertex data not found'
res['rr'] = tag.data.astype(np.float) # make it double precision for mayavi
if res['rr'].shape[0] != res['np']:
- if open_here:
- fid.close()
raise ValueError, 'Vertex information is incorrect'
tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NORMALS)
if tag is None:
- if open_here:
- fid.close()
raise ValueError, 'Vertex normals not found'
res['nn'] = tag.data
if res['nn'].shape[0] != res['np']:
- if open_here:
- fid.close()
raise ValueError, 'Vertex normal information is incorrect'
if res['ntri'] > 0:
@@ -168,8 +165,6 @@ def _read_one_source_space(fid, this, open_here):
if tag is None:
tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_TRIANGLES)
if tag is None:
- if open_here:
- fid.close()
raise ValueError, 'Triangulation not found'
else:
res['tris'] = tag.data - 1 # index start at 0 in Python
@@ -177,8 +172,6 @@ def _read_one_source_space(fid, this, open_here):
res['tris'] = tag.data - 1 # index start at 0 in Python
if res['tris'].shape[0] != res['ntri']:
- if open_here:
- fid.close()
raise ValueError, 'Triangulation information is incorrect'
else:
res['tris'] = None
@@ -193,14 +186,10 @@ def _read_one_source_space(fid, this, open_here):
res['nuse'] = tag.data
tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_SELECTION)
if tag is None:
- if open_here:
- fid.close()
raise ValueError, 'Source selection information missing'
res['inuse'] = tag.data.astype(np.int).T
if len(res['inuse']) != res['np']:
- if open_here:
- fid.close()
raise ValueError, 'Incorrect number of entries in source space ' \
'selection'
diff --git a/mne/tests/test_source_space.py b/mne/tests/test_source_space.py
index 46d8193..0ae1f48 100644
--- a/mne/tests/test_source_space.py
+++ b/mne/tests/test_source_space.py
@@ -7,7 +7,7 @@ from mne.datasets import sample
examples_folder = op.join(op.dirname(__file__), '..', '..', 'examples')
data_path = sample.data_path(examples_folder)
-fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-eeg-oct-6-fwd.fif')
+fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-eeg-oct-6p-fwd.fif')
def test_read_source_spaces():
"""Testing reading of source space meshes
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-mne.git
More information about the debian-med-commit
mailing list