[med-svn] [python-mne] 02/376: first running version of read_evoked
Yaroslav Halchenko
debian at onerussian.com
Fri Nov 27 17:21:54 UTC 2015
This is an automated email from the git hooks/post-receive script.
yoh pushed a commit to annotated tag v0.1
in repository python-mne.
commit e334ccbe52d1cfc4f56170f2c0b7e71305f525db
Author: Alexandre Gramfort <alexandre.gramfort at inria.fr>
Date: Mon Dec 27 16:04:14 2010 -0500
first running version of read_evoked
---
examples/read_ave.py | 20 +-
fiff/__init__.py | 1 +
fiff/ctf.py | 175 +++++++++++++
fiff/evoked.py | 567 +++++++++++++++++++++++++++++++++++++++++++
fiff/open.py | 5 +-
fiff/{read_tag.py => tag.py} | 109 +++++++--
fiff/tree.py | 50 ++--
7 files changed, 892 insertions(+), 35 deletions(-)
diff --git a/examples/read_ave.py b/examples/read_ave.py
index 4ced76f..ae3e243 100644
--- a/examples/read_ave.py
+++ b/examples/read_ave.py
@@ -1,4 +1,22 @@
import fiff
fname = 'sm02a1-ave.fif'
-fid, tree, directory = fiff.fiff_open(fname, verbose=True)
+
+# fid, tree, directory = fiff.fiff_open(fname, verbose=True)
+# meas = fiff.tree.dir_tree_find(tree, fiff.FIFF.FIFFB_MEAS)
+# meas_info = fiff.tree.dir_tree_find(meas, fiff.FIFF.FIFFB_MEAS_INFO)
+
+# meas = fiff.evoked.read_meas_info(fname)
+# def is_tree(tree):
+# assert isinstance(tree, dict)
+# tree.block
+# for child in tree.children:
+# is_tree(child)
+#
+# is_tree(tree)
+# meas = fiff.tree.dir_tree_find(tree, fiff.FIFF.FIFFB_MEAS)
+# is_tree(meas)
+# meas_info = fiff.tree.dir_tree_find(meas, fiff.FIFF.FIFFB_MEAS_INFO)
+
+data = fiff.read_evoked(fname, setno=0)
+
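
For orientation, a minimal sketch of what the new reader hands back, assuming the dict
layout assembled at the end of fiff/evoked.py in this commit (the file name is the
sample file already used by the example above):

    import fiff

    data = fiff.read_evoked('sm02a1-ave.fif', setno=0)
    info = data['info']              # measurement info: channels, sfreq, transforms, ...
    evoked = data['evoked']          # the selected data set / aspect
    print evoked['comment']          # condition name read from the FIF file
    print evoked['nave']             # number of averaged epochs
    print evoked['epochs'].shape     # (nchan, nsamp) calibrated data matrix
    print evoked['times'][:5]        # time axis in seconds, derived from first/last and sfreq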
diff --git a/fiff/__init__.py b/fiff/__init__.py
index 3d30a6e..51e92f4 100644
--- a/fiff/__init__.py
+++ b/fiff/__init__.py
@@ -1,2 +1,3 @@
from .open import fiff_open
+from .evoked import read_evoked
from .constants import FIFF
diff --git a/fiff/ctf.py b/fiff/ctf.py
new file mode 100644
index 0000000..2bfb52f
--- /dev/null
+++ b/fiff/ctf.py
@@ -0,0 +1,175 @@
+import numpy as np
+
+from .constants import FIFF
+from .tag import find_tag, has_tag, read_tag
+from .tree import dir_tree_find
+
+
+def hex2dec(s):
+ return int(s, 16)
+
+
+def read_named_matrix(fid, node, matkind):
+ """
+ %
+ % [mat] = fiff_read_named_matrix(fid,node)
+ %
+ % Read named matrix from the given node
+ %
+ """
+
+ # Descend one level if necessary
+ if node.block != FIFF.FIFFB_MNE_NAMED_MATRIX:
+ for k in range(node.nchild):
+ if node.children[k].block == FIFF.FIFFB_MNE_NAMED_MATRIX:
+ if has_tag(node.children[k], matkind):
+ node = node.children[k]
+ break
+ else:
+ raise ValueError, 'Desired named matrix (kind = %d) not available' % matkind
+
+ else:
+ if not has_tag(node, matkind):
+ raise ValueError, 'Desired named matrix (kind = %d) not available' % matkind
+
+ # Read everything we need
+ tag = find_tag(fid, node, matkind)
+ if tag is None:
+ raise ValueError, 'Matrix data missing'
+ else:
+ data = tag.data;
+
+ nrow, ncol = data.shape
+ tag = find_tag(fid, node, FIFF.FIFF_MNE_NROW)
+ if tag is not None:
+ if tag.data != nrow:
+ raise ValueError, 'Number of rows in matrix data and FIFF_MNE_NROW tag do not match'
+
+ tag = find_tag(fid, node, FIFF.FIFF_MNE_NCOL)
+ if tag is not None:
+ if tag.data != ncol:
+ raise ValueError, 'Number of columns in matrix data and FIFF_MNE_NCOL tag do not match'
+
+ tag = find_tag(fid, node, FIFF.FIFF_MNE_ROW_NAMES)
+ if tag is not None:
+ row_names = tag.data;
+ else:
+ row_names = None
+
+ tag = find_tag(fid, node, FIFF.FIFF_MNE_COL_NAMES)
+ if tag is not None:
+ col_names = tag.data;
+ else:
+ col_names = None
+
+ # Put it together
+ mat = dict(nrow=nrow, ncol=ncol)
+ if row_names is not None:
+ mat['row_names'] = row_names.split(':')
+ else:
+ mat['row_names'] = None
+
+ if col_names is not None:
+ mat['col_names'] = col_names.split(':')
+ else:
+ mat['col_names'] = None
+
+ mat['data'] = data;
+ return mat
+
+
+def read_ctf_comp(fid, node, chs):
+ """
+ %
+ % [ compdata ] = fiff_read_ctf_comp(fid,node,chs)
+ %
+ % Read the CTF software compensation data from the given node
+ %
+ """
+
+ compdata = []
+ comps = dir_tree_find(node, FIFF.FIFFB_MNE_CTF_COMP_DATA)
+
+ for node in comps:
+
+ # Read the data we need
+ mat = read_named_matrix(fid, node, FIFF.FIFF_MNE_CTF_COMP_DATA)
+ for p in range(node.nent):
+ kind = node.directory[p].kind
+ pos = node.directory[p].pos
+ if kind == FIFF.FIFF_MNE_CTF_COMP_KIND:
+ tag = read_tag(fid,pos)
+ break
+ else:
+ raise ValueError, 'Compensation type not found'
+
+ # Get the compensation kind and map it to a simple number
+ one = dict(ctfkind=tag.data, kind=-1)
+ del tag
+
+ if one['ctfkind'] == hex2dec('47314252'):
+ one['kind'] = 1
+ elif one['ctfkind'] == hex2dec('47324252'):
+ one['kind'] = 2
+ elif one['ctfkind'] == hex2dec('47334252'):
+ one['kind'] = 3
+ else:
+ one['kind'] = one['ctfkind']
+
+ for p in range(node.nent):
+ kind = node.directory[p].kind
+ pos = node.directory[p].pos
+ if kind == FIFF.FIFF_MNE_CTF_COMP_CALIBRATED:
+ tag = read_tag(fid,pos)
+ calibrated = tag.data
+ break
+ else:
+ calibrated = False
+
+ one['save_calibrated'] = calibrated;
+ one['rowcals'] = np.ones(mat['data'].shape[0])
+ one['colcals'] = np.ones(mat['data'].shape[1])
+ if not calibrated:
+ #
+ # Calibrate...
+ #
+ # Do the columns first
+ #
+ ch_names = []
+ for p in range(len(chs)):
+ ch_names.append(chs[p].ch_name)
+
+ col_cals = np.zeros(mat['data'].shape[1])
+ for col in range(mat['data'].shape[1]):
+ p = ch_names.count(mat['col_names'][col])
+ if p == 0:
+ raise ValueError, 'Channel %s is not available in data' % mat['col_names'][col]
+ elif p > 1:
+ raise ValueError, 'Ambiguous channel %s' % mat['col_names'][col]
+
+ idx = ch_names.index(mat['col_names'][col])
+ col_cals[col] = 1.0 / (chs[idx].range * chs[idx].cal)
+
+ # Then the rows
+ row_cals = np.zeros(mat['data'].shape[0])
+ for row in range(mat['data'].shape[0]):
+ p = ch_names.count(mat['row_names'][row])
+ if p == 0:
+ raise ValueError, 'Channel %s is not available in data' % mat['row_names'][row]
+ elif p > 1:
+ raise ValueError, 'Ambiguous channel %s' % mat['row_names'][row]
+
+ idx = ch_names.index(mat['row_names'][row])
+ row_cals[row] = chs[idx].range * chs[idx].cal
+
+ mat['data'] = np.dot(np.diag(row_cals), np.dot(mat['data'], np.diag(col_cals)))
+ one['rowcals'] = row_cals
+ one['colcals'] = col_cals
+
+ one['data'] = mat
+ compdata.append(one)
+ del row_cals
+ del col_cals
+
+ if len(compdata) > 0:
+ print '\tRead %d compensation matrices\n' % len(compdata)
+
+ return compdata
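
As an aside, the hex constants compared against one['ctfkind'] above appear to be the
ASCII bytes of the CTF compensation-grade names (G1BR/G2BR/G3BR, i.e. first- to
third-order gradient compensation); a quick check using hex2dec() as defined in this
file:

    def hex2dec(s):
        return int(s, 16)

    for name, code in [('G1BR', '47314252'),   # first-order gradient compensation
                       ('G2BR', '47324252'),   # second-order
                       ('G3BR', '47334252')]:  # third-order
        # each hex constant is just the four ASCII character codes of the name
        assert hex2dec(code) == int(name.encode('hex'), 16)
        print name, '->', hex2dec(code)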
diff --git a/fiff/evoked.py b/fiff/evoked.py
new file mode 100644
index 0000000..9aba2ce
--- /dev/null
+++ b/fiff/evoked.py
@@ -0,0 +1,567 @@
+import numpy as np
+
+from .bunch import Bunch
+from .constants import FIFF
+from .open import fiff_open
+from .ctf import read_ctf_comp
+from .tag import read_tag, find_tag
+from .tree import dir_tree_find
+
+
+def read_proj(fid, node):
+ """
+ [ projdata ] = fiff_read_proj(fid,node)
+
+ Read the SSP data under a given directory node
+
+ """
+
+ projdata = []
+
+ # Locate the projection data
+ nodes = dir_tree_find(node, FIFF.FIFFB_PROJ)
+ if len(nodes) == 0:
+ return projdata
+
+ tag = find_tag(fid, nodes[0], FIFF.FIFF_NCHAN)
+ if tag is not None:
+ global_nchan = tag.data;
+
+ items = dir_tree_find(nodes[0], FIFF.FIFFB_PROJ_ITEM)
+ for i in range(len(items)):
+
+ # Find all desired tags in one item
+ item = items[i]
+ tag = find_tag(fid, item, FIFF.FIFF_NCHAN)
+ if tag is not None:
+ nchan = tag.data
+ else:
+ nchan = global_nchan
+
+ tag = find_tag(fid, item, FIFF.FIFF_DESCRIPTION)
+ if tag is not None:
+ desc = tag.data
+ else:
+ tag = find_tag(fid, item, FIFF.FIFF_NAME)
+ if tag is not None:
+ desc = tag.data
+ else:
+ raise ValueError, 'Projection item description missing'
+
+ tag = find_tag(fid, item, FIFF.FIFF_PROJ_ITEM_CH_NAME_LIST)
+ if tag is not None:
+ namelist = tag.data;
+ else:
+ raise ValueError, 'Projection item channel list missing'
+
+ tag = find_tag(fid, item,FIFF.FIFF_PROJ_ITEM_KIND);
+ if tag is not None:
+ kind = tag.data;
+ else:
+ raise ValueError, 'Projection item kind missing'
+
+ tag = find_tag(fid, item, FIFF.FIFF_PROJ_ITEM_NVEC)
+ if tag is not None:
+ nvec = tag.data
+ else:
+ raise ValueError, 'Number of projection vectors not specified'
+
+ tag = find_tag(fid, item, FIFF.FIFF_PROJ_ITEM_CH_NAME_LIST)
+ if tag is not None:
+ names = tag.data.split(':')
+ else:
+ raise ValueError, 'Projection item channel list missing'
+
+ tag = find_tag(fid, item,FIFF.FIFF_PROJ_ITEM_VECTORS);
+ if tag is not None:
+ data = tag.data;
+ else:
+ raise ValueError, 'Projection item data missing'
+
+ tag = find_tag(fid, item,FIFF.FIFF_MNE_PROJ_ITEM_ACTIVE);
+ if tag is not None:
+ active = tag.data;
+ else:
+ active = False;
+
+ if data.shape[1] != len(names):
+ raise ValueError, 'Number of channel names does not match the size of data matrix'
+
+ # Use exactly the same fields in data as in a named matrix
+ one = Bunch(kind=kind, active=active, desc=desc,
+ data=Bunch(nrow=nvec, ncol=nchan, row_names=None,
+ col_names=names, data=data))
+
+ projdata.append(one)
+
+ if len(projdata) > 0:
+ print '\tRead a total of %d projection items:\n' % len(projdata)
+ for k in range(len(projdata)):
+ print '\t\t%s (%d x %d)' % (projdata[k].desc,
+ projdata[k].data.nrow,
+ projdata[k].data.ncol)
+ if projdata[k].active:
+ print ' active\n'
+ else:
+ print ' idle\n'
+
+ return projdata
+
+
+def read_bad_channels(fid, node):
+ """
+ %
+ % [bads] = fiff_read_bad_channels(fid,node)
+ %
+ % Read the bad channel list from a node if it exists
+ %
+ % fid - The file id
+ % node - The node of interest
+ %
+ """
+
+ nodes = dir_tree_find(node, FIFF.FIFFB_MNE_BAD_CHANNELS)
+
+ bads = [];
+ if len(nodes) > 0:
+ for node in nodes:
+ tag = find_tag(fid, node, FIFF.FIFF_MNE_CH_NAME_LIST)
+ if tag is not None:
+ bads = tag.data.split(':')
+ return bads
+
+
+def read_meas_info(source, tree=None):
+ """[info,meas] = fiff_read_meas_info(source,tree)
+
+ Read the measurement info
+
+ If tree is specified, source is assumed to be an open file id,
+ otherwise the name of the file to read. If tree is missing, the
+ file is opened here and closed again before returning.
+ """
+ if tree is None:
+ fid, tree, _ = fiff_open(source)
+ open_here = True
+ else:
+ fid = source
+ open_here = False
+
+ # Find the desired blocks
+ meas = dir_tree_find(tree, FIFF.FIFFB_MEAS)
+ if len(meas) == 0:
+ if open_here:
+ fid.close()
+ raise ValueError, 'Could not find measurement data'
+ if len(meas) > 1:
+ if open_here:
+ fid.close()
+ raise ValueError, 'Cannot read more than one measurement data block'
+ meas = meas[0]
+
+ meas_info = dir_tree_find(meas, FIFF.FIFFB_MEAS_INFO)
+ if len(meas_info) == 0:
+ if open_here:
+ fid.close()
+ raise ValueError, 'Could not find measurement info'
+ if len(meas_info) > 1:
+ if open_here:
+ fid.close()
+ raise ValueError, 'Cannot read more than one measurement info block'
+ meas_info = meas_info[0]
+
+ # Read measurement info
+ dev_head_t = None
+ ctf_head_t = None
+ meas_date = None
+ highpass = None
+ lowpass = None
+ nchan = None
+ sfreq = None
+ chs = []
+ p = 0
+ for k in range(meas_info.nent):
+ kind = meas_info.directory[k].kind
+ pos = meas_info.directory[k].pos
+ if kind == FIFF.FIFF_NCHAN:
+ tag = read_tag(fid, pos)
+ nchan = tag.data
+ elif kind == FIFF.FIFF_SFREQ:
+ tag = read_tag(fid, pos)
+ sfreq = tag.data
+ elif kind == FIFF.FIFF_CH_INFO:
+ tag = read_tag(fid, pos)
+ chs.append(tag.data)
+ p += 1
+ elif kind == FIFF.FIFF_LOWPASS:
+ tag = read_tag(fid, pos)
+ lowpass = tag.data
+ elif kind == FIFF.FIFF_HIGHPASS:
+ tag = read_tag(fid, pos)
+ highpass = tag.data
+ elif kind == FIFF.FIFF_MEAS_DATE:
+ tag = read_tag(fid, pos)
+ meas_date = tag.data
+ elif kind == FIFF.FIFF_COORD_TRANS:
+ tag = read_tag(fid, pos)
+ cand = tag.data
+ if cand.from_ == FIFF.FIFFV_COORD_DEVICE and \
+ cand.to == FIFF.FIFFV_COORD_HEAD: # XXX : from
+ dev_head_t = cand
+ elif cand.from_ == FIFF.FIFFV_MNE_COORD_CTF_HEAD and \
+ cand.to == FIFF.FIFFV_COORD_HEAD:
+ ctf_head_t = cand
+
+ # XXX : fix
+ # Check that we have everything we need
+ # if ~exist('nchan','var')
+ # if open_here
+ # fclose(fid);
+ # end
+ # error(me,'Number of channels in not defined');
+ # end
+ # if ~exist('sfreq','var')
+ # if open_here
+ # fclose(fid);
+ # end
+ # error(me,'Sampling frequency is not defined');
+ # end
+ # if ~exist('chs','var')
+ # if open_here
+ # fclose(fid);
+ # end
+ # error(me,'Channel information not defined');
+ # end
+ # if length(chs) ~= nchan
+ # if open_here
+ # fclose(fid);
+ # end
+ # error(me,'Incorrect number of channel definitions found');
+ # end
+
+ if dev_head_t is None or ctf_head_t is None:
+ hpi_result = dir_tree_find(meas_info, FIFF.FIFFB_HPI_RESULT)
+ if len(hpi_result) == 1:
+ hpi_result = hpi_result[0]
+ for k in range(hpi_result.nent):
+ kind = hpi_result.directory[k].kind
+ pos = hpi_result.directory[k].pos
+ if kind == FIFF.FIFF_COORD_TRANS:
+ tag = read_tag(fid, pos)
+ cand = tag.data;
+ if cand.from_ == FIFF.FIFFV_COORD_DEVICE and \
+ cand.to == FIFF.FIFFV_COORD_HEAD: # XXX: from
+ dev_head_t = cand;
+ elif cand.from_ == FIFF.FIFFV_MNE_COORD_CTF_HEAD and \
+ cand.to == FIFF.FIFFV_COORD_HEAD:
+ ctf_head_t = cand;
+
+ # Locate the Polhemus data
+ isotrak = dir_tree_find(meas_info,FIFF.FIFFB_ISOTRAK)
+ if len(isotrak) == 0:
+ if open_here:
+ fid.close()
+ raise ValueError, 'Isotrak not found'
+ if len(isotrak) > 1:
+ if open_here:
+ fid.close()
+ raise ValueError, 'Multiple Isotrak found'
+ isotrak = isotrak[0]
+
+ dig = []
+ for k in range(isotrak.nent):
+ kind = isotrak.directory[k].kind;
+ pos = isotrak.directory[k].pos;
+ if kind == FIFF.FIFF_DIG_POINT:
+ tag = read_tag(fid,pos);
+ dig.append(tag.data)
+ dig[-1]['coord_frame'] = FIFF.FIFFV_COORD_HEAD
+
+ # Locate the acquisition information
+ acqpars = dir_tree_find(meas_info, FIFF.FIFFB_DACQ_PARS);
+ acq_pars = []
+ acq_stim = []
+ if len(acqpars) == 1:
+ acqpars = acqpars[0]
+ for k in range(acqpars.nent):
+ kind = acqpars.directory[k].kind
+ pos = acqpars.directory[k].pos
+ if kind == FIFF.FIFF_DACQ_PARS:
+ tag = read_tag(fid,pos)
+ acq_pars = tag.data
+ elif kind == FIFF.FIFF_DACQ_STIM:
+ tag = read_tag(fid, pos)
+ acq_stim = tag.data
+
+ # Load the SSP data
+ projs = read_proj(fid, meas_info)
+
+ # Load the CTF compensation data
+ comps = read_ctf_comp(fid, meas_info,chs)
+
+ # Load the bad channel list
+ bads = read_bad_channels(fid, meas_info)
+
+ #
+ # Put the data together
+ #
+ if tree.id is not None:
+ info = dict(file_id=tree.id)
+ else:
+ info = dict(file_id=None)
+
+ # Make the most appropriate selection for the measurement id
+ if meas_info.parent_id is None:
+ if meas_info.id is None:
+ if meas.id is None:
+ if meas.parent_id is None:
+ info['meas_id'] = info['file_id']
+ else:
+ info['meas_id'] = meas.parent_id
+ else:
+ info['meas_id'] = meas.id
+ else:
+ info['meas_id'] = meas_info.id
+ else:
+ info['meas_id'] = meas_info.parent_id;
+
+ if meas_date is None:
+ info['meas_date'] = [info['meas_id']['secs'], info['meas_id']['usecs']]
+ else:
+ info['meas_date'] = meas_date
+
+ info['nchan'] = nchan
+ info['sfreq'] = sfreq
+ info['highpass'] = highpass if highpass is not None else 0
+ info['lowpass'] = lowpass if lowpass is not None else info['sfreq']/2.0
+
+ # Add the channel information and make a list of channel names
+ # for convenience
+ info['chs'] = chs;
+ info['ch_names'] = [ch.ch_name for ch in chs]
+
+ #
+ # Add the coordinate transformations
+ #
+ info['dev_head_t'] = dev_head_t
+ info['ctf_head_t'] = ctf_head_t
+ if dev_head_t is not None and ctf_head_t is not None:
+ info['dev_ctf_t'] = info['dev_head_t']
+ info['dev_ctf_t'].to = info['ctf_head_t'].from_ # XXX : see if better name
+ info['dev_ctf_t'].trans = np.dot(np.linalg.inv(ctf_head_t.trans), info['dev_ctf_t'].trans)
+ else:
+ info['dev_ctf_t'] = []
+
+ # All kinds of auxiliary stuff
+ info['dig'] = dig
+ info['bads'] = bads
+ info['projs'] = projs
+ info['comps'] = comps
+ info['acq_pars'] = acq_pars
+ info['acq_stim'] = acq_stim
+
+ if open_here:
+ fid.close()
+
+ return info, meas
+
+
+def read_evoked(fname, setno=1):
+ """
+ [data] = fiff_read_evoked(fname,setno)
+
+ Read one evoked data set
+
+ """
+
+ if setno < 0:
+ raise ValueError, 'Data set selector must be non-negative'
+
+ print 'Reading %s ...\n' % fname
+ fid, tree, _ = fiff_open(fname);
+
+ # Read the measurement info
+ info, meas = read_meas_info(fid, tree)
+ info['filename'] = fname
+
+ # Locate the data of interest
+ processed = dir_tree_find(meas,FIFF.FIFFB_PROCESSED_DATA);
+ if len(processed) == 0:
+ fid.close()
+ raise ValueError, 'Could not find processed data'
+
+ evoked = dir_tree_find(meas,FIFF.FIFFB_EVOKED)
+ if len(evoked) == 0:
+ fid.close()
+ raise ValueError, 'Could not find evoked data'
+
+ # Identify the aspects
+ #
+ naspect = 0
+ is_smsh = None
+ sets = []
+ for k in range(len(evoked)):
+ aspects_k = dir_tree_find(evoked[k], FIFF.FIFFB_ASPECT)
+ set_k = dict(aspects=aspects_k,
+ naspect=len(aspects_k))
+ sets.append(set_k)
+
+ if sets[k]['naspect'] > 0:
+ if is_smsh is None:
+ is_smsh = np.zeros((1, sets[k]['naspect']))
+ else:
+ is_smsh = np.c_[is_smsh, np.zeros((1, sets[k]['naspect']))]
+ naspect += sets[k]['naspect']
+
+ saspects = dir_tree_find(evoked[k], FIFF.FIFFB_SMSH_ASPECT)
+ nsaspects = len(saspects)
+ if nsaspects > 0:
+ sets[k]['naspect'] += nsaspects
+ sets[k]['naspect'] = [sets[k]['naspect'], saspects] # XXX : potential bug
+ is_smsh = np.c_[is_smsh, np.ones(1, sets[k]['naspect'])]
+ naspect += nsaspects;
+
+ print '\t%d evoked data sets containing a total of %d data aspects' \
+ ' in %s\n' % (len(evoked), naspect, fname)
+
+ if setno >= naspect or setno < 0:
+ fid.close()
+ raise ValueError, 'Data set selector out of range'
+
+ # Next locate the evoked data set
+ #
+ p = 0
+ goon = True
+ for k in range(len(evoked)):
+ for a in range(sets[k]['naspect']):
+ if p == setno:
+ my_evoked = evoked[k]
+ my_aspect = sets[k]['aspects'][a]
+ goon = False
+ break
+ p += 1
+ if not goon:
+ break
+ else:
+ # The desired data should have been found but better to check
+ fid.close()
+ raise ValueError, 'Desired data set not found'
+
+ # Now find the data in the evoked block
+ nchan = 0
+ sfreq = -1
+ q = 0
+ chs = []
+ comment = None
+ for k in range(my_evoked.nent):
+ kind = my_evoked.directory[k].kind
+ pos = my_evoked.directory[k].pos
+ if kind == FIFF.FIFF_COMMENT:
+ tag = read_tag(fid, pos)
+ comment = tag.data
+ elif kind == FIFF.FIFF_FIRST_SAMPLE:
+ tag = read_tag(fid, pos)
+ first = tag.data
+ elif kind == FIFF.FIFF_LAST_SAMPLE:
+ tag = read_tag(fid, pos)
+ last = tag.data
+ elif kind == FIFF.FIFF_NCHAN:
+ tag = read_tag(fid, pos)
+ nchan = tag.data
+ elif kind == FIFF.FIFF_SFREQ:
+ tag = read_tag(fid, pos)
+ sfreq = tag.data
+ elif kind == FIFF.FIFF_CH_INFO:
+ tag = read_tag(fid, pos)
+ chs.append(tag.data)
+ q += 1
+
+ if comment is None:
+ comment = 'No comment'
+
+ # Local channel information?
+ if nchan > 0:
+ if len(chs) == 0:
+ fid.close()
+ raise ValueError, 'Local channel information was not found when it was expected.'
+
+ if len(chs) != nchan:
+ fid.close()
+ raise ValueError, 'Number of channels and number of channel definitions are different'
+
+ info['chs'] = chs
+ info['nchan'] = nchan
+ print '\tFound channel information in evoked data. nchan = %d\n' % nchan
+ if sfreq > 0:
+ info['sfreq'] = sfreq
+
+ nsamp = last - first + 1
+ print '\tFound the data of interest:\n'
+ print '\t\tt = %10.2f ... %10.2f ms (%s)\n' % (
+ 1000*first/info['sfreq'], 1000*last/info['sfreq'], comment)
+ if len(info['comps']) > 0:
+ print '\t\t%d CTF compensation matrices available\n' % len(info['comps'])
+
+ # Read the data in the aspect block
+ nave = 1
+ epoch = []
+ for k in range(my_aspect.nent):
+ kind = my_aspect.directory[k].kind
+ pos = my_aspect.directory[k].pos
+ if kind == FIFF.FIFF_COMMENT:
+ tag = read_tag(fid, pos)
+ comment = tag.data
+ elif kind == FIFF.FIFF_ASPECT_KIND:
+ tag = read_tag(fid, pos)
+ aspect_kind = tag.data
+ elif kind == FIFF.FIFF_NAVE:
+ tag = read_tag(fid, pos)
+ nave = tag.data
+ elif kind == FIFF.FIFF_EPOCH:
+ tag = read_tag(fid, pos)
+ epoch.append(tag)
+
+ print '\t\tnave = %d aspect type = %d\n' % (nave, aspect_kind)
+
+ nepoch = len(epoch)
+ if nepoch != 1 and nepoch != info['nchan']:
+ fid.close()
+ raise ValueError, 'Number of epoch tags is unreasonable '\
+ '(nepoch = %d nchan = %d)' % (nepoch, info['nchan'])
+
+ if nepoch == 1:
+ # Only one epoch
+ all_data = epoch[0].data
+ # May need a transpose if the number of channels is one
+ if all_data.shape[1] == 1 and info['nchan'] == 1:
+ all_data = all_data.T
+
+ else:
+ # Put the old style epochs together
+ all_data = epoch[0].data.T
+ for k in range(1, nepoch):
+ all_data = np.r_[all_data, epoch[k].data.T]
+
+ if all_data.shape[1] != nsamp:
+ fid.close()
+ raise ValueError, 'Incorrect number of samples (%d instead of %d)' % (
+ all_data.shape[1], nsamp)
+
+ # Calibrate
+ cals = np.array([info['chs'][k].cal for k in range(info['nchan'])])
+ all_data = np.dot(np.diag(cals.ravel()), all_data)
+
+ # Put it all together
+ data = dict(info=info, evoked=dict(aspect_kind=aspect_kind,
+ is_smsh=is_smsh[0][setno],
+ nave=nave, first=first,
+ last=last, comment=comment,
+ times=np.arange(first, last + 1,
+ dtype=np.float) / info['sfreq'],
+ epochs=all_data))
+
+ fid.close()
+
+ return data
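
A short usage sketch for the info reader on its own (keys as assembled by
read_meas_info() above; the file name is only illustrative):

    from fiff.evoked import read_meas_info

    info, meas = read_meas_info('sm02a1-ave.fif')
    print info['nchan'], info['sfreq']      # basic acquisition parameters
    print info['ch_names'][:5]              # channel names collected from the chs list
    print info['bads']                      # bad channel names, [] when none are marked
    print len(info['projs']), 'SSP projectors,', len(info['comps']), 'CTF comp matrices'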
diff --git a/fiff/open.py b/fiff/open.py
index 515fc49..6da7123 100644
--- a/fiff/open.py
+++ b/fiff/open.py
@@ -1,6 +1,3 @@
-import struct
-import numpy as np
-
from .read_tag import read_tag_info, read_tag
from .tree import make_dir_tree
from .constants import FIFF
@@ -45,7 +42,7 @@ def fiff_open(fname, verbose=False):
pos = fid.tell()
directory.append(read_tag_info(fid))
- tree = make_dir_tree(fid, directory)
+ tree, _ = make_dir_tree(fid, directory)
if verbose:
print '[done]\n'
diff --git a/fiff/read_tag.py b/fiff/tag.py
similarity index 60%
rename from fiff/read_tag.py
rename to fiff/tag.py
index 700500d..693bbd0 100644
--- a/fiff/read_tag.py
+++ b/fiff/tag.py
@@ -54,9 +54,77 @@ def read_tag(fid, pos=None):
if tag.size > 0:
matrix_coding = is_matrix & tag.type
if matrix_coding != 0:
- raise ValueError, "matrix_coding not implemented"
- # XXX : todo
- pass
+ matrix_coding = matrix_coding >> 16
+
+ # Matrices
+ if matrix_coding == matrix_coding_dense:
+ # Find dimensions and return to the beginning of tag data
+ pos = fid.tell()
+ fid.seek(tag.size - 4, 1)
+ ndim = np.fromfile(fid, dtype='>i', count=1)
+ fid.seek(-(ndim + 1) * 4, 1)
+ dims = np.fromfile(fid, dtype='>i', count=ndim)
+ #
+ # Back to where the data start
+ #
+ fid.seek(pos, 0);
+
+ if ndim != 2:
+ raise ValueError, 'Only two-dimensional matrices are supported at this time'
+
+ matrix_type = data_type & tag.type
+
+ if matrix_type == FIFF.FIFFT_INT:
+ tag.data = np.fromfile(fid, dtype='>i', count=dims.prod()).reshape(dims).T
+ elif matrix_type == FIFF.FIFFT_JULIAN:
+ tag.data = np.fromfile(fid, dtype='>i', count=dims.prod()).reshape(dims).T
+ elif matrix_type == FIFF.FIFFT_FLOAT:
+ tag.data = np.fromfile(fid, dtype='>f4', count=dims.prod()).reshape(dims).T
+ elif matrix_type == FIFF.FIFFT_DOUBLE:
+ tag.data = np.fromfile(fid, dtype='>f8', count=dims.prod()).reshape(dims).T
+ elif matrix_type == FIFF.FIFFT_COMPLEX_FLOAT:
+ data = np.fromfile(fid, dtype='>f4', count=2*dims.prod())
+ # Note: we need the non-conjugate transpose here
+ tag.data = (data[::2] + 1j * data[1::2]).reshape(dims).T
+ elif matrix_type == FIFF.FIFFT_COMPLEX_DOUBLE:
+ data = np.fromfile(fid, dtype='>f8', count=2*dims.prod())
+ # Note: we need the non-conjugate transpose here
+ tag.data = (data[::2] + 1j * data[1::2]).reshape(dims).T
+ else:
+ raise ValueError, 'Cannot handle matrix of type %d yet' % matrix_type
+
+ elif matrix_coding == matrix_coding_CCS or matrix_coding == matrix_coding_RCS:
+ from scipy import sparse
+ # Find dimensions and return to the beginning of tag data
+ pos = fid.tell()
+ fid.seek(tag.size-4, 1)
+ ndim = np.fromfile(fid, dtype='>i', count=1)
+ fid.seek(-(ndim+2)*4, 1)
+ dims = np.fromfile(fid, dtype='>i', count=ndim+1)
+ if ndim != 2:
+ raise ValueError, 'Only two-dimensional matrices are supported at this time'
+
+ # Back to where the data start
+ fid.seek(pos, 0)
+ nnz = dims[0]
+ nrow = dims[1]
+ ncol = dims[2]
+ sparse_data = np.fromfile(fid, dtype='>f4', count=nnz)
+ if matrix_coding == matrix_coding_CCS:
+ # CCS
+ sparse_indices = np.fromfile(fid, dtype='>i4', count=nnz)
+ sparse_ptrs = np.fromfile(fid, dtype='>i4', count=ncol+1)
+ tag.data = sparse.csc_matrix((sparse_data, sparse_indices,
+ sparse_ptrs), shape=(nrow, ncol))
+ else:
+ # RCS
+ sparse_indices = np.fromfile(fid, dtype='>i4', count=nnz)
+ sparse_ptrs = np.fromfile(fid, dtype='>i4', count=nrow+1)
+ tag.data = sparse.csr_matrix((sparse_data, sparse_indices,
+ sparse_ptrs), shape=(nrow, ncol))
+ else:
+ raise ValueError, 'Cannot handle other than dense or sparse matrices yet'
else:
# All other data types
@@ -77,7 +145,8 @@ def read_tag(fid, pos=None):
elif tag.type == FIFF.FIFFT_DOUBLE:
tag.data = np.fromfile(fid, dtype=">f8", count=tag.size/8)
elif tag.type == FIFF.FIFFT_STRING:
- tag.data = np.fromfile(fid, dtype=">c1", count=tag.size)
+ tag.data = np.fromfile(fid, dtype=">c", count=tag.size)
+ tag.data = ''.join(tag.data)
elif tag.type == FIFF.FIFFT_DAU_PACK16:
tag.data = np.fromfile(fid, dtype=">h2", count=tag.size/2)
elif tag.type == FIFF.FIFFT_COMPLEX_FLOAT:
@@ -104,11 +173,12 @@ def read_tag(fid, pos=None):
tag.data['coord_frame'] = 0
elif tag.type == FIFF.FIFFT_COORD_TRANS_STRUCT:
tag.data = Bunch()
- tag.data['from'] = np.fromfile(fid, dtype=">i4", count=1)
+ tag.data['from_'] = np.fromfile(fid, dtype=">i4", count=1)
tag.data['to'] = np.fromfile(fid, dtype=">i4", count=1)
rot = np.fromfile(fid, dtype=">f4", count=9).reshape(3, 3)
move = np.fromfile(fid, dtype=">f4", count=3)
- tag.data['trans'] = np.r_[ np.c_[rot, move], [0, 0, 0, 1]]
+ tag.data['trans'] = np.r_[ np.c_[rot, move],
+ np.array([[0], [0], [0], [1]]).T]
#
# Skip over the inverse transformation
# It is easier to just use inverse of trans in Matlab
@@ -137,10 +207,10 @@ def read_tag(fid, pos=None):
if kind == FIFF.FIFFV_MEG_CH or kind == FIFF.FIFFV_REF_MEG_CH:
tag.data.coil_trans = np.r_[ np.c_[loc[3:5], loc[6:8],
loc[9:11], loc[0:2] ],
- [0, 0, 0, 1 ] ]
+ np.array([0, 0, 0, 1 ]).reshape(1, 4) ]
tag.data.coord_frame = FIFF.FIFFV_COORD_DEVICE
elif tag.data.kind == FIFF.FIFFV_EEG_CH:
- if np.norm(loc[3:5]) > 0:
+ if np.linalg.norm(loc[3:5]) > 0:
tag.data.eeg_loc = np.c_[ loc[0:2], loc[3:5] ]
else:
tag.data.eeg_loc = loc[1:3]
@@ -157,13 +227,7 @@ def read_tag(fid, pos=None):
#
# Omit nulls
#
- length = 16
- for k in range(16):
- if ch_name(k) == 0:
- length = k-1
- break
- tag.data['ch_name'] = ch_name[1:length]
- import pdb; pdb.set_trace()
+ tag.data['ch_name'] = ''.join(ch_name[:np.where(ch_name == '')[0][0]])
elif tag.type == FIFF.FIFFT_OLD_PACK:
offset = np.fromfile(fid, dtype=">f4", count=1)
@@ -183,3 +247,18 @@ def read_tag(fid, pos=None):
fid.seek(tag.next, 1) # XXX : fix? pb when tag.next < 0
return tag
+
+
+def find_tag(fid, node, findkind):
+ for p in range(node.nent):
+ if node.directory[p].kind == findkind:
+ return read_tag(fid, node.directory[p].pos)
+ tag = None;
+ return tag
+
+
+def has_tag(this, findkind):
+ for p in range(this.nent):
+ if this.directory[p].kind == findkind:
+ return True
+ return False
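
The two helpers added at the bottom mirror how read_meas_info() walks a node; a
hedged sketch of the pattern (the file name is illustrative):

    import fiff
    from fiff.tag import find_tag, has_tag
    from fiff.tree import dir_tree_find

    fid, tree, directory = fiff.fiff_open('sm02a1-ave.fif')
    meas_info = dir_tree_find(tree, fiff.FIFF.FIFFB_MEAS_INFO)[0]
    if has_tag(meas_info, fiff.FIFF.FIFF_NCHAN):
        tag = find_tag(fid, meas_info, fiff.FIFF.FIFF_NCHAN)
        print 'nchan =', tag.data
    fid.close()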
diff --git a/fiff/tree.py b/fiff/tree.py
index 9919d4c..c211df2 100644
--- a/fiff/tree.py
+++ b/fiff/tree.py
@@ -1,17 +1,39 @@
from .bunch import Bunch
from .read_tag import read_tag
-def make_dir_tree(fid, directory, start=0, indent=0):
+
+def dir_tree_find(tree, kind):
+ """[nodes] = dir_tree_find(tree,kind)
+
+ Find nodes of the given kind from a directory tree structure
+
+ Returns a list of matching nodes
+ """
+ nodes = []
+
+ if isinstance(tree, list):
+ for t in tree:
+ nodes += dir_tree_find(t, kind)
+ else:
+ # Am I desirable myself?
+ if tree.block == kind:
+ nodes.append(tree)
+
+ # Search the subtrees
+ for child in tree.children:
+ nodes += dir_tree_find(child, kind)
+ return nodes
+
+
+def make_dir_tree(fid, directory, start=0, indent=0, verbose=True):
"""Create the directory tree structure
"""
- FIFF_BLOCK_START = 104
- FIFF_BLOCK_END = 105
- FIFF_FILE_ID = 100
- FIFF_BLOCK_ID = 103
+ FIFF_BLOCK_START = 104
+ FIFF_BLOCK_END = 105
+ FIFF_FILE_ID = 100
+ FIFF_BLOCK_ID = 103
FIFF_PARENT_BLOCK_ID = 110
- verbose = 0
-
if directory[start].kind == FIFF_BLOCK_START:
tag = read_tag(fid, directory[start].pos)
block = tag.data
@@ -23,7 +45,6 @@ def make_dir_tree(fid, directory, start=0, indent=0):
print '\t'
print 'start { %d\n' % block
- nchild = 0
this = start
tree = Bunch()
@@ -33,21 +54,20 @@ def make_dir_tree(fid, directory, start=0, indent=0):
tree['nent'] = 0
tree['nchild'] = 0
tree['directory'] = directory[this]
- tree['children'] = Bunch(block=None, id=None, parent_id=None, nent=None,
- nchild=None, directory=None, children=None)
+ tree['children'] = []
while this < len(directory):
if directory[this].kind == FIFF_BLOCK_START:
if this != start:
child, this = make_dir_tree(fid, directory, this, indent+1)
- tree.nchild = tree.nchild + 1
- tree.children[tree.nchild] = child
+ tree.nchild += 1
+ tree.children.append(child)
elif directory[this].kind == FIFF_BLOCK_END:
tag = read_tag(fid, directory[start].pos)
if tag.data == block:
break
else:
- tree.nent = tree.nent + 1
+ tree.nent += 1
if tree.nent == 1:
tree.directory = list()
tree.directory.append(directory[this])
@@ -65,12 +85,12 @@ def make_dir_tree(fid, directory, start=0, indent=0):
elif directory[this].kind == FIFF_PARENT_BLOCK_ID:
tag = read_tag(fid, directory[this].pos)
tree.parent_id = tag.data
- this = this + 1
+ this += 1
#
# Eliminate the empty directory
#
if tree.nent == 0:
- tree.directory = []
+ tree.directory = None
if verbose:
for k in range(indent+1):
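
With children now stored as a plain list, the tree returned by make_dir_tree() can be
walked recursively; a small sketch using the Bunch fields set above (block, nent,
nchild, children):

    def print_tree(node, indent=0):
        # one line per block, indented by depth
        print '%sblock %s (nent=%d, nchild=%d)' % ('  ' * indent, node.block,
                                                   node.nent, node.nchild)
        for child in node.children:
            print_tree(child, indent + 1)

    # fid, tree, directory = fiff.fiff_open('sm02a1-ave.fif')
    # print_tree(tree)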
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-mne.git