[pymvpa] SL on an artificial dataset
Matthias Ekman
matthias.ekman at googlemail.com
Tue Apr 3 08:22:28 UTC 2012
Thanks Michael,
I guess I am missing something very obvious here. With the following
code I get the error below (it doesn't seem to be related to 'mycoord').
from sklearn.datasets import make_classification
from mvpa2.suite import *

# artificial dataset: 50 samples, 630 features, 2 classes
X, y = make_classification(n_samples=50, n_features=630, n_classes=2)
ds = Dataset(X)
ds.targets = y
ds.chunks = np.arange(50)
cv = CrossValidation(LinearCSVMC(C=1), NFoldPartitioner())
# print cv(ds)  # running the cross-validation directly gives the same error
ds.fa['mycoord'] = np.arange(ds.nfeatures)
sl = sphere_searchlight(cv, radius=2, space='mycoord', nproc=1)
sl_map = sl(ds)
In [3]: %run tmp.py
ERROR: An unexpected error occurred while tokenizing input
The following traceback may be corrupted or invalid
The error message is: ('EOF in multi-line statement', (100, 0))
ERROR: An unexpected error occurred while tokenizing input
The following traceback may be corrupted or invalid
The error message is: ('EOF in multi-line statement', (163, 0))
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
/vol/biopsy/me/tmp.py in <module>()
11 ds.fa['mycoord'] = np.arange(ds.nfeatures)
12 sl = sphere_searchlight(cv, radius=2, space='mycoord',nproc=1)
---> 13 sl_map = sl(ds)
14
15
/vol/biopsy/me/apps/PyMVPA/mvpa2/base/learner.pyc in __call__(self, ds)
235 "used and auto training is
disabled."
236 % str(self))
--> 237 return super(Learner, self).__call__(ds)
238
239
/vol/biopsy/me/apps/PyMVPA/mvpa2/base/node.pyc in __call__(self, ds)
78
79 self._precall(ds)
---> 80 result = self._call(ds)
81 result = self._postcall(ds, result)
82
/vol/biopsy/me/apps/PyMVPA/mvpa2/measures/searchlight.pyc in _call(self, dataset)
123
124 # pass to subclass
--> 125 results, roi_sizes = self._sl_call(dataset, roi_ids, nproc)
126
127 if not roi_sizes is None:
/vol/biopsy/me/apps/PyMVPA/mvpa2/measures/searchlight.pyc in _sl_call(self, dataset, roi_ids, nproc)
239 # otherwise collect the results in a list
240 results, roi_sizes = \
--> 241 self._proc_block(roi_ids, dataset, self.__datameasure)
242
243 if __debug__ and 'SLC' in debug.active:
/vol/biopsy/me/apps/PyMVPA/mvpa2/measures/searchlight.pyc in _proc_block(self, block, ds, measure)
291
292 # compute the datameasure and store in results
--> 293 results.append(measure(roi))
294
295 # store the size of the roi dataset
/vol/biopsy/me/apps/PyMVPA/mvpa2/base/learner.pyc in __call__(self, ds)
235 "used and auto training is
disabled."
236 % str(self))
--> 237 return super(Learner, self).__call__(ds)
238
239
/vol/biopsy/me/apps/PyMVPA/mvpa2/base/node.pyc in __call__(self, ds)
78
79 self._precall(ds)
---> 80 result = self._call(ds)
81 result = self._postcall(ds, result)
82
/vol/biopsy/me/apps/PyMVPA/mvpa2/measures/base.pyc in _call(self, ds)
465 # always untrain to wipe out previous stats
466 self.untrain()
--> 467 return super(CrossValidation, self)._call(ds)
468
469
/vol/biopsy/me/apps/PyMVPA/mvpa2/measures/base.pyc in _call(self, ds)
290 # run the node an all generated datasets
291 results = []
--> 292 for i, sds in enumerate(generator.generate(ds)):
293 if __debug__:
294 debug('REPM', "%d-th iteration of %s on %s",
/vol/biopsy/me/apps/PyMVPA/mvpa2/generators/partition.pyc in generate(self, ds)
111 def generate(self, ds):
112 # for each split
--> 113 cfgs = self.get_partition_specs(ds)
114 n_cfgs = len(cfgs)
115
/vol/biopsy/me/apps/PyMVPA/mvpa2/generators/partition.pyc in get_partition_specs(self, ds)
186 """
187 # list (#splits) of lists (#partitions)
--> 188 cfgs = self._get_partition_specs(ds.sa[self.__attr].unique)
189
190 # Select just some splits if desired
KeyError: 'chunks'
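
P.S. The traceback ends in a KeyError for 'chunks', so maybe the plain attribute assignments above never register targets/chunks as sample attributes? Just as a sketch (mirroring the ds.fa['mycoord'] assignment; I have not verified that this actually avoids the error), the explicit form would be:

from sklearn.datasets import make_classification
from mvpa2.suite import *

X, y = make_classification(n_samples=50, n_features=630, n_classes=2)
ds = Dataset(X)
# register targets and chunks explicitly as sample attributes,
# analogous to the ds.fa['mycoord'] feature attribute
ds.sa['targets'] = y
ds.sa['chunks'] = np.arange(50)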