[pymvpa] extracting sample predictions from a searchlight
Brian Murphy
brian.murphy at qub.ac.uk
Wed Apr 6 13:09:35 UTC 2016
Hi,
> > Should I instead be writing a custom function to read out and record
> > from each instance of the CrossValidation, with its postproc attribute
> > (e.g. via CrossValidation.stats.sets)?
>
> quick one if I got it right -- may be just to provide errorfx=None to
> CrossValidation so it doesn't estimate any error and just spits out all
> the estimates and then you could compute errors yourself... ?
Cool - I've tried that and it kind of works, but throws a "TypeError:
cannot perform reduce with flexible type" when it's wrapping up after
computing all the searchlights (see the first code snippet and errors
below).
Then I also tried putting in a custom function as errorfx, just to print
the estimates out and append them to a list for later processing, and it did manage to
read out the set of estimates from the first fold-wise partition of the
first searchlight... but then it threw this error "ValueError:
AttrDataset only supports dtypes as samples that have a `dtype`
attribute that behaves similar to the one of an array-like." ... again
the full output is below.
SNIPPET USING errorfx=None:
> ...: miniDs = ds.copy(deep=True)
> ...: miniDs = miniDs[:,:10]
> ...: searchLightSize = 3
> ...: clf = PLR();
> ...: cv = CrossValidation(clf,
> ...: NFoldPartitioner(),
> ...: errorfx=None,
> ...: enable_ca=['probabilities', 'samples_error','stats', 'calling_time','confusion', 'estimates', 'predictions', 'repetition_results', 'raw_results', 'null_prob']
> ...: )
> ...: sl = sphere_searchlight(cv, radius=searchLightSize, postproc=mean_sample(), nproc=1)
> ...: sl_map = sl(miniDs)
>
> [SLC] DBG: Starting computing block for 10 elements
> [SLC] DBG: +0:00:00 ======[100%]====== 0:00:00 ROI 10 (10/10), 10 features
>
> [SLC] DBG: hstacking 10 results of shape (462, 1)
> [SLC] DBG: hstacked shape (462, 10)
> Traceback (most recent call last):
>
> File "<ipython-input-23-84c10f22f821>", line 23, in <module>
> sl_map = sl(miniDs)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/learner.py", line 258, in __call__
> return super(Learner, self).__call__(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 124, in __call__
> result = self._postcall(ds, result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/measures/base.py", line 128, in _postcall
> result = super(Measure, self)._postcall(dataset, result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 165, in _postcall
> result = self._apply_postproc(ds, result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 239, in _apply_postproc
> result = self.__postproc(result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/learner.py", line 258, in __call__
> return super(Learner, self).__call__(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 123, in __call__
> result = self._call(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/mappers/base.py", line 291, in _call
> return self.forward(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/mappers/base.py", line 215, in forward
> return self._forward_dataset(data)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/mappers/fx.py", line 164, in _forward_dataset
> mdata, sattrs = self._forward_dataset_full(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/mappers/fx.py", line 282, in _forward_dataset_full
> mdata = self._forward_data(ds.samples)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/mappers/fx.py", line 152, in _forward_data
> mdata = self.__smart_apply_along_axis(data)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/mappers/fx.py", line 135, in __smart_apply_along_axis
> mdata = fx(data, naxis, *self.__fxargs)
>
> File "/usr/local/lib/python2.7/dist-packages/numpy/core/fromnumeric.py", line 2878, in mean
> out=out, keepdims=keepdims)
>
> File "/usr/local/lib/python2.7/dist-packages/numpy/core/_methods.py", line 65, in _mean
> ret = umr_sum(arr, axis, dtype, out, keepdims)
>
> TypeError: cannot perform reduce with flexible type
SNIPPET USING CUSTOM FUNCTION TO HANDLE CLASSIFIER ESTIMATES:
> ...: collectedEstimates = []
> ...: def handleEstimates(predictions, targets):
> ...: print (predictions, targets)
> ...: collectedEstimates.append((predictions, targets))
>
> ...: # searchlight classifier
> ...: searchLightSize = 3
> ...: clf = PLR(); #kNN(k=9, dfx=one_minus_correlation, voting='majority');
> ...: cv = CrossValidation(clf,
> ...: NFoldPartitioner(),
> ...: errorfx=handleEstimates,
> ...: enable_ca=['probabilities', 'samples_error','stats', 'calling_time','confusion', 'estimates', 'predictions', 'repetition_results', 'raw_results', 'null_prob']
> ...: )
> ...: sl = sphere_searchlight(cv, radius=searchLightSize, postproc=mean_sample(), nproc=1)
> ...: sl_map = sl(miniDs)
> ...:
>
> [SLC] DBG: Starting computing block for 10 elements
> (array(['subject', 'subject', 'subject', 'subject', 'subject', 'subject',
> 'object', 'object', 'object', 'object', 'object', 'object',
> 'object', 'object', 'subject', 'subject', 'subject', 'subject',
> 'subject', 'subject', 'subject', 'subject', 'subject', 'subject',
> 'object', 'object', 'object', 'subject', 'subject', 'subject',
> 'subject', 'subject', 'object', 'object', 'subject', 'subject',
> 'object', 'object', 'object', 'object', 'object', 'object',
> 'subject', 'object', 'object', 'subject', 'subject', 'subject',
> 'object', 'object', 'object', 'object', 'object', 'subject',
> 'subject', 'subject', 'subject', 'subject', 'subject', 'subject',
> 'subject', 'object', 'object', 'object', 'subject', 'subject',
> 'object', 'object', 'object', 'subject', 'subject', 'subject',
> 'subject', 'subject', 'subject', 'subject', 'object', 'object',
> 'subject', 'object', 'subject', 'subject', 'object', 'subject',
> 'subject', 'object', 'object', 'object', 'subject', 'subject'],
> dtype='|S7'), array(['subject', 'object', 'subject', 'subject', 'subject', 'object',
> 'subject', 'subject', 'subject', 'subject', 'subject', 'object',
> 'object', 'object', 'subject', 'subject', 'subject', 'object',
> 'subject', 'object', 'subject', 'subject', 'object', 'subject',
> 'subject', 'subject', 'subject', 'subject', 'object', 'subject',
> 'subject', 'object', 'subject', 'subject', 'object', 'object',
> 'subject', 'subject', 'subject', 'object', 'object', 'subject',
> 'subject', 'subject', 'subject', 'subject', 'object', 'subject',
> 'subject', 'subject', 'subject', 'object', 'subject', 'subject',
> 'subject', 'subject', 'object', 'subject', 'object', 'subject',
> 'subject', 'subject', 'subject', 'object', 'object', 'object',
> 'object', 'object', 'object', 'object', 'object', 'object',
> 'object', 'object', 'object', 'object', 'object', 'object',
> 'object', 'object', 'object', 'object', 'object', 'object',
> 'object', 'object', 'object', 'object', 'object', 'object'],
> dtype='|S7'))
> Traceback (most recent call last):
>
> File "<ipython-input-24-803e9d1b4320>", line 23, in <module>
> sl_map = sl(miniDs)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/learner.py", line 258, in __call__
> return super(Learner, self).__call__(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 123, in __call__
> result = self._call(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/measures/searchlight.py", line 154, in _call
> results = self._sl_call(dataset, roi_ids, nproc)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/measures/searchlight.py", line 375, in _sl_call
> self._proc_block(roi_ids, dataset, self.__datameasure)]
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/measures/searchlight.py", line 471, in _proc_block
> res = measure(roi)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/learner.py", line 258, in __call__
> return super(Learner, self).__call__(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 123, in __call__
> result = self._call(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/measures/base.py", line 514, in _call
> return super(CrossValidation, self)._call(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/measures/base.py", line 337, in _call
> result = node(sds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/learner.py", line 258, in __call__
> return super(Learner, self).__call__(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 124, in __call__
> result = self._postcall(ds, result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/measures/base.py", line 128, in _postcall
> result = super(Measure, self)._postcall(dataset, result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 165, in _postcall
> result = self._apply_postproc(ds, result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 239, in _apply_postproc
> result = self.__postproc(result)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/node.py", line 123, in __call__
> result = self._call(ds)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/mappers/fx.py", line 640, in _call
> return Dataset(err)
>
> File "/usr/lib/python2.7/dist-packages/mvpa2/base/dataset.py", line 217, in __init__
> "AttrDataset only supports dtypes as samples that have a "
>
> ValueError: AttrDataset only supports dtypes as samples that have a `dtype` attribute that behaves similar to the one of an array-like.
>
More information about the Pkg-ExpPsy-PyMVPA
mailing list