[pymvpa] Memory Error when Loading fMRI dataset

Jason Ozubko jozubko at research.baycrest.org
Mon Jan 13 23:22:35 UTC 2014


Hello,

I'm currently trying to set up my first MVPA analysis after working through
a few tutorials, and I'm hitting a snag at the very first stage: loading in the
data.  I have a rather long experiment (~80 minutes, 2.2 s TR, broken
into about 5 sessions/runs).  I do some minimal preprocessing in SPM and then
output 4D .nii files containing the functional data that I want to use
in my MVPA analyses.

My initial idea was to concatenate all the runs into one big .nii and then
load that in as a single dataset using fmri_dataset.  However, when I try
this, I get a MemoryError (see the attached traceback for the full output).

I have since learned that Python does not crash if I load the runs
individually (i.e., five separate .nii files, each containing the functional
data from a different run).  So I am wondering: if I loaded the runs
individually, concatenated the loaded datasets into one big dataset, and then
set the sa.targets and sa.chunks attributes appropriately, could I proceed
with the rest of my analysis as normal, or would there be issues arising from
the fact that the datasets were concatenated after loading?  A sketch of what
I have in mind is below.
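
Here is a minimal sketch of the per-run loading and stacking I have in mind
(the data directory, run file names, and per-volume labels are placeholders,
not my real setup):

import os
from mvpa2.suite import fmri_dataset, vstack

# placeholder paths and labels -- the real study would substitute its own
path = '/data/ppat-wraf10044'                         # hypothetical data directory
run_files = ['run%d.nii.gz' % r for r in range(1, 6)]
mask_file = os.path.join(path, 'mask_hippR_2x.nii')
run_targets = [['cond_a'] * 100 for _ in run_files]   # hypothetical per-volume labels

run_datasets = []
for run, fname in enumerate(run_files):
    ds = fmri_dataset(os.path.join(path, fname),
                      targets=run_targets[run],        # labels for each volume in this run
                      chunks=run,                      # one chunk per run
                      mask=mask_file)
    run_datasets.append(ds)

# stack the per-run datasets along the samples axis into one big dataset
ds_all = vstack(run_datasets)

(If later steps need dataset attributes such as the image header or mapper
stored in ds.a, I assume those may have to be copied over from one of the
per-run datasets, since I'm not sure stacking carries them along.)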

Cheers,
Jason
-------------- next part --------------
In [9]: ds = fmri_dataset(os.path.join(path, 'ppat-wraf10044-allruns.nii.gz'), mask=os.path.join(path,'mask_hippR_2x.nii'))
/usr/lib/pymodules/python2.7/nibabel/volumeutils.py:481: RuntimeWarning: overflow encountered in int_scalars
  datasize = int(np.prod(shape) * in_dtype.itemsize)
---------------------------------------------------------------------------
MemoryError                               Traceback (most recent call last)
<ipython-input-9-caf956a16ece> in <module>()
----> 1 ds = fmri_dataset(os.path.join(path, 'ppat-wraf10044-allruns.nii.gz'), mask=os.path.join(path,'mask_hippR_2x.nii'))

/usr/lib/pymodules/python2.7/mvpa2/datasets/mri.pyc in fmri_dataset(samples, targets, chunks, mask, sprefix, tprefix, add_fa)
    203     """
    204     # load the samples
--> 205     imgdata, imghdr, imgtype = _load_anyimg(samples, ensure=True, enforce_dim=4)
    206 
    207     # figure out what the mask is, but only handle known cases, the rest

/usr/lib/pymodules/python2.7/mvpa2/datasets/mri.pyc in _load_anyimg(src, ensure, enforce_dim)
    336         # try opening the beast; this might yield none in case of an unsupported
    337         # argument and is handled accordingly below
--> 338         data = _img2data(src)
    339         if not data is None:
    340             imgdata, imghdr, imgtype = data

/usr/lib/pymodules/python2.7/mvpa2/datasets/mri.pyc in _img2data(src)
     64     if isinstance(img, nibabel.spatialimages.SpatialImage):
     65         # nibabel image, dissect and return pieces
---> 66         return _get_txyz_shaped(img.get_data()), img.get_header(), img.__class__
     67     else:
     68         # no clue what it is

/usr/lib/pymodules/python2.7/nibabel/spatialimages.pyc in get_data(self)
    339 
    340     def get_data(self):
--> 341         return np.asanyarray(self._data)
    342 
    343     @property

/usr/lib/pymodules/python2.7/numpy/core/numeric.pyc in asanyarray(a, dtype, order)
    285 
    286     """
--> 287     return array(a, dtype, copy=False, order=order, subok=True)
    288 
    289 def ascontiguousarray(a, dtype=None):

/usr/lib/pymodules/python2.7/nibabel/arrayproxy.pyc in __array__(self)
     53         ''' Cached read of data from file '''
     54         if self._data is None:
---> 55             self._data = self._read_data()
     56         return self._data
     57 

/usr/lib/pymodules/python2.7/nibabel/arrayproxy.pyc in _read_data(self)
     58     def _read_data(self):
     59         fileobj = allopen(self.file_like)
---> 60         data = self.header.data_from_fileobj(fileobj)
     61         if isinstance(self.file_like, basestring):  # filename
     62             fileobj.close()

/usr/lib/pymodules/python2.7/nibabel/analyze.pyc in data_from_fileobj(self, fileobj)
    484         '''
    485         # read unscaled data
--> 486         data = self.raw_data_from_fileobj(fileobj)
    487         # get scalings from header.  Value of None means not present in header
    488         slope, inter = self.get_slope_inter()

/usr/lib/pymodules/python2.7/nibabel/analyze.pyc in raw_data_from_fileobj(self, fileobj)
    456         shape = self.get_data_shape()
    457         offset = self.get_data_offset()
--> 458         return array_from_file(shape, dtype, fileobj, offset)
    459 
    460     def data_from_fileobj(self, fileobj):

/usr/lib/pymodules/python2.7/nibabel/volumeutils.pyc in array_from_file(shape, in_dtype, infile, offset, order)
    482         if datasize == 0:
    483             return np.array([])
--> 484         data_str = infile.read(datasize)
    485         if len(data_str) != datasize:
    486             if hasattr(infile, 'name'):

/usr/lib/python2.7/gzip.pyc in read(self, size)
    247             try:
    248                 while True:
--> 249                     self._read(readsize)
    250                     readsize = min(self.max_read_chunk, readsize * 2)
    251             except EOFError:

/usr/lib/python2.7/gzip.pyc in _read(self, size)
    306 
    307         uncompress = self.decompress.decompress(buf)
--> 308         self._add_read_data( uncompress )
    309 
    310         if self.decompress.unused_data != "":

/usr/lib/python2.7/gzip.pyc in _add_read_data(self, data)
    324         self.crc = zlib.crc32(data, self.crc) & 0xffffffffL
    325         offset = self.offset - self.extrastart
--> 326         self.extrabuf = self.extrabuf[offset:] + data
    327         self.extrasize = self.extrasize + len(data)
    328         self.extrastart = self.offset

MemoryError: 
